author     GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 11:10:13 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-06-20 11:10:13 +0000
commit     0ea3fcec397b69815975647f5e2aa5fe944a8486 (patch)
tree       7979381b89d26011bcf9bdc989a40fcc2f1ed4ff /spec
parent     72123183a20411a36d607d70b12d57c484394c8e (diff)
download   gitlab-ce-0ea3fcec397b69815975647f5e2aa5fe944a8486.tar.gz

Add latest changes from gitlab-org/gitlab@15-1-stable-ee (tag: v15.1.0-rc42)
Diffstat (limited to 'spec'); each row shows the file mode, path, and number of lines changed:
-rw-r--r--spec/commands/sidekiq_cluster/cli_spec.rb21
-rw-r--r--spec/components/pajamas/alert_component_spec.rb18
-rw-r--r--spec/components/pajamas/banner_component_spec.rb169
-rw-r--r--spec/components/pajamas/button_component_spec.rb273
-rw-r--r--spec/components/pajamas/card_component_spec.rb80
-rw-r--r--spec/components/pajamas/checkbox_component_spec.rb130
-rw-r--r--spec/components/pajamas/component_spec.rb17
-rw-r--r--spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb110
-rw-r--r--spec/components/pajamas/concerns/checkbox_radio_options_spec.rb32
-rw-r--r--spec/components/pajamas/radio_component_spec.rb126
-rw-r--r--spec/contracts/.gitignore2
-rw-r--r--spec/contracts/README.md15
-rw-r--r--spec/contracts/consumer/.eslintrc.yml7
-rw-r--r--spec/contracts/consumer/.node-version1
-rw-r--r--spec/contracts/consumer/babel.config.json3
-rw-r--r--spec/contracts/consumer/endpoints/project/merge_requests.js34
-rw-r--r--spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js91
-rw-r--r--spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js98
-rw-r--r--spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js87
-rw-r--r--spec/contracts/consumer/package.json26
-rw-r--r--spec/contracts/consumer/specs/project/merge_request/show.spec.js112
-rw-r--r--spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_batch_endpoint.json229
-rw-r--r--spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json223
-rw-r--r--spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_discussions_endpoint.json236
-rw-r--r--spec/contracts/provider/environments/test.rb15
-rw-r--r--spec/contracts/provider/helpers/users_helper.rb7
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_request/diffs_batch_helper.rb16
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_request/diffs_metadata_helper.rb16
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_request/discussions_helper.rb16
-rw-r--r--spec/contracts/provider/spec_helper.rb39
-rw-r--r--spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb18
-rw-r--r--spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb18
-rw-r--r--spec/contracts/provider/states/project/merge_request/discussions_state.rb17
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb57
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb2
-rw-r--r--spec/controllers/application_controller_spec.rb4
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb2
-rw-r--r--spec/controllers/chaos_controller_spec.rb4
-rw-r--r--spec/controllers/concerns/sorting_preference_spec.rb44
-rw-r--r--spec/controllers/confirmations_controller_spec.rb23
-rw-r--r--spec/controllers/graphql_controller_spec.rb11
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb136
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb28
-rw-r--r--spec/controllers/groups/settings/integrations_controller_spec.rb2
-rw-r--r--spec/controllers/groups_controller_spec.rb24
-rw-r--r--spec/controllers/help_controller_spec.rb29
-rw-r--r--spec/controllers/import/fogbugz_controller_spec.rb36
-rw-r--r--spec/controllers/import/github_controller_spec.rb13
-rw-r--r--spec/controllers/metrics_controller_spec.rb4
-rw-r--r--spec/controllers/oauth/authorizations_controller_spec.rb18
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb32
-rw-r--r--spec/controllers/passwords_controller_spec.rb23
-rw-r--r--spec/controllers/profiles/personal_access_tokens_controller_spec.rb25
-rw-r--r--spec/controllers/projects/autocomplete_sources_controller_spec.rb12
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb144
-rw-r--r--spec/controllers/projects/commits_controller_spec.rb8
-rw-r--r--spec/controllers/projects/compare_controller_spec.rb27
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb11
-rw-r--r--spec/controllers/projects/import/jira_controller_spec.rb2
-rw-r--r--spec/controllers/projects/incidents_controller_spec.rb1
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb99
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb92
-rw-r--r--spec/controllers/projects/mattermosts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/drafts_controller_spec.rb32
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb8
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb94
-rw-r--r--spec/controllers/projects/pipelines/tests_controller_spec.rb12
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb12
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb83
-rw-r--r--spec/controllers/projects/prometheus/alerts_controller_spec.rb106
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb6
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb23
-rw-r--r--spec/controllers/projects/services_controller_spec.rb356
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb13
-rw-r--r--spec/controllers/projects/settings/integration_hook_logs_controller_spec.rb (renamed from spec/controllers/projects/service_hook_logs_controller_spec.rb)5
-rw-r--r--spec/controllers/projects/settings/integrations_controller_spec.rb382
-rw-r--r--spec/controllers/projects/static_site_editor_controller_spec.rb101
-rw-r--r--spec/controllers/projects/tags_controller_spec.rb10
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb5
-rw-r--r--spec/controllers/registrations_controller_spec.rb23
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb13
-rw-r--r--spec/controllers/sessions_controller_spec.rb25
-rw-r--r--spec/db/migration_spec.rb2
-rw-r--r--spec/db/schema_spec.rb16
-rw-r--r--spec/events/pages/page_deleted_event_spec.rb34
-rw-r--r--spec/experiments/application_experiment_spec.rb21
-rw-r--r--spec/factories/alert_management/alerts.rb4
-rw-r--r--spec/factories/application_settings.rb4
-rw-r--r--spec/factories/ci/builds.rb18
-rw-r--r--spec/factories/clusters/applications/helm.rb13
-rw-r--r--spec/factories/clusters/cluster_enabled_grant.rb7
-rw-r--r--spec/factories/commit_signature/ssh_signature.rb10
-rw-r--r--spec/factories/commits.rb9
-rw-r--r--spec/factories/container_repositories.rb9
-rw-r--r--spec/factories/deployments.rb2
-rw-r--r--spec/factories/environments.rb2
-rw-r--r--spec/factories/gitlab/database/background_migration/batched_migrations.rb1
-rw-r--r--spec/factories/incident_management/timeline_events.rb9
-rw-r--r--spec/factories/issues.rb7
-rw-r--r--spec/factories/merge_requests.rb6
-rw-r--r--spec/factories/plan_limits.rb6
-rw-r--r--spec/factories/project_members.rb5
-rw-r--r--spec/factories/projects.rb6
-rw-r--r--spec/factories/releases.rb6
-rw-r--r--spec/factories/sequences.rb1
-rw-r--r--spec/factories/terraform/state.rb4
-rw-r--r--spec/factories/time_tracking/timelog_categories.rb9
-rw-r--r--spec/factories/work_items.rb5
-rw-r--r--spec/factories/work_items/parent_links.rb21
-rw-r--r--spec/fast_spec_helper.rb7
-rw-r--r--spec/features/admin/admin_disables_two_factor_spec.rb7
-rw-r--r--spec/features/admin/admin_groups_spec.rb11
-rw-r--r--spec/features/admin/admin_hook_logs_spec.rb14
-rw-r--r--spec/features/admin/admin_hooks_spec.rb7
-rw-r--r--spec/features/admin/admin_labels_spec.rb9
-rw-r--r--spec/features/admin/admin_runners_spec.rb91
-rw-r--r--spec/features/admin/admin_settings_spec.rb14
-rw-r--r--spec/features/admin/admin_users_impersonation_tokens_spec.rb5
-rw-r--r--spec/features/admin/admin_uses_repository_checks_spec.rb6
-rw-r--r--spec/features/admin/users/user_spec.rb3
-rw-r--r--spec/features/admin/users/users_spec.rb10
-rw-r--r--spec/features/boards/boards_spec.rb1
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb14
-rw-r--r--spec/features/clusters/create_agent_spec.rb1
-rw-r--r--spec/features/commits_spec.rb3
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb3
-rw-r--r--spec/features/group_variables_spec.rb14
-rw-r--r--spec/features/groups/clusters/user_spec.rb4
-rw-r--r--spec/features/groups/empty_states_spec.rb2
-rw-r--r--spec/features/groups/group_runners_spec.rb52
-rw-r--r--spec/features/groups/import_export/connect_instance_spec.rb2
-rw-r--r--spec/features/groups/issues_spec.rb47
-rw-r--r--spec/features/groups/members/leave_group_spec.rb4
-rw-r--r--spec/features/groups/navbar_spec.rb9
-rw-r--r--spec/features/groups/settings/access_tokens_spec.rb3
-rw-r--r--spec/features/groups/show_spec.rb52
-rw-r--r--spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb47
-rw-r--r--spec/features/groups_spec.rb15
-rw-r--r--spec/features/incidents/incidents_list_spec.rb13
-rw-r--r--spec/features/issuables/issuable_list_spec.rb2
-rw-r--r--spec/features/issue_rebalancing_spec.rb4
-rw-r--r--spec/features/issues/filtered_search/dropdown_assignee_spec.rb4
-rw-r--r--spec/features/issues/filtered_search/dropdown_author_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_base_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_emoji_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_hint_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_label_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_milestone_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/dropdown_release_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/filter_issues_spec.rb1
-rw-r--r--spec/features/issues/filtered_search/recent_searches_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/search_bar_spec.rb1
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb1
-rw-r--r--spec/features/issues/incident_issue_spec.rb62
-rw-r--r--spec/features/issues/issue_detail_spec.rb10
-rw-r--r--spec/features/issues/rss_spec.rb4
-rw-r--r--spec/features/issues/user_bulk_edits_issues_labels_spec.rb4
-rw-r--r--spec/features/issues/user_bulk_edits_issues_spec.rb4
-rw-r--r--spec/features/issues/user_comments_on_issue_spec.rb14
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb4
-rw-r--r--spec/features/issues/user_filters_issues_spec.rb2
-rw-r--r--spec/features/issues/user_sees_breadcrumb_links_spec.rb2
-rw-r--r--spec/features/issues/user_sorts_issues_spec.rb2
-rw-r--r--spec/features/labels_hierarchy_spec.rb2
-rw-r--r--spec/features/markdown/math_spec.rb20
-rw-r--r--spec/features/markdown/mermaid_spec.rb361
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb271
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb1
-rw-r--r--spec/features/merge_request/user_creates_merge_request_spec.rb3
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb3
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb17
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb1
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_discussions_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb1
-rw-r--r--spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb10
-rw-r--r--spec/features/merge_requests/user_sorts_merge_requests_spec.rb2
-rw-r--r--spec/features/milestone_spec.rb10
-rw-r--r--spec/features/nav/top_nav_responsive_spec.rb2
-rw-r--r--spec/features/nav/top_nav_tooltip_spec.rb24
-rw-r--r--spec/features/oauth_registration_spec.rb91
-rw-r--r--spec/features/profile_spec.rb19
-rw-r--r--spec/features/profiles/active_sessions_spec.rb10
-rw-r--r--spec/features/profiles/oauth_applications_spec.rb11
-rw-r--r--spec/features/profiles/personal_access_tokens_spec.rb44
-rw-r--r--spec/features/profiles/user_visits_profile_spec.rb4
-rw-r--r--spec/features/project_variables_spec.rb38
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb6
-rw-r--r--spec/features/projects/ci/editor_spec.rb10
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb4
-rw-r--r--spec/features/projects/clusters/user_spec.rb4
-rw-r--r--spec/features/projects/commit/comments/user_deletes_comments_spec.rb6
-rw-r--r--spec/features/projects/commit/user_comments_on_commit_spec.rb7
-rw-r--r--spec/features/projects/commits/multi_view_diff_spec.rb128
-rw-r--r--spec/features/projects/container_registry_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb10
-rw-r--r--spec/features/projects/environments/environments_spec.rb2
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/hook_logs/user_reads_log_spec.rb73
-rw-r--r--spec/features/projects/integrations/user_activates_issue_tracker_spec.rb6
-rw-r--r--spec/features/projects/integrations/user_activates_jira_spec.rb8
-rw-r--r--spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_slack_notifications_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb12
-rw-r--r--spec/features/projects/integrations/user_uses_inherited_settings_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb9
-rw-r--r--spec/features/projects/jobs_spec.rb8
-rw-r--r--spec/features/projects/members/manage_members_spec.rb50
-rw-r--r--spec/features/projects/members/member_leaves_project_spec.rb4
-rw-r--r--spec/features/projects/members/user_requests_access_spec.rb5
-rw-r--r--spec/features/projects/new_project_spec.rb33
-rw-r--r--spec/features/projects/pages/user_adds_domain_spec.rb7
-rw-r--r--spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb9
-rw-r--r--spec/features/projects/pages/user_edits_settings_spec.rb5
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb10
-rw-r--r--spec/features/projects/pipelines/legacy_pipeline_spec.rb198
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb58
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb1
-rw-r--r--spec/features/projects/releases/user_views_edit_release_spec.rb4
-rw-r--r--spec/features/projects/settings/access_tokens_spec.rb77
-rw-r--r--spec/features/projects/settings/branch_rules_settings_spec.rb47
-rw-r--r--spec/features/projects/settings/packages_settings_spec.rb24
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb16
-rw-r--r--spec/features/projects/settings/user_searches_in_settings_spec.rb1
-rw-r--r--spec/features/promotion_spec.rb2
-rw-r--r--spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb93
-rw-r--r--spec/features/snippets/notes_on_personal_snippets_spec.rb8
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb1
-rw-r--r--spec/features/tags/developer_deletes_tag_spec.rb17
-rw-r--r--spec/features/tags/maintainer_deletes_protected_tag_spec.rb62
-rw-r--r--spec/features/triggers_spec.rb5
-rw-r--r--spec/features/user_sorts_things_spec.rb2
-rw-r--r--spec/features/users/signup_spec.rb1
-rw-r--r--spec/features/users/zuora_csp_spec.rb20
-rw-r--r--spec/finders/crm/contacts_finder_spec.rb96
-rw-r--r--spec/finders/crm/organizations_finder_spec.rb132
-rw-r--r--spec/finders/issues_finder_spec.rb1448
-rw-r--r--spec/finders/packages/pypi/packages_finder_spec.rb94
-rw-r--r--spec/finders/work_items/work_items_finder_spec.rb10
-rw-r--r--spec/fixtures/api/schemas/entities/issue.json1
-rw-r--r--spec/fixtures/api/schemas/entities/issue_board.json1
-rw-r--r--spec/fixtures/api/schemas/external_validation.json8
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/system_hook.json8
-rw-r--r--spec/fixtures/emails/service_desk_reference_headers.eml31
-rw-r--r--spec/fixtures/emails/service_desk_reply_illegal_utf8.eml26
-rw-r--r--spec/fixtures/glfm/example_snapshots/examples_index.yml1285
-rw-r--r--spec/fixtures/glfm/example_snapshots/html.yml3526
-rw-r--r--spec/fixtures/glfm/example_snapshots/markdown.yml1290
-rw-r--r--spec/fixtures/glfm/example_snapshots/prosemirror_json.yml2283
-rw-r--r--spec/fixtures/integrations/campfire/rooms.json (renamed from spec/fixtures/project_services/campfire/rooms.json)0
-rw-r--r--spec/fixtures/integrations/campfire/rooms2.json (renamed from spec/fixtures/project_services/campfire/rooms2.json)0
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_numbers_metric.rb19
-rw-r--r--spec/fixtures/markdown/markdown_golden_master_examples.yml37
-rw-r--r--spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json2
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-missing-scanner.json2
-rw-r--r--spec/fixtures/security_reports/master/gl-secret-detection-report.json2
-rw-r--r--spec/frontend/__helpers__/dl_locator_helper.js28
-rw-r--r--spec/frontend/__helpers__/emoji.js10
-rw-r--r--spec/frontend/__helpers__/init_vue_mr_page_helper.js18
-rw-r--r--spec/frontend/__helpers__/matchers/to_have_sprite_icon.js2
-rw-r--r--spec/frontend/access_tokens/components/access_token_table_app_spec.js241
-rw-r--r--spec/frontend/access_tokens/components/expires_at_field_spec.js33
-rw-r--r--spec/frontend/access_tokens/components/new_access_token_app_spec.js169
-rw-r--r--spec/frontend/access_tokens/index_spec.js214
-rw-r--r--spec/frontend/admin/application_settings/inactive_project_deletion/components/form_spec.js148
-rw-r--r--spec/frontend/admin/users/index_spec.js8
-rw-r--r--spec/frontend/analytics/usage_trends/components/usage_counts_spec.js4
-rw-r--r--spec/frontend/api_spec.js29
-rw-r--r--spec/frontend/authentication/two_factor_auth/index_spec.js4
-rw-r--r--spec/frontend/awards_handler_spec.js29
-rw-r--r--spec/frontend/batch_comments/components/submit_dropdown_spec.js69
-rw-r--r--spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js10
-rw-r--r--spec/frontend/behaviors/markdown/render_mermaid_spec.js25
-rw-r--r--spec/frontend/blob/blob_file_dropzone_spec.js49
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap4
-rw-r--r--spec/frontend/blob/components/blob_header_default_actions_spec.js8
-rw-r--r--spec/frontend/blob/components/table_contents_spec.js4
-rw-r--r--spec/frontend/blob/csv/csv_viewer_spec.js10
-rw-r--r--spec/frontend/blob/viewer/index_spec.js6
-rw-r--r--spec/frontend/boards/components/board_column_spec.js5
-rw-r--r--spec/frontend/boards/components/board_form_spec.js38
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js36
-rw-r--r--spec/frontend/boards/mock_data.js36
-rw-r--r--spec/frontend/boards/stores/actions_spec.js38
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js26
-rw-r--r--spec/frontend/cascading_settings/components/lock_popovers_spec.js10
-rw-r--r--spec/frontend/ci_variable_list/components/legacy_ci_environments_dropdown_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js)4
-rw-r--r--spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js)9
-rw-r--r--spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js)4
-rw-r--r--spec/frontend/ci_variable_list/components/legacy_ci_variable_table_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_variable_table_spec.js)4
-rw-r--r--spec/frontend/clusters/agents/components/create_token_button_spec.js7
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap191
-rw-r--r--spec/frontend/clusters/components/remove_cluster_confirmation_spec.js22
-rw-r--r--spec/frontend/clusters_list/components/agent_token_spec.js10
-rw-r--r--spec/frontend/clusters_list/components/clusters_spec.js19
-rw-r--r--spec/frontend/clusters_list/components/install_agent_modal_spec.js7
-rw-r--r--spec/frontend/code_navigation/store/actions_spec.js12
-rw-r--r--spec/frontend/confirm_modal_spec.js6
-rw-r--r--spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap16
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/code_block_spec.js157
-rw-r--r--spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js54
-rw-r--r--spec/frontend/content_editor/components/top_toolbar_spec.js29
-rw-r--r--spec/frontend/content_editor/components/wrappers/code_block_spec.js85
-rw-r--r--spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js30
-rw-r--r--spec/frontend/content_editor/extensions/footnote_definition_spec.js7
-rw-r--r--spec/frontend/content_editor/remark_markdown_processing_spec.js885
-rw-r--r--spec/frontend/content_editor/services/asset_resolver_spec.js10
-rw-r--r--spec/frontend/content_editor/services/code_block_language_loader_spec.js15
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js86
-rw-r--r--spec/frontend/custom_metrics/components/custom_metrics_form_spec.js2
-rw-r--r--spec/frontend/cycle_analytics/path_navigation_spec.js6
-rw-r--r--spec/frontend/cycle_analytics/stage_table_spec.js23
-rw-r--r--spec/frontend/cycle_analytics/value_stream_metrics_spec.js6
-rw-r--r--spec/frontend/design_management/components/design_presentation_spec.js1
-rw-r--r--spec/frontend/design_management/components/design_sidebar_spec.js1
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap4
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap2
-rw-r--r--spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap2
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap23
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js21
-rw-r--r--spec/frontend/design_management/pages/index_spec.js20
-rw-r--r--spec/frontend/design_management/router_spec.js2
-rw-r--r--spec/frontend/diffs/components/commit_item_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_expansion_cell_spec.js11
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js11
-rw-r--r--spec/frontend/diffs/components/diff_line_note_form_spec.js10
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js16
-rw-r--r--spec/frontend/diffs/store/utils_spec.js7
-rw-r--r--spec/frontend/editor/helpers.js6
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js1
-rw-r--r--spec/frontend/editor/source_editor_extension_spec.js1
-rw-r--r--spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js67
-rw-r--r--spec/frontend/editor/source_editor_spec.js29
-rw-r--r--spec/frontend/editor/source_editor_webide_ext_spec.js55
-rw-r--r--spec/frontend/emoji/index_spec.js115
-rw-r--r--spec/frontend/emoji/utils_spec.js15
-rw-r--r--spec/frontend/environments/deploy_board_wrapper_spec.js4
-rw-r--r--spec/frontend/environments/environment_folder_spec.js4
-rw-r--r--spec/frontend/environments/new_environment_item_spec.js4
-rw-r--r--spec/frontend/error_tracking_settings/components/app_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/new_feature_flag_spec.js2
-rw-r--r--spec/frontend/fixtures/integrations.rb (renamed from spec/frontend/fixtures/services.rb)6
-rw-r--r--spec/frontend/fixtures/prometheus_integration.rb (renamed from spec/frontend/fixtures/prometheus_service.rb)6
-rw-r--r--spec/frontend/fixtures/runner.rb6
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js2
-rw-r--r--spec/frontend/google_tag_manager/index_spec.js24
-rw-r--r--spec/frontend/groups/components/app_spec.js68
-rw-r--r--spec/frontend/groups/components/empty_state_spec.js78
-rw-r--r--spec/frontend/groups/components/group_name_and_path_spec.js347
-rw-r--r--spec/frontend/groups/components/item_caret_spec.js4
-rw-r--r--spec/frontend/helpers/startup_css_helper_spec.js7
-rw-r--r--spec/frontend/ide/components/commit_sidebar/editor_header_spec.js2
-rw-r--r--spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js210
-rw-r--r--spec/frontend/ide/components/ide_side_bar_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_status_bar_spec.js2
-rw-r--r--spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap2
-rw-r--r--spec/frontend/ide/components/jobs/detail/description_spec.js6
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js240
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js8
-rw-r--r--spec/frontend/incidents/components/incidents_list_spec.js15
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap4
-rw-r--r--spec/frontend/incidents_settings/components/pagerduty_form_spec.js4
-rw-r--r--spec/frontend/integrations/edit/components/jira_issues_fields_spec.js1
-rw-r--r--spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js31
-rw-r--r--spec/frontend/integrations/edit/components/sections/configuration_spec.js57
-rw-r--r--spec/frontend/integrations/edit/components/sections/trigger_spec.js38
-rw-r--r--spec/frontend/integrations/edit/components/trigger_field_spec.js71
-rw-r--r--spec/frontend/integrations/edit/mock_data.js5
-rw-r--r--spec/frontend/invite_members/components/invite_members_trigger_spec.js20
-rw-r--r--spec/frontend/invite_members/components/invite_modal_base_spec.js24
-rw-r--r--spec/frontend/invite_members/components/user_limit_notification_spec.js34
-rw-r--r--spec/frontend/issuable/components/csv_export_modal_spec.js32
-rw-r--r--spec/frontend/issuable/components/csv_import_modal_spec.js4
-rw-r--r--spec/frontend/issuable/popover/components/issue_popover_spec.js81
-rw-r--r--spec/frontend/issuable/popover/components/mr_popover_spec.js119
-rw-r--r--spec/frontend/issuable/popover/index_spec.js (renamed from spec/frontend/mr_popover/index_spec.js)19
-rw-r--r--spec/frontend/issues/create_merge_request_dropdown_spec.js4
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js120
-rw-r--r--spec/frontend/issues/list/utils_spec.js24
-rw-r--r--spec/frontend/issues/show/components/description_spec.js5
-rw-r--r--spec/frontend/issues/show/components/incidents/incident_tabs_spec.js24
-rw-r--r--spec/frontend/issues/show/components/incidents/mock_data.js72
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js87
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js87
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js105
-rw-r--r--spec/frontend/issues/show/components/incidents/utils_spec.js31
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js11
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/user_link_spec.js45
-rw-r--r--spec/frontend/jobs/components/log/collapsible_section_spec.js4
-rw-r--r--spec/frontend/jobs/components/log/line_header_spec.js8
-rw-r--r--spec/frontend/jobs/components/log/log_spec.js8
-rw-r--r--spec/frontend/labels/delete_label_modal_spec.js6
-rw-r--r--spec/frontend/lazy_loader_spec.js4
-rw-r--r--spec/frontend/lib/gfm/index_spec.js90
-rw-r--r--spec/frontend/lib/utils/dom_utils_spec.js38
-rw-r--r--spec/frontend/lib/utils/forms_spec.js4
-rw-r--r--spec/frontend/lib/utils/rails_ujs_spec.js78
-rw-r--r--spec/frontend/lib/utils/table_utility_spec.js7
-rw-r--r--spec/frontend/lib/utils/users_cache_spec.js27
-rw-r--r--spec/frontend/logs/utils_spec.js38
-rw-r--r--spec/frontend/members/components/members_tabs_spec.js14
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js31
-rw-r--r--spec/frontend/members/index_spec.js2
-rw-r--r--spec/frontend/members/utils_spec.js2
-rw-r--r--spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js2
-rw-r--r--spec/frontend/merge_request_tabs_spec.js22
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap2
-rw-r--r--spec/frontend/monitoring/components/graph_group_spec.js18
-rw-r--r--spec/frontend/monitoring/fixture_data.js3
-rw-r--r--spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap91
-rw-r--r--spec/frontend/mr_popover/mr_popover_spec.js80
-rw-r--r--spec/frontend/nav/components/responsive_header_spec.js4
-rw-r--r--spec/frontend/notebook/cells/markdown_spec.js4
-rw-r--r--spec/frontend/notes/components/comment_field_layout_spec.js4
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js32
-rw-r--r--spec/frontend/notes/components/note_body_spec.js35
-rw-r--r--spec/frontend/notes/components/note_header_spec.js16
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js19
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js4
-rw-r--r--spec/frontend/notes/mock_data.js10
-rw-r--r--spec/frontend/notes/stores/actions_spec.js34
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js12
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js3
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js20
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js3
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js6
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js4
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js84
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/metadata/composer_spec.js10
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/metadata/conan_spec.js10
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/metadata/maven_spec.js12
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js32
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js9
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js8
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js84
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js4
-rw-r--r--spec/frontend/packages_and_registries/package_registry/mock_data.js52
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/details_spec.js41
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js12
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap (renamed from spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap)12
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js (renamed from spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js)4
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js167
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js160
-rw-r--r--spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap4
-rw-r--r--spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js4
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js2
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js109
-rw-r--r--spec/frontend/performance_bar/components/add_request_spec.js28
-rw-r--r--spec/frontend/performance_bar/index_spec.js12
-rw-r--r--spec/frontend/performance_bar/services/performance_bar_service_spec.js12
-rw-r--r--spec/frontend/performance_bar/stores/performance_bar_store_spec.js9
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js27
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js48
-rw-r--r--spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js33
-rw-r--r--spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js40
-rw-r--r--spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js93
-rw-r--r--spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js40
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_home_spec.js118
-rw-r--r--spec/frontend/pipeline_wizard/components/input_wrapper_spec.js (renamed from spec/frontend/pipeline_wizard/components/input_spec.js)2
-rw-r--r--spec/frontend/pipeline_wizard/components/step_spec.js2
-rw-r--r--spec/frontend/pipeline_wizard/components/widgets_spec.js2
-rw-r--r--spec/frontend/pipelines/components/pipeline_tabs_spec.js46
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js37
-rw-r--r--spec/frontend/pipelines/graph/linked_pipeline_spec.js323
-rw-r--r--spec/frontend/pipelines/notification/deprecated_type_keyword_notification_spec.js146
-rw-r--r--spec/frontend/pipelines/pipeline_tabs_spec.js95
-rw-r--r--spec/frontend/pipelines/test_reports/test_case_details_spec.js21
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js13
-rw-r--r--spec/frontend/profile/account/components/update_username_spec.js2
-rw-r--r--spec/frontend/projects/clusters_deprecation_slert/components/clusters_deprecation_alert_spec.js45
-rw-r--r--spec/frontend/projects/compare/components/revision_card_spec.js8
-rw-r--r--spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js2
-rw-r--r--spec/frontend/projects/pipelines/charts/components/app_spec.js30
-rw-r--r--spec/frontend/projects/project_new_spec.js92
-rw-r--r--spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js101
-rw-r--r--spec/frontend/projects/settings/branch_rules/rule_edit_spec.js49
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/app_spec.js18
-rw-r--r--spec/frontend/prometheus_metrics/custom_metrics_spec.js4
-rw-r--r--spec/frontend/prometheus_metrics/prometheus_metrics_spec.js2
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js16
-rw-r--r--spec/frontend/repository/components/blob_viewers/sketch_viewer_spec.js32
-rw-r--r--spec/frontend/repository/components/new_directory_modal_spec.js2
-rw-r--r--spec/frontend/repository/components/table/index_spec.js4
-rw-r--r--spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js64
-rw-r--r--spec/frontend/runner/admin_runners/admin_runners_app_spec.js26
-rw-r--r--spec/frontend/runner/components/__snapshots__/runner_status_popover_spec.js.snap2
-rw-r--r--spec/frontend/runner/components/cells/runner_status_cell_spec.js17
-rw-r--r--spec/frontend/runner/components/registration/registration_dropdown_spec.js16
-rw-r--r--spec/frontend/runner/components/registration/registration_token_spec.js1
-rw-r--r--spec/frontend/runner/components/runner_details_spec.js70
-rw-r--r--spec/frontend/runner/components/runner_jobs_spec.js4
-rw-r--r--spec/frontend/runner/components/runner_list_empty_state_spec.js76
-rw-r--r--spec/frontend/runner/components/runner_projects_spec.js4
-rw-r--r--spec/frontend/runner/group_runners/group_runners_app_spec.js10
-rw-r--r--spec/frontend/runner/mock_data.js5
-rw-r--r--spec/frontend/runner/runner_search_utils_spec.js20
-rw-r--r--spec/frontend/search/store/actions_spec.js19
-rw-r--r--spec/frontend/search_autocomplete_spec.js2
-rw-r--r--spec/frontend/security_configuration/components/app_spec.js94
-rw-r--r--spec/frontend/security_configuration/mock_data.js9
-rw-r--r--spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js24
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js2
-rw-r--r--spec/frontend/sidebar/components/attention_requested_toggle_spec.js12
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_content_spec.js4
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js2
-rw-r--r--spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js7
-rw-r--r--spec/frontend/sidebar/components/time_tracking/mock_data.js28
-rw-r--r--spec/frontend/sidebar/components/time_tracking/report_spec.js86
-rw-r--r--spec/frontend/static_site_editor/components/app_spec.js34
-rw-r--r--spec/frontend/static_site_editor/components/edit_area_spec.js264
-rw-r--r--spec/frontend/static_site_editor/components/edit_drawer_spec.js67
-rw-r--r--spec/frontend/static_site_editor/components/edit_header_spec.js38
-rw-r--r--spec/frontend/static_site_editor/components/edit_meta_controls_spec.js115
-rw-r--r--spec/frontend/static_site_editor/components/edit_meta_modal_spec.js172
-rw-r--r--spec/frontend/static_site_editor/components/front_matter_controls_spec.js71
-rw-r--r--spec/frontend/static_site_editor/components/invalid_content_message_spec.js23
-rw-r--r--spec/frontend/static_site_editor/components/publish_toolbar_spec.js92
-rw-r--r--spec/frontend/static_site_editor/components/submit_changes_error_spec.js48
-rw-r--r--spec/frontend/static_site_editor/components/unsaved_changes_confirm_dialog_spec.js44
-rw-r--r--spec/frontend/static_site_editor/graphql/resolvers/file_spec.js25
-rw-r--r--spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js27
-rw-r--r--spec/frontend/static_site_editor/graphql/resolvers/submit_content_changes_spec.js37
-rw-r--r--spec/frontend/static_site_editor/mock_data.js91
-rw-r--r--spec/frontend/static_site_editor/pages/home_spec.js301
-rw-r--r--spec/frontend/static_site_editor/pages/success_spec.js131
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/editor_service_spec.js214
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/modals/add_image/add_image_modal_spec.js77
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/modals/add_image/upload_image_tab_spec.js41
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/modals/insert_video_modal_spec.js44
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_integration_spec.js69
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_spec.js222
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/build_custom_renderer_spec.js32
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/build_html_to_markdown_renderer_spec.js218
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token_spec.js88
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/mock_data.js54
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_attribute_definition_spec.js25
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_embedded_ruby_spec.js24
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_font_awesome_html_inline_spec.js33
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_heading_spec.js12
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_html_block_spec.js37
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js55
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js84
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_list_item_spec.js12
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_softbreak_spec.js23
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_utils_spec.js109
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/services/sanitize_html_spec.js11
-rw-r--r--spec/frontend/static_site_editor/rich_content_editor/toolbar_item_spec.js57
-rw-r--r--spec/frontend/static_site_editor/services/formatter_spec.js39
-rw-r--r--spec/frontend/static_site_editor/services/front_matterify_spec.js54
-rw-r--r--spec/frontend/static_site_editor/services/generate_branch_name_spec.js22
-rw-r--r--spec/frontend/static_site_editor/services/load_source_content_spec.js36
-rw-r--r--spec/frontend/static_site_editor/services/parse_source_file_spec.js101
-rw-r--r--spec/frontend/static_site_editor/services/renderers/render_image_spec.js96
-rw-r--r--spec/frontend/static_site_editor/services/submit_content_changes_spec.js261
-rw-r--r--spec/frontend/static_site_editor/services/templater_spec.js112
-rw-r--r--spec/frontend/tags/components/delete_tag_modal_spec.js138
-rw-r--r--spec/frontend/tags/init_delete_tag_modal_spec.js23
-rw-r--r--spec/frontend/terraform/components/states_table_actions_spec.js1
-rw-r--r--spec/frontend/terraform/components/states_table_spec.js41
-rw-r--r--spec/frontend/terraform/components/terraform_list_spec.js4
-rw-r--r--spec/frontend/user_popovers_spec.js137
-rw-r--r--spec/frontend/users_select/test_helper.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js50
-rw-r--r--spec/frontend/vue_mr_widget/components/approvals/humanized_text_spec.js18
-rw-r--r--spec/frontend/vue_mr_widget/components/extensions/index_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js176
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js42
-rw-r--r--spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js17
-rw-r--r--spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js24
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js236
-rw-r--r--spec/frontend/vue_mr_widget/test_extensions.js51
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_details_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap120
-rw-r--r--spec/frontend/vue_shared/components/ci_icon_spec.js48
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/color_item_spec.js35
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js192
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_color_view_spec.js43
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js113
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/dropdown_header_spec.js40
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js46
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/mock_data.js30
-rw-r--r--spec/frontend/vue_shared/components/confidentiality_badge_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js69
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap4
-rw-r--r--spec/frontend/vue_shared/components/notes/system_note_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/papa_parse_alert_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/registry/registry_search_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js234
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js24
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js65
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js45
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js26
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap30
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js35
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js9
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js1
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js4
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js3
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js15
-rw-r--r--spec/frontend/vue_shared/issuable/show/mock_data.js1
-rw-r--r--spec/frontend/work_items/components/item_title_spec.js2
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js93
-rw-r--r--spec/frontend/work_items/components/work_item_description_spec.js222
-rw-r--r--spec/frontend/work_items/components/work_item_detail_modal_spec.js12
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_spec.js88
-rw-r--r--spec/frontend/work_items/components/work_item_state_spec.js14
-rw-r--r--spec/frontend/work_items/components/work_item_title_spec.js37
-rw-r--r--spec/frontend/work_items/components/work_item_weight_spec.js47
-rw-r--r--spec/frontend/work_items/mock_data.js151
-rw-r--r--spec/frontend/work_items/pages/work_item_detail_spec.js105
-rw-r--r--spec/frontend/work_items/pages/work_item_root_spec.js2
-rw-r--r--spec/frontend_integration/content_editor/content_editor_integration_spec.js26
-rw-r--r--spec/frontend_integration/ide/helpers/ide_helper.js8
-rw-r--r--spec/graphql/features/authorization_spec.rb76
-rw-r--r--spec/graphql/gitlab_schema_spec.rb11
-rw-r--r--spec/graphql/mutations/boards/issues/issue_move_list_spec.rb4
-rw-r--r--spec/graphql/mutations/branches/create_spec.rb7
-rw-r--r--spec/graphql/mutations/ci/runner/update_spec.rb14
-rw-r--r--spec/graphql/mutations/clusters/agent_tokens/create_spec.rb4
-rw-r--r--spec/graphql/mutations/clusters/agents/create_spec.rb4
-rw-r--r--spec/graphql/mutations/clusters/agents/delete_spec.rb4
-rw-r--r--spec/graphql/mutations/commits/create_spec.rb4
-rw-r--r--spec/graphql/mutations/customer_relations/contacts/create_spec.rb11
-rw-r--r--spec/graphql/mutations/customer_relations/contacts/update_spec.rb11
-rw-r--r--spec/graphql/mutations/customer_relations/organizations/create_spec.rb11
-rw-r--r--spec/graphql/mutations/customer_relations/organizations/update_spec.rb11
-rw-r--r--spec/graphql/mutations/incident_management/timeline_event/create_spec.rb3
-rw-r--r--spec/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb3
-rw-r--r--spec/graphql/mutations/issues/set_escalation_status_spec.rb10
-rw-r--r--spec/graphql/mutations/release_asset_links/create_spec.rb2
-rw-r--r--spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb4
-rw-r--r--spec/graphql/mutations/terraform/state/delete_spec.rb10
-rw-r--r--spec/graphql/mutations/work_items/update_task_spec.rb40
-rw-r--r--spec/graphql/mutations/work_items/update_widgets_spec.rb58
-rw-r--r--spec/graphql/resolvers/alert_management/alert_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/board_list_issues_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/board_lists_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/ci/group_runners_resolver_spec.rb5
-rw-r--r--spec/graphql/resolvers/ci/jobs_resolver_spec.rb7
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb5
-rw-r--r--spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/concerns/resolves_groups_spec.rb2
-rw-r--r--spec/graphql/resolvers/container_repositories_resolver_spec.rb5
-rw-r--r--spec/graphql/resolvers/container_repository_tags_resolver_spec.rb5
-rw-r--r--spec/graphql/resolvers/crm/contacts_resolver_spec.rb92
-rw-r--r--spec/graphql/resolvers/crm/organizations_resolver_spec.rb88
-rw-r--r--spec/graphql/resolvers/design_management/versions_resolver_spec.rb10
-rw-r--r--spec/graphql/resolvers/group_labels_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb12
-rw-r--r--spec/graphql/resolvers/group_packages_resolver_spec.rb9
-rw-r--r--spec/graphql/resolvers/incident_management/timeline_events_resolver_spec.rb3
-rw-r--r--spec/graphql/resolvers/issue_status_counts_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb34
-rw-r--r--spec/graphql/resolvers/merge_requests_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/namespace_projects_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/packages_base_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/project_milestones_resolver_spec.rb58
-rw-r--r--spec/graphql/resolvers/project_packages_resolver_spec.rb10
-rw-r--r--spec/graphql/resolvers/releases_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/user_resolver_spec.rb37
-rw-r--r--spec/graphql/resolvers/users/groups_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/users_resolver_spec.rb14
-rw-r--r--spec/graphql/resolvers/work_items_resolver_spec.rb190
-rw-r--r--spec/graphql/subscriptions/issuable_updated_spec.rb3
-rw-r--r--spec/graphql/types/base_edge_spec.rb1
-rw-r--r--spec/graphql/types/base_field_spec.rb86
-rw-r--r--spec/graphql/types/base_object_spec.rb1
-rw-r--r--spec/graphql/types/ci/detailed_status_type_spec.rb6
-rw-r--r--spec/graphql/types/ci/pipeline_merge_request_event_type_enum_spec.rb14
-rw-r--r--spec/graphql/types/ci/pipeline_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/status_action_type_spec.rb8
-rw-r--r--spec/graphql/types/issue_type_spec.rb10
-rw-r--r--spec/graphql/types/limited_countable_connection_type_spec.rb11
-rw-r--r--spec/graphql/types/packages/cleanup/keep_duplicated_package_files_enum_spec.rb15
-rw-r--r--spec/graphql/types/packages/cleanup/policy_type_spec.rb26
-rw-r--r--spec/graphql/types/project_type_spec.rb36
-rw-r--r--spec/graphql/types/terraform/state_type_spec.rb3
-rw-r--r--spec/graphql/types/time_type_spec.rb5
-rw-r--r--spec/graphql/types/todo_type_spec.rb14
-rw-r--r--spec/graphql/types/user_type_spec.rb8
-rw-r--r--spec/graphql/types/work_item_type_spec.rb13
-rw-r--r--spec/graphql/types/work_items/widget_interface_spec.rb37
-rw-r--r--spec/graphql/types/work_items/widget_type_enum_spec.rb13
-rw-r--r--spec/graphql/types/work_items/widgets/description_type_spec.rb11
-rw-r--r--spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb11
-rw-r--r--spec/helpers/access_tokens_helper_spec.rb8
-rw-r--r--spec/helpers/admin/application_settings/settings_helper_spec.rb34
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb14
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb16
-rw-r--r--spec/helpers/diff_helper_spec.rb21
-rw-r--r--spec/helpers/emails_helper_spec.rb28
-rw-r--r--spec/helpers/environments_helper_spec.rb3
-rw-r--r--spec/helpers/form_helper_spec.rb50
-rw-r--r--spec/helpers/groups/crm_settings_helper_spec.rb47
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb50
-rw-r--r--spec/helpers/groups_helper_spec.rb85
-rw-r--r--spec/helpers/issues_helper_spec.rb39
-rw-r--r--spec/helpers/jira_connect_helper_spec.rb2
-rw-r--r--spec/helpers/markup_helper_spec.rb27
-rw-r--r--spec/helpers/nav/new_dropdown_helper_spec.rb2
-rw-r--r--spec/helpers/nav/top_nav_helper_spec.rb113
-rw-r--r--spec/helpers/notes_helper_spec.rb4
-rw-r--r--spec/helpers/operations_helper_spec.rb2
-rw-r--r--spec/helpers/preferences_helper_spec.rb2
-rw-r--r--spec/helpers/projects/pipeline_helper_spec.rb8
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb26
-rw-r--r--spec/helpers/projects_helper_spec.rb196
-rw-r--r--spec/helpers/routing/pseudonymization_helper_spec.rb3
-rw-r--r--spec/helpers/search_helper_spec.rb24
-rw-r--r--spec/helpers/snippets_helper_spec.rb27
-rw-r--r--spec/helpers/sorting_helper_spec.rb6
-rw-r--r--spec/helpers/storage_helper_spec.rb27
-rw-r--r--spec/helpers/todos_helper_spec.rb16
-rw-r--r--spec/helpers/tooling/visual_review_helper_spec.rb25
-rw-r--r--spec/initializers/forbid_sidekiq_in_transactions_spec.rb57
-rw-r--r--spec/initializers/mail_encoding_patch_spec.rb34
-rw-r--r--spec/initializers/omniauth_spec.rb29
-rw-r--r--spec/initializers/set_active_support_hash_digest_class_spec.rb9
-rw-r--r--spec/initializers/validate_database_config_spec.rb3
-rw-r--r--spec/lib/api/entities/ci/job_request/image_spec.rb16
-rw-r--r--spec/lib/api/entities/personal_access_token_with_details_spec.rb29
-rw-r--r--spec/lib/api/entities/wiki_page_spec.rb17
-rw-r--r--spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb49
-rw-r--r--spec/lib/api/helpers/sse_helpers_spec.rb44
-rw-r--r--spec/lib/api/helpers_spec.rb249
-rw-r--r--spec/lib/api/integrations/slack/events/url_verification_spec.rb11
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb39
-rw-r--r--spec/lib/backup/manager_spec.rb55
-rw-r--r--spec/lib/backup/repositories_spec.rb96
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb22
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb11
-rw-r--r--spec/lib/banzai/filter/syntax_highlight_filter_spec.rb10
-rw-r--r--spec/lib/bulk_imports/groups/stage_spec.rb93
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb171
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb84
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb40
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb169
-rw-r--r--spec/lib/bulk_imports/projects/stage_spec.rb77
-rw-r--r--spec/lib/container_registry/migration_spec.rb61
-rw-r--r--spec/lib/error_tracking/stacktrace_builder_spec.rb95
-rw-r--r--spec/lib/generators/gitlab/usage_metric_generator_spec.rb28
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb20
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb31
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb6
-rw-r--r--spec/lib/gitlab/audit/unauthenticated_author_spec.rb6
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb44
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb124
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb104
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb80
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb297
-rw-r--r--spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb43
-rw-r--r--spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb54
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/changes_access_spec.rb40
-rw-r--r--spec/lib/gitlab/checks/single_change_access_spec.rb16
-rw-r--r--spec/lib/gitlab/checks/tag_check_spec.rb29
-rw-r--r--spec/lib/gitlab/ci/build/image_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb66
-rw-r--r--spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb87
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb81
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/jwt_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb104
-rw-r--r--spec/lib/gitlab/ci/reports/coverage_report_spec.rb (renamed from spec/lib/gitlab/ci/reports/coverage_reports_spec.rb)16
-rw-r--r--spec/lib/gitlab/ci/runner_upgrade_check_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/status/build/play_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/status/build/scheduled_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/trace/archive_spec.rb71
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb47
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb10
-rw-r--r--spec/lib/gitlab/daemon_spec.rb37
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb39
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb7
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb85
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb54
-rw-r--r--spec/lib/gitlab/database/each_database_spec.rb6
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb6
-rw-r--r--spec/lib/gitlab/database/load_balancing/configuration_spec.rb61
-rw-r--r--spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb55
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb40
-rw-r--r--spec/lib/gitlab/database/load_balancing/setup_spec.rb149
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb4
-rw-r--r--spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb81
-rw-r--r--spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb31
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb46
-rw-r--r--spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb216
-rw-r--r--spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb17
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb34
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb68
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb73
-rw-r--r--spec/lib/gitlab/database/shared_model_spec.rb13
-rw-r--r--spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb1
-rw-r--r--spec/lib/gitlab/database_spec.rb32
-rw-r--r--spec/lib/gitlab/diff/custom_diff_spec.rb115
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb36
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb134
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb29
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb10
-rw-r--r--spec/lib/gitlab/email/message/repository_push_spec.rb2
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb58
-rw-r--r--spec/lib/gitlab/email/reply_parser_spec.rb67
-rw-r--r--spec/lib/gitlab/email/service_desk_receiver_spec.rb12
-rw-r--r--spec/lib/gitlab/error_tracking/logger_spec.rb21
-rw-r--r--spec/lib/gitlab/event_store/store_spec.rb18
-rw-r--r--spec/lib/gitlab/fogbugz_import/project_creator_spec.rb9
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb269
-rw-r--r--spec/lib/gitlab/gfm/reference_rewriter_spec.rb3
-rw-r--r--spec/lib/gitlab/gfm/uploads_rewriter_spec.rb22
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb10
-rw-r--r--spec/lib/gitlab/git_access_spec.rb2
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb36
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb115
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb230
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_importer_spec.rb56
-rw-r--r--spec/lib/gitlab/github_import/importer/releases_importer_spec.rb72
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb12
-rw-r--r--spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb32
-rw-r--r--spec/lib/gitlab/graphql/markdown_field_spec.rb40
-rw-r--r--spec/lib/gitlab/graphql/negatable_arguments_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/present/field_extension_spec.rb17
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb51
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb72
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb49
-rw-r--r--spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb6
-rw-r--r--spec/lib/gitlab/hash_digest/facade_spec.rb36
-rw-r--r--spec/lib/gitlab/highlight_spec.rb15
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml7
-rw-r--r--spec/lib/gitlab/import_export/lfs_saver_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml5
-rw-r--r--spec/lib/gitlab/import_sources_spec.rb24
-rw-r--r--spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb46
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb8
-rw-r--r--spec/lib/gitlab/jira_import/base_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import/issues_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import/labels_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import_spec.rb2
-rw-r--r--spec/lib/gitlab/legacy_github_import/importer_spec.rb22
-rw-r--r--spec/lib/gitlab/mail_room/mail_room_spec.rb2
-rw-r--r--spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb92
-rw-r--r--spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb74
-rw-r--r--spec/lib/gitlab/memory/jemalloc_spec.rb121
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb38
-rw-r--r--spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/sli_spec.rb58
-rw-r--r--spec/lib/gitlab/middleware/compressed_json_spec.rb9
-rw-r--r--spec/lib/gitlab/pages_transfer_spec.rb157
-rw-r--r--spec/lib/gitlab/patch/database_config_spec.rb3
-rw-r--r--spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb67
-rw-r--r--spec/lib/gitlab/project_template_spec.rb13
-rw-r--r--spec/lib/gitlab/protocol_access_spec.rb50
-rw-r--r--spec/lib/gitlab/rack_attack_spec.rb4
-rw-r--r--spec/lib/gitlab/redis/duplicate_jobs_spec.rb94
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb924
-rw-r--r--spec/lib/gitlab/redis/sidekiq_status_spec.rb68
-rw-r--r--spec/lib/gitlab/regex_requires_app_spec.rb90
-rw-r--r--spec/lib/gitlab/regex_spec.rb120
-rw-r--r--spec/lib/gitlab/render_timeout_spec.rb25
-rw-r--r--spec/lib/gitlab/seeder_spec.rb2
-rw-r--r--spec/lib/gitlab/service_desk_email_spec.rb6
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb22
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb649
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb30
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb22
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb203
-rw-r--r--spec/lib/gitlab/slash_commands/deploy_spec.rb2
-rw-r--r--spec/lib/gitlab/sql/cte_spec.rb19
-rw-r--r--spec/lib/gitlab/ssh/signature_spec.rb227
-rw-r--r--spec/lib/gitlab/ssh_public_key_spec.rb101
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb245
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb38
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb101
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb53
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb50
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config_spec.rb87
-rw-r--r--spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb127
-rw-r--r--spec/lib/gitlab/subscription_portal_spec.rb46
-rw-r--r--spec/lib/gitlab/themes_spec.rb4
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb28
-rw-r--r--spec/lib/gitlab/updated_notes_paginator_spec.rb57
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb25
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb62
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb15
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb75
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb12
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb3
-rw-r--r--spec/lib/gitlab/usage/metrics/query_spec.rb6
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb5
-rw-r--r--spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb3
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb12
-rw-r--r--spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb14
-rw-r--r--spec/lib/gitlab/usage_data_counters_spec.rb4
-rw-r--r--spec/lib/gitlab/usage_data_metrics_spec.rb5
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb21
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb122
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb31
-rw-r--r--spec/lib/gitlab/web_hooks/rate_limiter_spec.rb123
-rw-r--r--spec/lib/marginalia_spec.rb2
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb98
-rw-r--r--spec/lib/security/ci_configuration/sast_build_action_spec.rb80
-rw-r--r--spec/lib/service_ping/build_payload_spec.rb31
-rw-r--r--spec/lib/service_ping/permit_data_categories_spec.rb20
-rw-r--r--spec/mailers/emails/admin_notification_spec.rb53
-rw-r--r--spec/mailers/notify_spec.rb2
-rw-r--r--spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb29
-rw-r--r--spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb16
-rw-r--r--spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb34
-rw-r--r--spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb3
-rw-r--r--spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb33
-rw-r--r--spec/migrations/20220520040416_schedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb63
-rw-r--r--spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb22
-rw-r--r--spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb34
-rw-r--r--spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb101
-rw-r--r--spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb85
-rw-r--r--spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb35
-rw-r--r--spec/migrations/cleanup_orphaned_routes_spec.rb30
-rw-r--r--spec/migrations/finalize_routes_backfilling_for_projects_spec.rb72
-rw-r--r--spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb24
-rw-r--r--spec/migrations/remove_invalid_integrations_spec.rb31
-rw-r--r--spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb2
-rw-r--r--spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb46
-rw-r--r--spec/migrations/schedule_populate_requirements_issue_id_spec.rb79
-rw-r--r--spec/migrations/schedule_purging_stale_security_scans_spec.rb69
-rw-r--r--spec/models/application_setting_spec.rb43
-rw-r--r--spec/models/bulk_imports/entity_spec.rb26
-rw-r--r--spec/models/bulk_imports/export_status_spec.rb99
-rw-r--r--spec/models/bulk_imports/file_transfer/project_config_spec.rb2
-rw-r--r--spec/models/bulk_imports/tracker_spec.rb2
-rw-r--r--spec/models/ci/bridge_spec.rb33
-rw-r--r--spec/models/ci/build_spec.rb266
-rw-r--r--spec/models/ci/job_artifact_spec.rb40
-rw-r--r--spec/models/ci/namespace_mirror_spec.rb5
-rw-r--r--spec/models/ci/pipeline_spec.rb178
-rw-r--r--spec/models/ci/runner_spec.rb62
-rw-r--r--spec/models/ci/secure_file_spec.rb15
-rw-r--r--spec/models/ci/sources/pipeline_spec.rb2
-rw-r--r--spec/models/clusters/agent_spec.rb53
-rw-r--r--spec/models/clusters/cluster_enabled_grant_spec.rb7
-rw-r--r--spec/models/clusters/cluster_spec.rb4
-rw-r--r--spec/models/clusters/integrations/prometheus_spec.rb38
-rw-r--r--spec/models/commit_signatures/gpg_signature_spec.rb87
-rw-r--r--spec/models/commit_signatures/ssh_signature_spec.rb38
-rw-r--r--spec/models/commit_signatures/x509_commit_signature_spec.rb28
-rw-r--r--spec/models/commit_spec.rb10
-rw-r--r--spec/models/compare_spec.rb15
-rw-r--r--spec/models/concerns/as_cte_spec.rb40
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb10
-rw-r--r--spec/models/concerns/ci/artifactable_spec.rb4
-rw-r--r--spec/models/concerns/integrations/has_data_fields_spec.rb88
-rw-r--r--spec/models/concerns/issuable_spec.rb8
-rw-r--r--spec/models/concerns/limitable_spec.rb4
-rw-r--r--spec/models/concerns/pg_full_text_searchable_spec.rb11
-rw-r--r--spec/models/concerns/project_features_compatibility_spec.rb2
-rw-r--r--spec/models/concerns/sensitive_serializable_hash_spec.rb26
-rw-r--r--spec/models/container_registry/event_spec.rb14
-rw-r--r--spec/models/container_repository_spec.rb14
-rw-r--r--spec/models/customer_relations/contact_spec.rb95
-rw-r--r--spec/models/customer_relations/organization_spec.rb79
-rw-r--r--spec/models/data_list_spec.rb2
-rw-r--r--spec/models/deployment_cluster_spec.rb7
-rw-r--r--spec/models/deployment_spec.rb283
-rw-r--r--spec/models/environment_spec.rb52
-rw-r--r--spec/models/error_tracking/error_event_spec.rb44
-rw-r--r--spec/models/factories_spec.rb (renamed from spec/factories_spec.rb)4
-rw-r--r--spec/models/group_spec.rb106
-rw-r--r--spec/models/hooks/project_hook_spec.rb9
-rw-r--r--spec/models/hooks/service_hook_spec.rb8
-rw-r--r--spec/models/hooks/system_hook_spec.rb8
-rw-r--r--spec/models/hooks/web_hook_log_spec.rb56
-rw-r--r--spec/models/hooks/web_hook_spec.rb96
-rw-r--r--spec/models/integration_spec.rb99
-rw-r--r--spec/models/integrations/campfire_spec.rb4
-rw-r--r--spec/models/integrations/field_spec.rb21
-rw-r--r--spec/models/integrations/harbor_spec.rb10
-rw-r--r--spec/models/integrations/irker_spec.rb36
-rw-r--r--spec/models/integrations/issue_tracker_data_spec.rb8
-rw-r--r--spec/models/integrations/jira_spec.rb55
-rw-r--r--spec/models/integrations/jira_tracker_data_spec.rb8
-rw-r--r--spec/models/integrations/prometheus_spec.rb11
-rw-r--r--spec/models/integrations/zentao_tracker_data_spec.rb6
-rw-r--r--spec/models/issue_spec.rb68
-rw-r--r--spec/models/key_spec.rb151
-rw-r--r--spec/models/members/group_member_spec.rb141
-rw-r--r--spec/models/members/last_group_owner_assigner_spec.rb13
-rw-r--r--spec/models/members/project_member_spec.rb24
-rw-r--r--spec/models/merge_request/cleanup_schedule_spec.rb35
-rw-r--r--spec/models/merge_request_diff_file_spec.rb8
-rw-r--r--spec/models/merge_request_spec.rb216
-rw-r--r--spec/models/milestone_spec.rb6
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb13
-rw-r--r--spec/models/namespace_setting_spec.rb1
-rw-r--r--spec/models/namespace_spec.rb258
-rw-r--r--spec/models/packages/cleanup/policy_spec.rb2
-rw-r--r--spec/models/packages/package_spec.rb20
-rw-r--r--spec/models/plan_limits_spec.rb3
-rw-r--r--spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb13
-rw-r--r--spec/models/project_feature_spec.rb50
-rw-r--r--spec/models/project_group_link_spec.rb6
-rw-r--r--spec/models/project_spec.rb231
-rw-r--r--spec/models/project_statistics_spec.rb26
-rw-r--r--spec/models/projects/build_artifacts_size_refresh_spec.rb59
-rw-r--r--spec/models/protected_tag_spec.rb52
-rw-r--r--spec/models/release_spec.rb26
-rw-r--r--spec/models/remote_mirror_spec.rb2
-rw-r--r--spec/models/repository_spec.rb47
-rw-r--r--spec/models/route_spec.rb51
-rw-r--r--spec/models/terraform/state_spec.rb18
-rw-r--r--spec/models/terraform/state_version_spec.rb1
-rw-r--r--spec/models/time_tracking/timelog_category_spec.rb59
-rw-r--r--spec/models/user_spec.rb33
-rw-r--r--spec/models/users/callout_spec.rb12
-rw-r--r--spec/models/work_item_spec.rb31
-rw-r--r--spec/models/work_items/parent_link_spec.rb66
-rw-r--r--spec/models/work_items/type_spec.rb11
-rw-r--r--spec/models/work_items/widgets/base_spec.rb19
-rw-r--r--spec/models/work_items/widgets/description_spec.rb25
-rw-r--r--spec/models/work_items/widgets/hierarchy_spec.rb52
-rw-r--r--spec/policies/blob_policy_spec.rb7
-rw-r--r--spec/policies/group_policy_spec.rb13
-rw-r--r--spec/policies/project_policy_spec.rb159
-rw-r--r--spec/policies/work_item_policy_spec.rb6
-rw-r--r--spec/presenters/blob_presenter_spec.rb49
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb40
-rw-r--r--spec/presenters/packages/pypi/simple_index_presenter_spec.rb68
-rw-r--r--spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb (renamed from spec/presenters/packages/pypi/package_presenter_spec.rb)21
-rw-r--r--spec/presenters/project_presenter_spec.rb31
-rw-r--r--spec/presenters/releases/link_presenter_spec.rb31
-rw-r--r--spec/presenters/service_hook_presenter_spec.rb4
-rw-r--r--spec/presenters/web_hook_log_presenter_spec.rb4
-rw-r--r--spec/requests/admin/background_migrations_controller_spec.rb6
-rw-r--r--spec/requests/admin/batched_jobs_controller_spec.rb2
-rw-r--r--spec/requests/api/bulk_imports_spec.rb4
-rw-r--r--spec/requests/api/ci/job_artifacts_spec.rb80
-rw-r--r--spec/requests/api/ci/jobs_spec.rb103
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb12
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb41
-rw-r--r--spec/requests/api/clusters/agent_tokens_spec.rb4
-rw-r--r--spec/requests/api/clusters/agents_spec.rb18
-rw-r--r--spec/requests/api/environments_spec.rb27
-rw-r--r--spec/requests/api/error_tracking/collector_spec.rb21
-rw-r--r--spec/requests/api/features_spec.rb133
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb77
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb143
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb49
-rw-r--r--spec/requests/api/graphql/gitlab_schema_spec.rb2
-rw-r--r--spec/requests/api/graphql/issue/issue_spec.rb23
-rw-r--r--spec/requests/api/graphql/milestone_spec.rb124
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb17
-rw-r--r--spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/design_management/delete_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/incident_management/timeline_event/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/incident_management/timeline_event/destroy_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb12
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_escalation_status_spec.rb8
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/start_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb10
-rw-r--r--spec/requests/api/graphql/mutations/packages/cleanup/policy/update_spec.rb109
-rw-r--r--spec/requests/api/graphql/mutations/packages/destroy_files_spec.rb103
-rw-r--r--spec/requests/api/graphql/mutations/releases/create_spec.rb20
-rw-r--r--spec/requests/api/graphql/mutations/user_preferences/update_spec.rb22
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_task_spec.rb101
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb78
-rw-r--r--spec/requests/api/graphql/project/incident_management/timeline_events_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb12
-rw-r--r--spec/requests/api/graphql/project/merge_request/pipelines_spec.rb69
-rw-r--r--spec/requests/api/graphql/project/milestones_spec.rb21
-rw-r--r--spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb79
-rw-r--r--spec/requests/api/graphql/project/work_items_spec.rb121
-rw-r--r--spec/requests/api/graphql/terraform/state/delete_spec.rb8
-rw-r--r--spec/requests/api/graphql/user/starred_projects_query_spec.rb21
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb142
-rw-r--r--spec/requests/api/graphql_spec.rb8
-rw-r--r--spec/requests/api/groups_spec.rb20
-rw-r--r--spec/requests/api/integrations/jira_connect/subscriptions_spec.rb1
-rw-r--r--spec/requests/api/integrations/slack/events_spec.rb112
-rw-r--r--spec/requests/api/integrations_spec.rb52
-rw-r--r--spec/requests/api/internal/base_spec.rb58
-rw-r--r--spec/requests/api/internal/mail_room_spec.rb53
-rw-r--r--spec/requests/api/internal/workhorse_spec.rb42
-rw-r--r--spec/requests/api/invitations_spec.rb17
-rw-r--r--spec/requests/api/issue_links_spec.rb81
-rw-r--r--spec/requests/api/issues/issues_spec.rb65
-rw-r--r--spec/requests/api/markdown_snapshot_spec.rb11
-rw-r--r--spec/requests/api/members_spec.rb126
-rw-r--r--spec/requests/api/merge_requests_spec.rb83
-rw-r--r--spec/requests/api/namespaces_spec.rb18
-rw-r--r--spec/requests/api/personal_access_tokens_spec.rb55
-rw-r--r--spec/requests/api/project_attributes.yml1
-rw-r--r--spec/requests/api/project_hooks_spec.rb4
-rw-r--r--spec/requests/api/projects_spec.rb7
-rw-r--r--spec/requests/api/pypi_packages_spec.rb65
-rw-r--r--spec/requests/api/release/links_spec.rb76
-rw-r--r--spec/requests/api/releases_spec.rb17
-rw-r--r--spec/requests/api/system_hooks_spec.rb37
-rw-r--r--spec/requests/api/terraform/modules/v1/packages_spec.rb382
-rw-r--r--spec/requests/api/terraform/state_spec.rb64
-rw-r--r--spec/requests/api/users_spec.rb105
-rw-r--r--spec/requests/api/wikis_spec.rb18
-rw-r--r--spec/requests/git_http_spec.rb10
-rw-r--r--spec/requests/groups/crm/contacts_controller_spec.rb8
-rw-r--r--spec/requests/groups/crm/organizations_controller_spec.rb8
-rw-r--r--spec/requests/ide_controller_spec.rb25
-rw-r--r--spec/requests/jira_connect/oauth_application_ids_controller_spec.rb42
-rw-r--r--spec/requests/mailgun/webhooks_controller_spec.rb149
-rw-r--r--spec/requests/members/mailgun/permanent_failure_spec.rb128
-rw-r--r--spec/requests/oauth/authorizations_controller_spec.rb76
-rw-r--r--spec/requests/oauth/tokens_controller_spec.rb5
-rw-r--r--spec/requests/openid_connect_spec.rb2
-rw-r--r--spec/requests/projects/environments_controller_spec.rb36
-rw-r--r--spec/requests/projects/issue_links_controller_spec.rb5
-rw-r--r--spec/requests/projects/merge_requests_controller_spec.rb64
-rw-r--r--spec/requests/pwa_controller_spec.rb9
-rw-r--r--spec/requests/robots_txt_spec.rb3
-rw-r--r--spec/routing/project_routing_spec.rb61
-rw-r--r--spec/rubocop/cop/migration/background_migrations_spec.rb41
-rw-r--r--spec/rubocop/cop/migration/migration_record_spec.rb56
-rw-r--r--spec/rubocop/cop/static_translation_definition_spec.rb54
-rw-r--r--spec/rubocop/formatter/todo_formatter_spec.rb9
-rw-r--r--spec/rubocop/todo_dir_spec.rb6
-rw-r--r--spec/scripts/lib/glfm/update_example_snapshots_spec.rb483
-rw-r--r--spec/serializers/analytics_issue_entity_spec.rb25
-rw-r--r--spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb (renamed from spec/serializers/deploy_key_entity_spec.rb)18
-rw-r--r--spec/serializers/deploy_keys/deploy_key_entity_spec.rb51
-rw-r--r--spec/serializers/deployment_entity_spec.rb12
-rw-r--r--spec/serializers/diff_file_entity_spec.rb33
-rw-r--r--spec/serializers/environment_serializer_spec.rb23
-rw-r--r--spec/serializers/integrations/event_entity_spec.rb (renamed from spec/serializers/service_event_entity_spec.rb)12
-rw-r--r--spec/serializers/integrations/field_entity_spec.rb (renamed from spec/serializers/service_field_entity_spec.rb)32
-rw-r--r--spec/serializers/issue_board_entity_spec.rb6
-rw-r--r--spec/serializers/issue_entity_spec.rb6
-rw-r--r--spec/serializers/issue_sidebar_basic_entity_spec.rb10
-rw-r--r--spec/serializers/linked_project_issue_entity_spec.rb16
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb36
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb40
-rw-r--r--spec/serializers/prometheus_alert_entity_spec.rb4
-rw-r--r--spec/services/alert_management/alerts/update_service_spec.rb8
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb42
-rw-r--r--spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb168
-rw-r--r--spec/services/bulk_imports/file_export_service_spec.rb41
-rw-r--r--spec/services/bulk_imports/lfs_objects_export_service_spec.rb12
-rw-r--r--spec/services/bulk_imports/repository_bundle_export_service_spec.rb46
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb30
-rw-r--r--spec/services/ci/abort_pipelines_service_spec.rb23
-rw-r--r--spec/services/ci/after_requeue_job_service_spec.rb45
-rw-r--r--spec/services/ci/create_pipeline_service/rate_limit_spec.rb6
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb11
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb10
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb131
-rw-r--r--spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb32
-rw-r--r--spec/services/ci/process_sync_events_service_spec.rb5
-rw-r--r--spec/services/clusters/applications/schedule_update_service_spec.rb63
-rw-r--r--spec/services/deployments/create_service_spec.rb37
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb4
-rw-r--r--spec/services/emails/confirm_service_spec.rb6
-rw-r--r--spec/services/environments/auto_stop_service_spec.rb2
-rw-r--r--spec/services/environments/stop_service_spec.rb60
-rw-r--r--spec/services/event_create_service_spec.rb80
-rw-r--r--spec/services/git/branch_push_service_spec.rb2
-rw-r--r--spec/services/import/fogbugz_service_spec.rb150
-rw-r--r--spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb8
-rw-r--r--spec/services/incident_management/timeline_events/create_service_spec.rb30
-rw-r--r--spec/services/incident_management/timeline_events/destroy_service_spec.rb16
-rw-r--r--spec/services/incident_management/timeline_events/update_service_spec.rb13
-rw-r--r--spec/services/integrations/propagate_service_spec.rb2
-rw-r--r--spec/services/issues/create_service_spec.rb45
-rw-r--r--spec/services/issues/move_service_spec.rb14
-rw-r--r--spec/services/issues/update_service_spec.rb8
-rw-r--r--spec/services/jira_connect_subscriptions/create_service_spec.rb46
-rw-r--r--spec/services/jira_import/start_import_service_spec.rb2
-rw-r--r--spec/services/jira_import/users_importer_spec.rb2
-rw-r--r--spec/services/markdown_content_rewriter_service_spec.rb83
-rw-r--r--spec/services/members/approve_access_request_service_spec.rb53
-rw-r--r--spec/services/members/create_service_spec.rb12
-rw-r--r--spec/services/members/creator_service_spec.rb4
-rw-r--r--spec/services/members/destroy_service_spec.rb42
-rw-r--r--spec/services/members/groups/bulk_creator_service_spec.rb10
-rw-r--r--spec/services/members/groups/creator_service_spec.rb16
-rw-r--r--spec/services/members/invite_service_spec.rb15
-rw-r--r--spec/services/members/mailgun/process_webhook_service_spec.rb42
-rw-r--r--spec/services/members/projects/bulk_creator_service_spec.rb10
-rw-r--r--spec/services/members/projects/creator_service_spec.rb16
-rw-r--r--spec/services/members/update_service_spec.rb76
-rw-r--r--spec/services/merge_requests/build_service_spec.rb104
-rw-r--r--spec/services/merge_requests/create_pipeline_service_spec.rb13
-rw-r--r--spec/services/merge_requests/create_service_spec.rb6
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb9
-rw-r--r--spec/services/merge_requests/mergeability/run_checks_service_spec.rb22
-rw-r--r--spec/services/merge_requests/post_merge_service_spec.rb26
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb34
-rw-r--r--spec/services/metrics/dashboard/panel_preview_service_spec.rb1
-rw-r--r--spec/services/notes/copy_service_spec.rb28
-rw-r--r--spec/services/notes/create_service_spec.rb4
-rw-r--r--spec/services/notification_recipients/build_service_spec.rb10
-rw-r--r--spec/services/notification_service_spec.rb27
-rw-r--r--spec/services/packages/cleanup/update_policy_service_spec.rb105
-rw-r--r--spec/services/packages/go/create_package_service_spec.rb16
-rw-r--r--spec/services/packages/maven/metadata/append_package_file_service_spec.rb21
-rw-r--r--spec/services/packages/rubygems/create_gemspec_service_spec.rb13
-rw-r--r--spec/services/pages/delete_service_spec.rb6
-rw-r--r--spec/services/pages_domains/create_acme_order_service_spec.rb10
-rw-r--r--spec/services/projects/after_rename_service_spec.rb34
-rw-r--r--spec/services/projects/autocomplete_service_spec.rb1
-rw-r--r--spec/services/projects/destroy_rollback_service_spec.rb46
-rw-r--r--spec/services/projects/destroy_service_spec.rb57
-rw-r--r--spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb13
-rw-r--r--spec/services/projects/transfer_service_spec.rb12
-rw-r--r--spec/services/projects/update_pages_service_spec.rb19
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb166
-rw-r--r--spec/services/releases/create_service_spec.rb21
-rw-r--r--spec/services/repositories/changelog_service_spec.rb48
-rw-r--r--spec/services/repositories/destroy_rollback_service_spec.rb85
-rw-r--r--spec/services/repositories/destroy_service_spec.rb74
-rw-r--r--spec/services/repositories/shell_destroy_service_spec.rb26
-rw-r--r--spec/services/resource_access_tokens/create_service_spec.rb40
-rw-r--r--spec/services/service_response_spec.rb75
-rw-r--r--spec/services/snippets/bulk_destroy_service_spec.rb55
-rw-r--r--spec/services/snippets/destroy_service_spec.rb13
-rw-r--r--spec/services/static_site_editor/config_service_spec.rb126
-rw-r--r--spec/services/terraform/remote_state_handler_spec.rb40
-rw-r--r--spec/services/terraform/states/destroy_service_spec.rb36
-rw-r--r--spec/services/terraform/states/trigger_destroy_service_spec.rb45
-rw-r--r--spec/services/user_project_access_changed_service_spec.rb13
-rw-r--r--spec/services/web_hook_service_spec.rb201
-rw-r--r--spec/services/web_hooks/destroy_service_spec.rb68
-rw-r--r--spec/services/web_hooks/log_destroy_service_spec.rb56
-rw-r--r--spec/services/work_items/update_service_spec.rb15
-rw-r--r--spec/spec_helper.rb18
-rw-r--r--spec/support/factory_bot.rb1
-rw-r--r--spec/support/graphql/field_inspection.rb2
-rw-r--r--spec/support/graphql/field_selection.rb8
-rw-r--r--spec/support/graphql/resolver_factories.rb4
-rw-r--r--spec/support/helpers/callouts_test_helper.rb9
-rw-r--r--spec/support/helpers/countries_controller_test_helper.rb9
-rw-r--r--spec/support/helpers/doc_url_helper.rb21
-rw-r--r--spec/support/helpers/emails_helper_test_helper.rb9
-rw-r--r--spec/support/helpers/form_builder_helpers.rb13
-rw-r--r--spec/support/helpers/gitaly_setup.rb14
-rw-r--r--spec/support/helpers/graphql_helpers.rb155
-rw-r--r--spec/support/helpers/jira_integration_helpers.rb (renamed from spec/support/helpers/jira_service_helper.rb)20
-rw-r--r--spec/support/helpers/login_helpers.rb8
-rw-r--r--spec/support/helpers/namespaces_test_helper.rb4
-rw-r--r--spec/support/helpers/next_instance_of.rb2
-rw-r--r--spec/support/helpers/project_helpers.rb4
-rw-r--r--spec/support/helpers/project_template_test_helper.rb17
-rw-r--r--spec/support/helpers/search_settings_helpers.rb2
-rw-r--r--spec/support/helpers/stub_method_calls.rb66
-rw-r--r--spec/support/helpers/subscription_portal_helper.rb13
-rw-r--r--spec/support/helpers/test_env.rb2
-rw-r--r--spec/support/matchers/background_migrations_matchers.rb6
-rw-r--r--spec/support/matchers/exceed_query_limit.rb92
-rw-r--r--spec/support/shared_contexts/features/integrations/integrations_shared_context.rb40
-rw-r--r--spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb26
-rw-r--r--spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb79
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb18
-rw-r--r--spec/support/shared_contexts/markdown_snapshot_shared_examples.rb64
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb4
-rw-r--r--spec/support/shared_contexts/policies/project_policy_shared_context.rb2
-rw-r--r--spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/components/pajamas_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/controllers/environments_controller_shared_examples.rb17
-rw-r--r--spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/features/2fa_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/access_tokens_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/cascading_settings_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/features/container_registry_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb70
-rw-r--r--spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/features/runners_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/features/sidebar_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/finders/issues_finder_shared_examples.rb1471
-rw-r--r--spec/support/shared_examples/graphql/members_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/graphql/mutations/incident_management_timeline_events_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/graphql/n_plus_one_query_examples.rb (renamed from spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb)0
-rw-r--r--spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/integrations/integration_settings_form.rb28
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/reestablished_connection_stack_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/gitlab/event_store_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/models/application_setting_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/models/commit_signature_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/models/concerns/limitable_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/models/integrations/base_data_fields_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb323
-rw-r--r--spec/support/shared_examples/models/members_notifications_shared_example.rb12
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/namespaces/traversal_scope_examples.rb22
-rw-r--r--spec/support/shared_examples/requests/api/project_statistics_refresh_conflicts_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb56
-rw-r--r--spec/support/shared_examples/requests/projects/environments_controller_spec_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/services/alert_management_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/views/pagination_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb238
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/workers/idempotency_shared_examples.rb14
-rw-r--r--spec/support_specs/helpers/stub_method_calls_spec.rb107
-rw-r--r--spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb8
-rw-r--r--spec/tasks/gitlab/background_migrations_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb103
-rw-r--r--spec/tasks/gitlab/db/lock_writes_rake_spec.rb177
-rw-r--r--spec/tasks/gitlab/db/validate_config_rake_spec.rb41
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb17
-rw-r--r--spec/tasks/gitlab/pages_rake_spec.rb80
-rw-r--r--spec/tasks/rubocop_rake_spec.rb11
-rw-r--r--spec/tooling/danger/datateam_spec.rb14
-rw-r--r--spec/tooling/danger/project_helper_spec.rb10
-rw-r--r--spec/tooling/lib/tooling/find_codeowners_spec.rb107
-rw-r--r--spec/tooling/lib/tooling/test_map_generator_spec.rb14
-rw-r--r--spec/tooling/quality/test_level_spec.rb43
-rw-r--r--spec/uploaders/gitlab_uploader_spec.rb6
-rw-r--r--spec/uploaders/metric_image_uploader_spec.rb11
-rw-r--r--spec/views/admin/application_settings/_repository_check.html.haml_spec.rb75
-rw-r--r--spec/views/admin/application_settings/general.html.haml_spec.rb29
-rw-r--r--spec/views/layouts/application.html.haml_spec.rb29
-rw-r--r--spec/views/projects/issues/_service_desk_info_content.html.haml_spec.rb2
-rw-r--r--spec/views/projects/settings/integrations/edit.html.haml_spec.rb (renamed from spec/views/projects/services/edit.html.haml_spec.rb)2
-rw-r--r--spec/views/projects/tags/index.html.haml_spec.rb33
-rw-r--r--spec/views/shared/projects/_inactive_project_deletion_alert.html.haml_spec.rb54
-rw-r--r--spec/workers/build_success_worker_spec.rb17
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb142
-rw-r--r--spec/workers/ci/archive_trace_worker_spec.rb28
-rw-r--r--spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb2
-rw-r--r--spec/workers/clusters/applications/activate_integration_worker_spec.rb (renamed from spec/workers/clusters/applications/activate_service_worker_spec.rb)21
-rw-r--r--spec/workers/clusters/applications/deactivate_integration_worker_spec.rb (renamed from spec/workers/clusters/applications/deactivate_service_worker_spec.rb)31
-rw-r--r--spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb4
-rw-r--r--spec/workers/concerns/cronjob_queue_spec.rb32
-rw-r--r--spec/workers/concerns/limited_capacity/job_tracker_spec.rb2
-rw-r--r--spec/workers/concerns/worker_attributes_spec.rb106
-rw-r--r--spec/workers/container_registry/migration/enqueuer_worker_spec.rb788
-rw-r--r--spec/workers/container_registry/migration/guard_worker_spec.rb106
-rw-r--r--spec/workers/database/batched_background_migration/ci_database_worker_spec.rb2
-rw-r--r--spec/workers/database/batched_background_migration_worker_spec.rb2
-rw-r--r--spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb29
-rw-r--r--spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb29
-rw-r--r--spec/workers/delete_container_repository_worker_spec.rb120
-rw-r--r--spec/workers/deployments/hooks_worker_spec.rb8
-rw-r--r--spec/workers/environments/auto_stop_worker_spec.rb2
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb15
-rw-r--r--spec/workers/expire_job_cache_worker_spec.rb31
-rw-r--r--spec/workers/expire_pipeline_cache_worker_spec.rb38
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb2
-rw-r--r--spec/workers/gitlab_service_ping_worker_spec.rb56
-rw-r--r--spec/workers/integrations/execute_worker_spec.rb (renamed from spec/workers/project_service_worker_spec.rb)24
-rw-r--r--spec/workers/integrations/irker_worker_spec.rb (renamed from spec/workers/irker_worker_spec.rb)22
-rw-r--r--spec/workers/issue_placement_worker_spec.rb151
-rw-r--r--spec/workers/issue_rebalancing_worker_spec.rb104
-rw-r--r--spec/workers/loose_foreign_keys/cleanup_worker_spec.rb8
-rw-r--r--spec/workers/merge_requests/create_pipeline_worker_spec.rb37
-rw-r--r--spec/workers/merge_requests/update_head_pipeline_worker_spec.rb28
-rw-r--r--spec/workers/namespaceless_project_destroy_worker_spec.rb77
-rw-r--r--spec/workers/pages_transfer_worker_spec.rb38
-rw-r--r--spec/workers/pipeline_hooks_worker_spec.rb10
-rw-r--r--spec/workers/pipeline_notification_worker_spec.rb14
-rw-r--r--spec/workers/project_daily_statistics_worker_spec.rb35
-rw-r--r--spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb48
-rw-r--r--spec/workers/prometheus/create_default_alerts_worker_spec.rb13
-rw-r--r--spec/workers/repository_remove_remote_worker_spec.rb48
-rw-r--r--spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb6
-rw-r--r--spec/workers/terraform/states/destroy_worker_spec.rb30
-rw-r--r--spec/workers/update_merge_requests_worker_spec.rb49
-rw-r--r--spec/workers/users/deactivate_dormant_users_worker_spec.rb9
-rw-r--r--spec/workers/web_hooks/destroy_worker_spec.rb23
-rw-r--r--spec/workers/web_hooks/log_destroy_worker_spec.rb86
1398 files changed, 44367 insertions, 23821 deletions
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 223d0c3b0ec..55e8ab7885e 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -195,22 +195,22 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
},
'high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high',
- included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache),
+ included_queues: %w(pipeline_default:ci_drop_pipeline),
excluded_queues: %w(merge)
},
'CPU-bound high urgency CI queues' => {
query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
- included_queues: %w(pipeline_cache:expire_pipeline_cache),
- excluded_queues: %w(pipeline_cache:expire_job_cache merge)
+ included_queues: %w(pipeline_default:ci_create_downstream_pipeline),
+ excluded_queues: %w(pipeline_default:ci_drop_pipeline merge)
},
'CPU-bound high urgency non-CI queues' => {
query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
included_queues: %w(new_issue),
- excluded_queues: %w(pipeline_cache:expire_pipeline_cache)
+ excluded_queues: %w(pipeline_default:ci_create_downstream_pipeline)
},
'CI and SCM queues' => {
query: 'feature_category=continuous_integration|feature_category=source_code_management',
- included_queues: %w(pipeline_cache:expire_job_cache merge),
+ included_queues: %w(pipeline_default:ci_drop_pipeline merge),
excluded_queues: %w(mailers)
}
}
@@ -243,10 +243,17 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
end
it 'expands multiple queue groups correctly' do
+ expected_workers =
+ if Gitlab.ee?
+ [%w[chat_notification], %w[project_export project_template_export]]
+ else
+ [%w[chat_notification], %w[project_export]]
+ end
+
expect(Gitlab::SidekiqCluster)
.to receive(:start)
- .with([['chat_notification'], ['project_export']], default_options)
- .and_return([])
+ .with(expected_workers, default_options)
+ .and_return([])
cli.run(%w(--queue-selector feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
end
diff --git a/spec/components/pajamas/alert_component_spec.rb b/spec/components/pajamas/alert_component_spec.rb
index e596f07a15a..1e2845c44a8 100644
--- a/spec/components/pajamas/alert_component_spec.rb
+++ b/spec/components/pajamas/alert_component_spec.rb
@@ -50,10 +50,12 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
before do
render_inline described_class.new(
title: '_title_',
- alert_class: '_alert_class_',
- alert_data: {
- feature_id: '_feature_id_',
- dismiss_endpoint: '_dismiss_endpoint_'
+ alert_options: {
+ class: '_alert_class_',
+ data: {
+ feature_id: '_feature_id_',
+ dismiss_endpoint: '_dismiss_endpoint_'
+ }
}
)
end
@@ -106,9 +108,11 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
context 'with dismissible content' do
before do
render_inline described_class.new(
- close_button_class: '_close_button_class_',
- close_button_data: {
- testid: '_close_button_testid_'
+ close_button_options: {
+ class: '_close_button_class_',
+ data: {
+ testid: '_close_button_testid_'
+ }
}
)
end
diff --git a/spec/components/pajamas/banner_component_spec.rb b/spec/components/pajamas/banner_component_spec.rb
new file mode 100644
index 00000000000..5969f06dbad
--- /dev/null
+++ b/spec/components/pajamas/banner_component_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::BannerComponent, type: :component do
+ subject do
+ described_class.new(**options)
+ end
+
+ let(:title) { "Banner title" }
+ let(:content) { "Banner content"}
+ let(:options) { {} }
+
+ describe 'basic usage' do
+ before do
+ render_inline(subject) do |c|
+ c.title { title }
+ content
+ end
+ end
+
+ it 'renders its content' do
+ expect(rendered_component).to have_text content
+ end
+
+ it 'renders its title' do
+ expect(rendered_component).to have_css "h1[class='gl-banner-title']", text: title
+ end
+
+ it 'renders a close button' do
+ expect(rendered_component).to have_css "button.gl-banner-close"
+ end
+
+ describe 'button_text and button_link' do
+ let(:options) { { button_text: 'Learn more', button_link: '/learn-more' } }
+
+ it 'define the primary action' do
+ expect(rendered_component).to have_css "a.btn-confirm.gl-button[href='/learn-more']", text: 'Learn more'
+ end
+ end
+
+ describe 'banner_options' do
+ let(:options) { { banner_options: { class: "baz", data: { foo: "bar" } } } }
+
+ it 'are on the banner' do
+ expect(rendered_component).to have_css ".gl-banner.baz[data-foo='bar']"
+ end
+
+ context 'with custom classes' do
+ let(:options) { { variant: :introduction, banner_options: { class: 'extra special' } } }
+
+ it 'don\'t conflict with internal banner_classes' do
+ expect(rendered_component).to have_css '.extra.special.gl-banner-introduction.gl-banner'
+ end
+ end
+ end
+
+ describe 'close_options' do
+ let(:options) { { close_options: { class: "js-foo", data: { uid: "123" } } } }
+
+ it 'are on the close button' do
+ expect(rendered_component).to have_css "button.gl-banner-close.js-foo[data-uid='123']"
+ end
+ end
+
+ describe 'embedded' do
+ context 'by default (false)' do
+ it 'keeps the banner\'s borders' do
+ expect(rendered_component).not_to have_css ".gl-banner.gl-border-none"
+ end
+ end
+
+ context 'when set to true' do
+ let(:options) { { embedded: true } }
+
+ it 'removes the banner\'s borders' do
+ expect(rendered_component).to have_css ".gl-banner.gl-border-none"
+ end
+ end
+ end
+
+ describe 'variant' do
+ context 'by default (promotion)' do
+ it 'applies no variant class' do
+ expect(rendered_component).to have_css "[class='gl-banner']"
+ end
+ end
+
+ context 'when set to introduction' do
+ let(:options) { { variant: :introduction } }
+
+ it "applies the introduction class to the banner" do
+ expect(rendered_component).to have_css ".gl-banner.gl-banner-introduction"
+ end
+
+ it "applies the confirm class to the close button" do
+ expect(rendered_component).to have_css ".gl-banner-close.btn-confirm.btn-confirm-tertiary"
+ end
+ end
+
+ context 'when set to unknown variant' do
+ let(:options) { { variant: :foobar } }
+
+ it 'ignores the unknown variant' do
+ expect(rendered_component).to have_css "[class='gl-banner']"
+ end
+ end
+ end
+
+ describe 'illustration' do
+ it 'has none by default' do
+ expect(rendered_component).not_to have_css ".gl-banner-illustration"
+ end
+
+ context 'with svg_path' do
+ let(:options) { { svg_path: 'logo.svg' } }
+
+ it 'renders an image as illustration' do
+ expect(rendered_component).to have_css ".gl-banner-illustration img"
+ end
+ end
+ end
+ end
+
+ context 'with illustration slot' do
+ before do
+ render_inline(subject) do |c|
+ c.title { title }
+ c.illustration { "<svg></svg>".html_safe }
+ content
+ end
+ end
+
+ it 'renders the slot content as illustration' do
+ expect(rendered_component).to have_css ".gl-banner-illustration svg"
+ end
+
+ context 'and conflicting svg_path' do
+ let(:options) { { svg_path: 'logo.svg' } }
+
+ it 'uses the slot content' do
+ expect(rendered_component).to have_css ".gl-banner-illustration svg"
+ expect(rendered_component).not_to have_css ".gl-banner-illustration img"
+ end
+ end
+ end
+
+ context 'with primary_action slot' do
+ before do
+ render_inline(subject) do |c|
+ c.title { title }
+ c.primary_action { "<a class='special' href='#'>Special</a>".html_safe }
+ content
+ end
+ end
+
+ it 'renders the slot content as the primary action' do
+ expect(rendered_component).to have_css "a.special", text: 'Special'
+ end
+
+ context 'and conflicting button_text and button_link' do
+ let(:options) { { button_text: 'Not special', button_link: '/' } }
+
+ it 'uses the slot content' do
+ expect(rendered_component).to have_css "a.special[href='#']", text: 'Special'
+ expect(rendered_component).not_to have_css "a.btn[href='/']"
+ end
+ end
+ end
+end
diff --git a/spec/components/pajamas/button_component_spec.rb b/spec/components/pajamas/button_component_spec.rb
new file mode 100644
index 00000000000..60c2a2e5a06
--- /dev/null
+++ b/spec/components/pajamas/button_component_spec.rb
@@ -0,0 +1,273 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::ButtonComponent, type: :component do
+ subject do
+ described_class.new(**options)
+ end
+
+ let(:content) { "Button content" }
+ let(:options) { {} }
+
+ describe 'basic usage' do
+ before do
+ render_inline(subject) do |c|
+ content
+ end
+ end
+
+ it 'renders its content' do
+ expect(rendered_component).to have_text content
+ end
+
+ it 'adds default styling' do
+ expect(rendered_component).to have_css ".btn.btn-default.btn-md.gl-button"
+ end
+
+ describe 'button_options' do
+ let(:options) { { button_options: { id: 'baz', data: { foo: 'bar' } } } }
+
+ it 'are added to the button' do
+ expect(rendered_component).to have_css ".gl-button#baz[data-foo='bar']"
+ end
+
+ context 'with custom classes' do
+ let(:options) { { variant: :danger, category: :tertiary, button_options: { class: 'custom-class' } } }
+
+ it 'don\'t conflict with internal button_classes' do
+ expect(rendered_component).to have_css '.gl-button.btn-danger.btn-danger-tertiary.custom-class'
+ end
+ end
+
+ context 'overriding base attributes' do
+ let(:options) { { button_options: { type: 'submit' } } }
+
+ it 'overrides type' do
+ expect(rendered_component).to have_css '[type="submit"]'
+ end
+ end
+ end
+
+ describe 'button_text_classes' do
+ let(:options) { { button_text_classes: 'custom-text-class' } }
+
+ it 'is added to the button text' do
+ expect(rendered_component).to have_css ".gl-button-text.custom-text-class"
+ end
+ end
+
+ describe 'disabled' do
+ context 'by default (false)' do
+ it 'does not have disabled styling and behavior' do
+ expect(rendered_component).not_to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
+ end
+ end
+
+ context 'when set to true' do
+ let(:options) { { disabled: true } }
+
+ it 'has disabled styling and behavior' do
+ expect(rendered_component).to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
+ end
+ end
+ end
+
+ describe 'loading' do
+ context 'by default (false)' do
+ it 'is not disabled' do
+ expect(rendered_component).not_to have_css ".disabled[disabled='disabled']"
+ end
+
+ it 'does not render a spinner' do
+ expect(rendered_component).not_to have_css ".gl-spinner[aria-label='Loading']"
+ end
+ end
+
+ context 'when set to true' do
+ let(:options) { { loading: true } }
+
+ it 'is disabled' do
+ expect(rendered_component).to have_css ".disabled[disabled='disabled']"
+ end
+
+ it 'renders a spinner' do
+ expect(rendered_component).to have_css ".gl-spinner[aria-label='Loading']"
+ end
+ end
+ end
+
+ describe 'block' do
+ context 'by default (false)' do
+ it 'is inline' do
+ expect(rendered_component).not_to have_css ".btn-block"
+ end
+ end
+
+ context 'when set to true' do
+ let(:options) { { block: true } }
+
+ it 'is block element' do
+ expect(rendered_component).to have_css ".btn-block"
+ end
+ end
+ end
+
+ describe 'selected' do
+ context 'by default (false)' do
+ it 'does not have selected styling and behavior' do
+ expect(rendered_component).not_to have_css ".selected"
+ end
+ end
+
+ context 'when set to true' do
+ let(:options) { { selected: true } }
+
+ it 'has selected styling and behavior' do
+ expect(rendered_component).to have_css ".selected"
+ end
+ end
+ end
+
+ describe 'category & variant' do
+ context 'with category variants' do
+ where(:variant) { [:default, :confirm, :danger] }
+
+ let(:options) { { variant: variant, category: :tertiary } }
+
+ with_them do
+ it 'renders the button in correct variant && category' do
+ expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
+ expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}-tertiary")
+ end
+ end
+ end
+
+ context 'with non-category variants' do
+ where(:variant) { [:dashed, :link, :reset] }
+
+ let(:options) { { variant: variant, category: :tertiary } }
+
+ with_them do
+ it 'renders the button in correct variant && category' do
+ expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
+ expect(rendered_component).not_to have_css(".#{described_class::VARIANT_CLASSES[variant]}-tertiary")
+ end
+ end
+ end
+
+ context 'with primary category' do
+ where(:variant) { [:default, :confirm, :danger] }
+
+ let(:options) { { variant: variant, category: :primary } }
+
+ with_them do
+ it 'renders the button in correct variant && category' do
+ expect(rendered_component).to have_css(".#{described_class::VARIANT_CLASSES[variant]}")
+ expect(rendered_component).not_to have_css(".#{described_class::VARIANT_CLASSES[variant]}-primary")
+ end
+ end
+ end
+ end
+
+ describe 'size' do
+ context 'by default (medium)' do
+ it 'applies medium class' do
+ expect(rendered_component).to have_css ".btn-md"
+ end
+ end
+
+ context 'when set to small' do
+ let(:options) { { size: :small } }
+
+ it "applies the small class to the button" do
+ expect(rendered_component).to have_css ".btn-sm"
+ end
+ end
+ end
+
+ describe 'icon' do
+ it 'has none by default' do
+ expect(rendered_component).not_to have_css ".gl-icon"
+ end
+
+ context 'with icon' do
+ let(:options) { { icon: 'star-o', icon_classes: 'custom-icon' } }
+
+ it 'renders an icon with custom CSS class' do
+ expect(rendered_component).to have_css "svg.gl-icon.gl-button-icon.custom-icon[data-testid='star-o-icon']"
+ expect(rendered_component).not_to have_css ".btn-icon"
+ end
+ end
+
+ context 'with icon only and no content' do
+ let(:content) { nil }
+ let(:options) { { icon: 'star-o' } }
+
+ it 'adds a "btn-icon" CSS class' do
+ expect(rendered_component).to have_css ".btn.btn-icon"
+ end
+ end
+
+ context 'with icon only and when loading' do
+ let(:content) { nil }
+ let(:options) { { icon: 'star-o', loading: true } }
+
+ it 'renders only a loading icon' do
+ expect(rendered_component).not_to have_css "svg.gl-icon.gl-button-icon.custom-icon[data-testid='star-o-icon']"
+ expect(rendered_component).to have_css ".gl-spinner[aria-label='Loading']"
+ end
+ end
+ end
+
+ describe 'type' do
+ context 'by default (without href)' do
+ it 'has type "button"' do
+ expect(rendered_component).to have_css "button[type='button']"
+ end
+ end
+
+ context 'when set to known type' do
+ where(:type) { [:button, :reset, :submit] }
+
+ let(:options) { { type: type } }
+
+ with_them do
+ it 'has the correct type' do
+ expect(rendered_component).to have_css "button[type='#{type}']"
+ end
+ end
+ end
+
+ context 'when set to unknown type' do
+ let(:options) { { type: :madeup } }
+
+ it 'has type "button"' do
+ expect(rendered_component).to have_css "button[type='button']"
+ end
+ end
+
+ context 'for links (with href)' do
+ let(:options) { { href: 'https://example.com', type: :reset } }
+
+ it 'ignores type' do
+ expect(rendered_component).not_to have_css "[type]"
+ end
+ end
+ end
+
+ describe 'link button' do
+ it 'renders a button tag with type="button" when "href" is not set' do
+ expect(rendered_component).to have_css "button[type='button']"
+ end
+
+ context 'when "href" is provided' do
+ let(:options) { { href: 'https://gitlab.com', target: '_blank' } }
+
+ it "renders a link instead of the button" do
+ expect(rendered_component).not_to have_css "button[type='button']"
+ expect(rendered_component).to have_css "a[href='https://gitlab.com'][target='_blank']"
+ end
+ end
+ end
+ end
+end
diff --git a/spec/components/pajamas/card_component_spec.rb b/spec/components/pajamas/card_component_spec.rb
new file mode 100644
index 00000000000..65522a9023f
--- /dev/null
+++ b/spec/components/pajamas/card_component_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::CardComponent, :aggregate_failures, type: :component do
+ let(:header) { 'Card header' }
+ let(:body) { 'Card body' }
+ let(:footer) { 'Card footer' }
+
+ context 'slots' do
+ before do
+ render_inline described_class.new do |c|
+ c.header { header }
+ c.body { body }
+ c.footer { footer }
+ end
+ end
+
+ it 'renders card header' do
+ expect(rendered_component).to have_content(header)
+ end
+
+ it 'renders card body' do
+ expect(rendered_component).to have_content(body)
+ end
+
+ it 'renders footer' do
+ expect(rendered_component).to have_content(footer)
+ end
+ end
+
+ context 'with defaults' do
+ before do
+ render_inline described_class.new
+ end
+
+ it 'does not have a header or footer' do
+ expect(rendered_component).not_to have_selector('.gl-card-header')
+ expect(rendered_component).not_to have_selector('.gl-card-footer')
+ end
+
+ it 'renders the card and body' do
+ expect(rendered_component).to have_selector('.gl-card')
+ expect(rendered_component).to have_selector('.gl-card-body')
+ end
+ end
+
+ context 'with custom options' do
+ before do
+ render_inline described_class.new(
+ card_options: { class: '_card_class_', data: { testid: '_card_testid_' } },
+ header_options: { class: '_header_class_', data: { testid: '_header_testid_' } },
+ body_options: { class: '_body_class_', data: { testid: '_body_testid_' } },
+ footer_options: { class: '_footer_class_', data: { testid: '_footer_testid_' } }) do |c|
+ c.header { header }
+ c.body { body }
+ c.footer { footer }
+ end
+ end
+
+ it 'renders card options' do
+ expect(rendered_component).to have_selector('._card_class_')
+ expect(rendered_component).to have_selector('[data-testid="_card_testid_"]')
+ end
+
+ it 'renders header options' do
+ expect(rendered_component).to have_selector('._header_class_')
+ expect(rendered_component).to have_selector('[data-testid="_header_testid_"]')
+ end
+
+ it 'renders body options' do
+ expect(rendered_component).to have_selector('._body_class_')
+ expect(rendered_component).to have_selector('[data-testid="_body_testid_"]')
+ end
+
+ it 'renders footer options' do
+ expect(rendered_component).to have_selector('._footer_class_')
+ expect(rendered_component).to have_selector('[data-testid="_footer_testid_"]')
+ end
+ end
+end
diff --git a/spec/components/pajamas/checkbox_component_spec.rb b/spec/components/pajamas/checkbox_component_spec.rb
new file mode 100644
index 00000000000..b2f3a84fbfe
--- /dev/null
+++ b/spec/components/pajamas/checkbox_component_spec.rb
@@ -0,0 +1,130 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::CheckboxComponent, :aggregate_failures, type: :component do
+ include FormBuilderHelpers
+
+ let_it_be(:method) { :view_diffs_file_by_file }
+ let_it_be(:label) { "Show one file at a time on merge request's Changes tab" }
+ let_it_be(:help_text) { 'Instead of all the files changed, show only one file at a time.' }
+
+ RSpec.shared_examples 'it renders unchecked checkbox with value of `1`' do
+ it 'renders unchecked checkbox with value of `1`' do
+ expect(rendered_component).to have_unchecked_field(label, with: '1')
+ end
+ end
+
+ context 'with default options' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ label: label
+ )
+ )
+ end
+ end
+
+ include_examples 'it renders unchecked checkbox with value of `1`'
+ include_examples 'it does not render help text'
+
+ it 'renders hidden input with value of `0`' do
+ expect(rendered_component).to have_field('user[view_diffs_file_by_file]', type: 'hidden', with: '0')
+ end
+ end
+
+ context 'with custom options' do
+ let_it_be(:checked_value) { 'yes' }
+ let_it_be(:unchecked_value) { 'no' }
+ let_it_be(:checkbox_options) { { class: 'checkbox-foo-bar', checked: true } }
+ let_it_be(:label_options) { { class: 'label-foo-bar' } }
+
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ label: label,
+ help_text: help_text,
+ checked_value: checked_value,
+ unchecked_value: unchecked_value,
+ checkbox_options: checkbox_options,
+ label_options: label_options
+ )
+ )
+ end
+ end
+
+ include_examples 'it renders help text'
+
+ it 'renders checked checkbox with value of `yes`' do
+ expect(rendered_component).to have_checked_field(label, with: checked_value, class: checkbox_options[:class])
+ end
+
+ it 'adds CSS class to label' do
+ expect(rendered_component).to have_selector('label.label-foo-bar')
+ end
+
+ it 'renders hidden input with value of `no`' do
+ expect(rendered_component).to have_field('user[view_diffs_file_by_file]', type: 'hidden', with: unchecked_value)
+ end
+ end
+
+ context 'with `label` slot' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method
+ )
+ ) do |c|
+ c.label { label }
+ end
+ end
+ end
+
+ include_examples 'it renders unchecked checkbox with value of `1`'
+ end
+
+ context 'with `help_text` slot' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ label: label
+ )
+ ) do |c|
+ c.help_text { help_text }
+ end
+ end
+ end
+
+ include_examples 'it renders unchecked checkbox with value of `1`'
+ include_examples 'it renders help text'
+ end
+
+ context 'with `label` and `help_text` slots' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method
+ )
+ ) do |c|
+ c.label { label }
+ c.help_text { help_text }
+ end
+ end
+ end
+
+ include_examples 'it renders unchecked checkbox with value of `1`'
+ include_examples 'it renders help text'
+ end
+end
diff --git a/spec/components/pajamas/component_spec.rb b/spec/components/pajamas/component_spec.rb
index 96f6b43bac1..7385519b468 100644
--- a/spec/components/pajamas/component_spec.rb
+++ b/spec/components/pajamas/component_spec.rb
@@ -23,4 +23,21 @@ RSpec.describe Pajamas::Component do
expect(value).to eq('something')
end
end
+
+ describe '#format_options' do
+ it 'merges CSS classes and additional options' do
+ expect(
+ subject.send(
+ :format_options,
+ options: { foo: 'bar', class: 'gl-display-flex gl-py-5' },
+ css_classes: %w(gl-px-5 gl-mt-5),
+ additional_options: { baz: 'bax' }
+ )
+ ).to match({
+ foo: 'bar',
+ baz: 'bax',
+ class: ['gl-px-5', 'gl-mt-5', 'gl-display-flex gl-py-5']
+ })
+ end
+ end
end
diff --git a/spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb b/spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb
new file mode 100644
index 00000000000..7a792592b3c
--- /dev/null
+++ b/spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
+ let(:form) { instance_double('ActionView::Helpers::FormBuilder') }
+ let(:component_class) do
+ Class.new do
+ attr_reader(
+ :form,
+ :method,
+ :label_argument,
+ :help_text_argument,
+ :label_options,
+ :input_options,
+ :value
+ )
+
+ def initialize(
+ form:,
+ method:,
+ label: nil,
+ help_text: nil,
+ label_options: {},
+ radio_options: {},
+ value: nil
+ )
+ @form = form
+ @method = method
+ @label_argument = label
+ @help_text_argument = help_text
+ @label_options = label_options
+ @input_options = radio_options
+ @value = value
+ end
+
+ def label_content
+ @label_argument
+ end
+
+ def help_text_content
+ @help_text_argument
+ end
+
+ def format_options(options:, css_classes: [], additional_options: {})
+ {}
+ end
+
+ include Pajamas::Concerns::CheckboxRadioLabelWithHelpText
+ include ActionView::Helpers::TagHelper
+ end
+ end
+
+ let_it_be(:method) { 'username' }
+ let_it_be(:label_options) { { class: 'foo-bar' } }
+ let_it_be(:value) { 'Foo bar' }
+
+ describe '#render_label_with_help_text' do
+ it 'calls `#format_options` with correct arguments' do
+ allow(form).to receive(:label)
+
+ component = component_class.new(form: form, method: method, label_options: label_options, value: value)
+
+ expect(component).to receive(:format_options).with(
+ options: label_options,
+ css_classes: ['custom-control-label'],
+ additional_options: { value: value }
+ )
+
+ component.render_label_with_help_text
+ end
+
+ context 'when `help_text` argument is passed' do
+ it 'calls `form.label` with `label` and `help_text` arguments used in the block' do
+ component = component_class.new(
+ form: form,
+ method: method,
+ label: 'Label argument',
+ help_text: 'Help text argument'
+ )
+
+ expected_label_entry = '<span>Label argument</span><p class="help-text"' \
+ ' data-testid="pajamas-component-help-text">Help text argument</p>'
+
+ expect(form).to receive(:label).with(method, {}) do |&block|
+ expect(block.call).to eq(expected_label_entry)
+ end
+
+ component.render_label_with_help_text
+ end
+ end
+
+ context 'when `help_text` argument is not passed' do
+ it 'calls `form.label` with `label` argument used in the block' do
+ component = component_class.new(
+ form: form,
+ method: method,
+ label: 'Label argument'
+ )
+
+ expected_label_entry = '<span>Label argument</span>'
+
+ expect(form).to receive(:label).with(method, {}) do |&block|
+ expect(block.call).to eq(expected_label_entry)
+ end
+
+ component.render_label_with_help_text
+ end
+ end
+ end
+end
diff --git a/spec/components/pajamas/concerns/checkbox_radio_options_spec.rb b/spec/components/pajamas/concerns/checkbox_radio_options_spec.rb
new file mode 100644
index 00000000000..3eb888e5f3b
--- /dev/null
+++ b/spec/components/pajamas/concerns/checkbox_radio_options_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::Concerns::CheckboxRadioOptions do
+ let(:component_class) do
+ Class.new do
+ include Pajamas::Concerns::CheckboxRadioOptions
+
+ attr_reader(:input_options)
+
+ def initialize(input_options: {})
+ @input_options = input_options
+ end
+
+ def format_options(options:, css_classes: [], additional_options: {})
+ {}
+ end
+ end
+ end
+
+ describe '#formatted_input_options' do
+ let_it_be(:input_options) { { class: 'foo-bar' } }
+
+ it 'calls `#format_options` with correct arguments' do
+ component = component_class.new(input_options: input_options)
+
+ expect(component).to receive(:format_options).with(options: input_options, css_classes: ['custom-control-input'])
+
+ component.formatted_input_options
+ end
+ end
+end
diff --git a/spec/components/pajamas/radio_component_spec.rb b/spec/components/pajamas/radio_component_spec.rb
new file mode 100644
index 00000000000..3885d101c7a
--- /dev/null
+++ b/spec/components/pajamas/radio_component_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::RadioComponent, :aggregate_failures, type: :component do
+ include FormBuilderHelpers
+
+ let_it_be(:method) { :access_level }
+ let_it_be(:label) { "Access Level" }
+ let_it_be(:value) { :regular }
+ let_it_be(:help_text) do
+ 'Administrators have access to all groups, projects, and users and can manage all features in this installation'
+ end
+
+ RSpec.shared_examples 'it renders unchecked radio' do
+ it 'renders unchecked radio' do
+ expect(rendered_component).to have_unchecked_field(label)
+ end
+ end
+
+ context 'with default options' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ value: value,
+ label: label
+ )
+ )
+ end
+ end
+
+ include_examples 'it renders unchecked radio'
+ include_examples 'it does not render help text'
+ end
+
+ context 'with custom options' do
+ let_it_be(:radio_options) { { class: 'radio-foo-bar', checked: true } }
+ let_it_be(:label_options) { { class: 'label-foo-bar' } }
+
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ value: method,
+ label: label,
+ help_text: help_text,
+ radio_options: radio_options,
+ label_options: label_options
+ )
+ )
+ end
+ end
+
+ include_examples 'it renders help text'
+
+ it 'renders checked radio' do
+ expect(rendered_component).to have_checked_field(label, class: radio_options[:class])
+ end
+
+ it 'adds CSS class to label' do
+ expect(rendered_component).to have_selector('label.label-foo-bar')
+ end
+ end
+
+ context 'with `label` slot' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ value: value
+ )
+ ) do |c|
+ c.label { label }
+ end
+ end
+ end
+
+ include_examples 'it renders unchecked radio'
+ end
+
+ context 'with `help_text` slot' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ value: value,
+ label: label
+ )
+ ) do |c|
+ c.help_text { help_text }
+ end
+ end
+ end
+
+ include_examples 'it renders unchecked radio'
+ include_examples 'it renders help text'
+ end
+
+ context 'with `label` and `help_text` slots' do
+ before do
+ fake_form_for do |form|
+ render_inline(
+ described_class.new(
+ form: form,
+ method: method,
+ value: value
+ )
+ ) do |c|
+ c.label { label }
+ c.help_text { help_text }
+ end
+ end
+ end
+
+ include_examples 'it renders unchecked radio'
+ include_examples 'it renders help text'
+ end
+end
diff --git a/spec/contracts/.gitignore b/spec/contracts/.gitignore
new file mode 100644
index 00000000000..cb89d4102d3
--- /dev/null
+++ b/spec/contracts/.gitignore
@@ -0,0 +1,2 @@
+logs/
+consumer/node_modules
diff --git a/spec/contracts/README.md b/spec/contracts/README.md
new file mode 100644
index 00000000000..d1f902064f1
--- /dev/null
+++ b/spec/contracts/README.md
@@ -0,0 +1,15 @@
+# Contract testing for GitLab
+
+This directory contains the contract test suites for GitLab, which use the [Pact](https://pact.io/) framework.
+
+The consumer tests are written with [`jest-pact`](https://github.com/pact-foundation/jest-pact) and the provider tests with [`pact-ruby`](https://github.com/pact-foundation/pact-ruby).
+
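+For orientation, a consumer spec broadly follows the `jest-pact` shape sketched below. This is a minimal sketch based on the specs under `consumer/specs/`; the import paths are illustrative:
+
+```javascript
+import { pactWith } from 'jest-pact';
+
+// Fixtures and endpoint helpers live under consumer/fixtures/ and consumer/endpoints/.
+import { DiffsBatch } from './fixtures/project/merge_request/diffs_batch.fixture';
+import { getDiffsBatch } from './endpoints/project/merge_requests';
+
+pactWith(
+  {
+    consumer: 'MergeRequest#show',
+    provider: 'Merge Request Diffs Batch Endpoint',
+    log: '../logs/consumer.log',
+    dir: '../contracts/project/merge_request/show',
+  },
+  (provider) => {
+    describe('Merge Request Diffs Batch Endpoint', () => {
+      beforeEach(() => {
+        // Register the expected request/response pair with the Pact mock service.
+        provider.addInteraction({
+          state: 'a merge request with diffs exists',
+          ...DiffsBatch.request,
+          willRespondWith: DiffsBatch.success,
+        });
+      });
+
+      it('returns a successful body', () =>
+        getDiffsBatch({ url: provider.mockService.baseUrl }).then((diffsBatch) => {
+          expect(diffsBatch).toEqual(DiffsBatch.body);
+        }));
+    });
+  },
+);
+```
+
+Provider verification is driven by the Pact helpers under `provider/pact_helpers/`, which verify the generated contracts in `contracts/` against the Rack test environment defined in `provider/environments/`.
+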
+## Write the tests
+
+- [Writing consumer tests](../../doc/development/testing_guide/contract/consumer_tests.md)
+- [Writing provider tests](../../doc/development/testing_guide/contract/provider_tests.md)
+
+## Run the tests
+
+- [Running consumer tests](../../doc/development/testing_guide/contract/index.md#run-the-consumer-tests)
+- [Running provider tests](../../doc/development/testing_guide/contract/index.md#run-the-provider-tests)
diff --git a/spec/contracts/consumer/.eslintrc.yml b/spec/contracts/consumer/.eslintrc.yml
new file mode 100644
index 00000000000..e4b380714d3
--- /dev/null
+++ b/spec/contracts/consumer/.eslintrc.yml
@@ -0,0 +1,7 @@
+---
+extends:
+ - 'plugin:@gitlab/jest'
+settings:
+ import/core-modules:
+ - '@pact-foundation/pact'
+ - jest-pact
diff --git a/spec/contracts/consumer/.node-version b/spec/contracts/consumer/.node-version
new file mode 100644
index 00000000000..18711d290ea
--- /dev/null
+++ b/spec/contracts/consumer/.node-version
@@ -0,0 +1 @@
+14.17.5
diff --git a/spec/contracts/consumer/babel.config.json b/spec/contracts/consumer/babel.config.json
new file mode 100644
index 00000000000..1320b9a3272
--- /dev/null
+++ b/spec/contracts/consumer/babel.config.json
@@ -0,0 +1,3 @@
+{
+ "presets": ["@babel/preset-env"]
+}
diff --git a/spec/contracts/consumer/endpoints/project/merge_requests.js b/spec/contracts/consumer/endpoints/project/merge_requests.js
new file mode 100644
index 00000000000..38773e5fb10
--- /dev/null
+++ b/spec/contracts/consumer/endpoints/project/merge_requests.js
@@ -0,0 +1,34 @@
+import { request } from 'axios';
+
+export function getDiffsMetadata(endpoint) {
+ const { url } = endpoint;
+
+ return request({
+ method: 'GET',
+ baseURL: url,
+ url: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_metadata.json',
+ headers: { Accept: '*/*' },
+ }).then((response) => response.data);
+}
+
+export function getDiscussions(endpoint) {
+ const { url } = endpoint;
+
+ return request({
+ method: 'GET',
+ baseURL: url,
+ url: '/gitlab-org/gitlab-qa/-/merge_requests/1/discussions.json',
+ headers: { Accept: '*/*' },
+ }).then((response) => response.data);
+}
+
+export function getDiffsBatch(endpoint) {
+ const { url } = endpoint;
+
+ return request({
+ method: 'GET',
+ baseURL: url,
+ url: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_batch.json?page=0',
+ headers: { Accept: '*/*' },
+ }).then((response) => response.data);
+}
diff --git a/spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js b/spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js
new file mode 100644
index 00000000000..b53e4bb335d
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js
@@ -0,0 +1,91 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { Matchers } from '@pact-foundation/pact';
+
+const body = {
+ diff_files: Matchers.eachLike({
+ content_sha: Matchers.string('b0c94059db75b2473d616d4b1fde1a77533355a3'),
+ submodule: Matchers.boolean(false),
+ edit_path: Matchers.string('/gitlab-qa-bot/...'),
+ ide_edit_path: Matchers.string('/gitlab-qa-bot/...'),
+ old_path_html: Matchers.string('Gemfile'),
+ new_path_html: Matchers.string('Gemfile'),
+ blob: {
+ id: Matchers.string('855071bb3928d140764885964f7be1bb3e582495'),
+ path: Matchers.string('Gemfile'),
+ name: Matchers.string('Gemfile'),
+ mode: Matchers.string('1234567'),
+ readable_text: Matchers.boolean(true),
+ icon: Matchers.string('doc-text'),
+ },
+ can_modify_blob: Matchers.boolean(false),
+ file_identifier_hash: Matchers.string('67d82b8716a5b6c52c7abf0b2cd99c7594ed3587'),
+ file_hash: Matchers.string('67d82b8716a5b6c52c7abf0b2cd99c7594ed3587'),
+ file_path: Matchers.string('Gemfile'),
+ old_path: Matchers.string('Gemfile'),
+ new_path: Matchers.string('Gemfile'),
+ new_file: Matchers.boolean(false),
+ renamed_file: Matchers.boolean(false),
+ deleted_file: Matchers.boolean(false),
+ diff_refs: {
+ base_sha: Matchers.string('67d82b8716a5b6c52c7abf0b2cd99c7594ed3587'),
+ start_sha: Matchers.string('67d82b8716a5b6c52c7abf0b2cd99c7594ed3587'),
+ head_sha: Matchers.string('67d82b8716a5b6c52c7abf0b2cd99c7594ed3587'),
+ },
+ mode_changed: Matchers.boolean(false),
+ a_mode: Matchers.string('123456'),
+ b_mode: Matchers.string('123456'),
+ viewer: {
+ name: Matchers.string('text'),
+ collapsed: Matchers.boolean(false),
+ },
+ old_size: Matchers.integer(2288),
+ new_size: Matchers.integer(2288),
+ added_lines: Matchers.integer(1),
+ removed_lines: Matchers.integer(1),
+ load_collapsed_diff_url: Matchers.string('/gitlab-qa-bot/...'),
+ view_path: Matchers.string('/gitlab-qa-bot/...'),
+ context_lines_path: Matchers.string('/gitlab-qa-bot/...'),
+ highlighted_diff_lines: Matchers.eachLike({
+      // The following values can also be null, which the matchers do not support:
+ // line_code: Matchers.string('de3150c01c3a946a6168173c4116741379fe3579_1_1'),
+ // old_line: Matchers.integer(1),
+ // new_line: Matchers.integer(1),
+ text: Matchers.string('source'),
+ rich_text: Matchers.string('<span></span>'),
+ can_receive_suggestion: Matchers.boolean(true),
+ }),
+ is_fully_expanded: Matchers.boolean(false),
+ }),
+ pagination: {
+ total_pages: Matchers.integer(1),
+ },
+};
+
+const DiffsBatch = {
+ body: Matchers.extractPayload(body),
+
+ success: {
+ status: 200,
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ body,
+ },
+
+ request: {
+ uponReceiving: 'a request for diff lines',
+ withRequest: {
+ method: 'GET',
+ path: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_batch.json',
+ headers: {
+ Accept: '*/*',
+ },
+ query: 'page=0',
+ },
+ },
+};
+
+export { DiffsBatch };
+
+/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js b/spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js
new file mode 100644
index 00000000000..39dbcf78ee7
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js
@@ -0,0 +1,98 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { Matchers } from '@pact-foundation/pact';
+
+const body = {
+ real_size: Matchers.string('1'),
+ size: Matchers.integer(1),
+ branch_name: Matchers.string('testing-branch-1'),
+ source_branch_exists: Matchers.boolean(true),
+ target_branch_name: Matchers.string('master'),
+ merge_request_diff: {
+ created_at: Matchers.iso8601DateTimeWithMillis('2022-02-17T11:47:08.804Z'),
+ commits_count: Matchers.integer(1),
+ latest: Matchers.boolean(true),
+ short_commit_sha: Matchers.string('aee1ffec'),
+ base_version_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773',
+ ),
+ head_version_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_head=true',
+ ),
+ version_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773',
+ ),
+ compare_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773&start_sha=aee1ffec2299c0cfb17c8821e931339b73a3759f',
+ ),
+ },
+ latest_diff: Matchers.boolean(true),
+ latest_version_path: Matchers.string('/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs'),
+ added_lines: Matchers.integer(1),
+ removed_lines: Matchers.integer(1),
+ render_overflow_warning: Matchers.boolean(false),
+ email_patch_path: Matchers.string('/gitlab-qa-bot/contract-testing/-/merge_requests/1.patch'),
+ plain_diff_path: Matchers.string('/gitlab-qa-bot/contract-testing/-/merge_requests/1.diff'),
+ merge_request_diffs: Matchers.eachLike({
+ commits_count: Matchers.integer(1),
+ latest: Matchers.boolean(true),
+ short_commit_sha: Matchers.string('aee1ffec'),
+ base_version_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773',
+ ),
+ head_version_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_head=true',
+ ),
+ version_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773',
+ ),
+ compare_path: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773&start_sha=aee1ffec2299c0cfb17c8821e931339b73a3759f',
+ ),
+ }),
+ definition_path_prefix: Matchers.string(
+ '/gitlab-qa-bot/contract-testing/-/blob/aee1ffec2299c0cfb17c8821e931339b73a3759f',
+ ),
+ diff_files: Matchers.eachLike({
+ added_lines: Matchers.integer(1),
+ removed_lines: Matchers.integer(1),
+ new_path: Matchers.string('Gemfile'),
+ old_path: Matchers.string('Gemfile'),
+ new_file: Matchers.boolean(false),
+ deleted_file: Matchers.boolean(false),
+ submodule: Matchers.boolean(false),
+ file_identifier_hash: Matchers.string('67d82b8716a5b6c52c7abf0b2cd99c7594ed3587'),
+ file_hash: Matchers.string('de3150c01c3a946a6168173c4116741379fe3579'),
+ }),
+ has_conflicts: Matchers.boolean(false),
+ can_merge: Matchers.boolean(false),
+ project_path: Matchers.string('gitlab-qa-bot/contract-testing'),
+ project_name: Matchers.string('contract-testing'),
+};
+
+const DiffsMetadata = {
+ body: Matchers.extractPayload(body),
+
+ success: {
+ status: 200,
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ body,
+ },
+
+ request: {
+ uponReceiving: 'a request for Diffs Metadata',
+ withRequest: {
+ method: 'GET',
+ path: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_metadata.json',
+ headers: {
+ Accept: '*/*',
+ },
+ },
+ },
+};
+
+export { DiffsMetadata };
+
+/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js b/spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js
new file mode 100644
index 00000000000..af0962a01cb
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js
@@ -0,0 +1,87 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { Matchers } from '@pact-foundation/pact';
+
+const body = Matchers.eachLike({
+ id: Matchers.string('fd73763cbcbf7b29eb8765d969a38f7d735e222a'),
+ reply_id: Matchers.string('fd73763cbcbf7b29eb8765d969a38f7d735e222a'),
+ project_id: Matchers.integer(6954442),
+ confidential: Matchers.boolean(false),
+ diff_discussion: Matchers.boolean(false),
+ expanded: Matchers.boolean(false),
+ for_commit: Matchers.boolean(false),
+ individual_note: Matchers.boolean(true),
+ resolvable: Matchers.boolean(false),
+ resolved_by_push: Matchers.boolean(false),
+ notes: Matchers.eachLike({
+ id: Matchers.string('76489845'),
+ author: {
+ id: Matchers.integer(1675733),
+ username: Matchers.string('gitlab-qa-bot'),
+ name: Matchers.string('gitlab-qa-bot'),
+ state: Matchers.string('active'),
+ avatar_url: Matchers.string(
+ 'https://secure.gravatar.com/avatar/8355ad0f2761367fae6b9c4fe80994b9?s=80&d=identicon',
+ ),
+ show_status: Matchers.boolean(false),
+ path: Matchers.string('/gitlab-qa-bot'),
+ },
+ created_at: Matchers.iso8601DateTimeWithMillis('2022-02-22T07:06:55.038Z'),
+ updated_at: Matchers.iso8601DateTimeWithMillis('2022-02-22T07:06:55.038Z'),
+ system: Matchers.boolean(false),
+ noteable_id: Matchers.integer(8333422),
+ noteable_type: Matchers.string('MergeRequest'),
+ resolvable: Matchers.boolean(false),
+ resolved: Matchers.boolean(true),
+ confidential: Matchers.boolean(false),
+ noteable_iid: Matchers.integer(1),
+ note: Matchers.string('This is a test comment'),
+ note_html: Matchers.string(
+ '<p data-sourcepos="1:1-1:22" dir="auto">This is a test comment</p>',
+ ),
+ current_user: {
+ can_edit: Matchers.boolean(true),
+ can_award_emoji: Matchers.boolean(true),
+ can_resolve: Matchers.boolean(false),
+ can_resolve_discussion: Matchers.boolean(false),
+ },
+ is_noteable_author: Matchers.boolean(true),
+ discussion_id: Matchers.string('fd73763cbcbf7b29eb8765d969a38f7d735e222a'),
+ emoji_awardable: Matchers.boolean(true),
+ report_abuse_path: Matchers.string('/gitlab-qa-bot/...'),
+ noteable_note_url: Matchers.string('https://staging.gitlab.com/gitlab-qa-bot/...'),
+ cached_markdown_version: Matchers.integer(1900552),
+ human_access: Matchers.string('Maintainer'),
+ is_contributor: Matchers.boolean(false),
+ project_name: Matchers.string('contract-testing'),
+ path: Matchers.string('/gitlab-qa-bot/...'),
+ }),
+ resolved: Matchers.boolean(true),
+});
+
+const Discussions = {
+ body: Matchers.extractPayload(body),
+
+ success: {
+ status: 200,
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ body,
+ },
+
+ request: {
+ uponReceiving: 'a request for discussions',
+ withRequest: {
+ method: 'GET',
+ path: '/gitlab-org/gitlab-qa/-/merge_requests/1/discussions.json',
+ headers: {
+ Accept: '*/*',
+ },
+ },
+ },
+};
+
+export { Discussions };
+
+/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/package.json b/spec/contracts/consumer/package.json
new file mode 100644
index 00000000000..6d3feaa6d4c
--- /dev/null
+++ b/spec/contracts/consumer/package.json
@@ -0,0 +1,26 @@
+{
+ "name": "consumer",
+ "version": "1.0.0",
+ "description": "consumer side contract testing",
+ "license": "MIT",
+ "repository": "https://gitlab.com/gitlab-org/gitlab.git",
+ "dependencies": {
+ "@pact-foundation/pact": "^9.17.2",
+ "axios": "^0.26.0",
+ "jest": "^27.5.1",
+ "jest-pact": "^0.9.1",
+ "prettier": "^2.5.1"
+ },
+ "scripts": {
+ "test": "jest --runInBand"
+ },
+ "jest": {
+ "transform": {
+ "^.+\\.[t|j]sx?$": "babel-jest"
+ }
+ },
+ "devDependencies": {
+ "@babel/preset-env": "^7.18.2",
+ "babel-jest": "^28.1.1"
+ }
+}
diff --git a/spec/contracts/consumer/specs/project/merge_request/show.spec.js b/spec/contracts/consumer/specs/project/merge_request/show.spec.js
new file mode 100644
index 00000000000..8c6e029cb12
--- /dev/null
+++ b/spec/contracts/consumer/specs/project/merge_request/show.spec.js
@@ -0,0 +1,112 @@
+/* eslint-disable @gitlab/require-i18n-strings */
+
+import { pactWith } from 'jest-pact';
+
+import { DiffsBatch } from '../../../fixtures/project/merge_request/diffs_batch.fixture';
+import { Discussions } from '../../../fixtures/project/merge_request/discussions.fixture';
+import { DiffsMetadata } from '../../../fixtures/project/merge_request/diffs_metadata.fixture';
+import {
+ getDiffsBatch,
+ getDiffsMetadata,
+ getDiscussions,
+} from '../../../endpoints/project/merge_requests';
+
+const CONSUMER_NAME = 'MergeRequest#show';
+const CONSUMER_LOG = '../logs/consumer.log';
+const CONTRACT_DIR = '../contracts/project/merge_request/show';
+const DIFFS_BATCH_PROVIDER_NAME = 'Merge Request Diffs Batch Endpoint';
+const DISCUSSIONS_PROVIDER_NAME = 'Merge Request Discussions Endpoint';
+const DIFFS_METADATA_PROVIDER_NAME = 'Merge Request Diffs Metadata Endpoint';
+
+// API endpoint: /merge_requests/:id/diffs_batch.json
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: DIFFS_BATCH_PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(DIFFS_BATCH_PROVIDER_NAME, () => {
+ beforeEach(() => {
+ const interaction = {
+ state: 'a merge request with diffs exists',
+ ...DiffsBatch.request,
+ willRespondWith: DiffsBatch.success,
+ };
+ provider.addInteraction(interaction);
+ });
+
+ it('returns a successful body', () => {
+ return getDiffsBatch({
+ url: provider.mockService.baseUrl,
+ }).then((diffsBatch) => {
+ expect(diffsBatch).toEqual(DiffsBatch.body);
+ });
+ });
+ });
+ },
+);
+
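+// API endpoint: /merge_requests/:id/discussions.json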
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: DISCUSSIONS_PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(DISCUSSIONS_PROVIDER_NAME, () => {
+ beforeEach(() => {
+ const interaction = {
+ state: 'a merge request with discussions exists',
+ ...Discussions.request,
+ willRespondWith: Discussions.success,
+ };
+ provider.addInteraction(interaction);
+ });
+
+      it('returns a successful body', () => {
+ return getDiscussions({
+ url: provider.mockService.baseUrl,
+ }).then((discussions) => {
+ expect(discussions).toEqual(Discussions.body);
+ });
+ });
+ });
+ },
+);
+
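+// API endpoint: /merge_requests/:id/diffs_metadata.json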
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: DIFFS_METADATA_PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(DIFFS_METADATA_PROVIDER_NAME, () => {
+ beforeEach(() => {
+ const interaction = {
+ state: 'a merge request exists',
+ ...DiffsMetadata.request,
+ willRespondWith: DiffsMetadata.success,
+ };
+ provider.addInteraction(interaction);
+ });
+
+      it('returns a successful body', () => {
+ return getDiffsMetadata({
+ url: provider.mockService.baseUrl,
+ }).then((diffsMetadata) => {
+ expect(diffsMetadata).toEqual(DiffsMetadata.body);
+ });
+ });
+ });
+ },
+);
+
+/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_batch_endpoint.json b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_batch_endpoint.json
new file mode 100644
index 00000000000..3fa13766766
--- /dev/null
+++ b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_batch_endpoint.json
@@ -0,0 +1,229 @@
+{
+ "consumer": {
+ "name": "MergeRequest#show"
+ },
+ "provider": {
+ "name": "Merge Request Diffs Batch Endpoint"
+ },
+ "interactions": [
+ {
+ "description": "a request for diff lines",
+ "providerState": "a merge request with diffs exists",
+ "request": {
+ "method": "GET",
+ "path": "/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_batch.json",
+ "query": "page=0",
+ "headers": {
+ "Accept": "*/*"
+ }
+ },
+ "response": {
+ "status": 200,
+ "headers": {
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": {
+ "diff_files": [
+ {
+ "content_sha": "b0c94059db75b2473d616d4b1fde1a77533355a3",
+ "submodule": false,
+ "edit_path": "/gitlab-qa-bot/...",
+ "ide_edit_path": "/gitlab-qa-bot/...",
+ "old_path_html": "Gemfile",
+ "new_path_html": "Gemfile",
+ "blob": {
+ "id": "855071bb3928d140764885964f7be1bb3e582495",
+ "path": "Gemfile",
+ "name": "Gemfile",
+ "mode": "1234567",
+ "readable_text": true,
+ "icon": "doc-text"
+ },
+ "can_modify_blob": false,
+ "file_identifier_hash": "67d82b8716a5b6c52c7abf0b2cd99c7594ed3587",
+ "file_hash": "67d82b8716a5b6c52c7abf0b2cd99c7594ed3587",
+ "file_path": "Gemfile",
+ "old_path": "Gemfile",
+ "new_path": "Gemfile",
+ "new_file": false,
+ "renamed_file": false,
+ "deleted_file": false,
+ "diff_refs": {
+ "base_sha": "67d82b8716a5b6c52c7abf0b2cd99c7594ed3587",
+ "start_sha": "67d82b8716a5b6c52c7abf0b2cd99c7594ed3587",
+ "head_sha": "67d82b8716a5b6c52c7abf0b2cd99c7594ed3587"
+ },
+ "mode_changed": false,
+ "a_mode": "123456",
+ "b_mode": "123456",
+ "viewer": {
+ "name": "text",
+ "collapsed": false
+ },
+ "old_size": 2288,
+ "new_size": 2288,
+ "added_lines": 1,
+ "removed_lines": 1,
+ "load_collapsed_diff_url": "/gitlab-qa-bot/...",
+ "view_path": "/gitlab-qa-bot/...",
+ "context_lines_path": "/gitlab-qa-bot/...",
+ "highlighted_diff_lines": [
+ {
+ "text": "source",
+ "rich_text": "<span></span>",
+ "can_receive_suggestion": true
+ }
+ ],
+ "is_fully_expanded": false
+ }
+ ],
+ "pagination": {
+ "total_pages": 1
+ }
+ },
+ "matchingRules": {
+ "$.body.diff_files": {
+ "min": 1
+ },
+ "$.body.diff_files[*].*": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].content_sha": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].submodule": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].edit_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].ide_edit_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].old_path_html": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].new_path_html": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].blob.id": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].blob.path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].blob.name": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].blob.mode": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].blob.readable_text": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].blob.icon": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].can_modify_blob": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].file_identifier_hash": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].file_hash": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].file_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].old_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].new_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].new_file": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].renamed_file": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].deleted_file": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].diff_refs.base_sha": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].diff_refs.start_sha": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].diff_refs.head_sha": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].mode_changed": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].a_mode": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].b_mode": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].viewer.name": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].viewer.collapsed": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].old_size": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].new_size": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].added_lines": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].removed_lines": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].load_collapsed_diff_url": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].view_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].context_lines_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].highlighted_diff_lines": {
+ "min": 1
+ },
+ "$.body.diff_files[*].highlighted_diff_lines[*].*": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].highlighted_diff_lines[*].text": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].highlighted_diff_lines[*].rich_text": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].highlighted_diff_lines[*].can_receive_suggestion": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].is_fully_expanded": {
+ "match": "type"
+ },
+ "$.body.pagination.total_pages": {
+ "match": "type"
+ }
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+}
\ No newline at end of file
diff --git a/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json
new file mode 100644
index 00000000000..b98a0127e54
--- /dev/null
+++ b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json
@@ -0,0 +1,223 @@
+{
+ "consumer": {
+ "name": "MergeRequest#show"
+ },
+ "provider": {
+ "name": "Merge Request Diffs Metadata Endpoint"
+ },
+ "interactions": [
+ {
+ "description": "a request for Diffs Metadata",
+ "providerState": "a merge request exists",
+ "request": {
+ "method": "GET",
+ "path": "/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_metadata.json",
+ "headers": {
+ "Accept": "*/*"
+ }
+ },
+ "response": {
+ "status": 200,
+ "headers": {
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": {
+ "real_size": "1",
+ "size": 1,
+ "branch_name": "testing-branch-1",
+ "source_branch_exists": true,
+ "target_branch_name": "master",
+ "merge_request_diff": {
+ "created_at": "2022-02-17T11:47:08.804Z",
+ "commits_count": 1,
+ "latest": true,
+ "short_commit_sha": "aee1ffec",
+ "base_version_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773",
+ "head_version_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_head=true",
+ "version_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773",
+ "compare_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773&start_sha=aee1ffec2299c0cfb17c8821e931339b73a3759f"
+ },
+ "latest_diff": true,
+ "latest_version_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs",
+ "added_lines": 1,
+ "removed_lines": 1,
+ "render_overflow_warning": false,
+ "email_patch_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1.patch",
+ "plain_diff_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1.diff",
+ "merge_request_diffs": [
+ {
+ "commits_count": 1,
+ "latest": true,
+ "short_commit_sha": "aee1ffec",
+ "base_version_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773",
+ "head_version_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_head=true",
+ "version_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773",
+ "compare_path": "/gitlab-qa-bot/contract-testing/-/merge_requests/1/diffs?diff_id=10581773&start_sha=aee1ffec2299c0cfb17c8821e931339b73a3759f"
+ }
+ ],
+ "definition_path_prefix": "/gitlab-qa-bot/contract-testing/-/blob/aee1ffec2299c0cfb17c8821e931339b73a3759f",
+ "diff_files": [
+ {
+ "added_lines": 1,
+ "removed_lines": 1,
+ "new_path": "Gemfile",
+ "old_path": "Gemfile",
+ "new_file": false,
+ "deleted_file": false,
+ "submodule": false,
+ "file_identifier_hash": "67d82b8716a5b6c52c7abf0b2cd99c7594ed3587",
+ "file_hash": "de3150c01c3a946a6168173c4116741379fe3579"
+ }
+ ],
+ "has_conflicts": false,
+ "can_merge": false,
+ "project_path": "gitlab-qa-bot/contract-testing",
+ "project_name": "contract-testing"
+ },
+ "matchingRules": {
+ "$.body.real_size": {
+ "match": "type"
+ },
+ "$.body.size": {
+ "match": "type"
+ },
+ "$.body.branch_name": {
+ "match": "type"
+ },
+ "$.body.source_branch_exists": {
+ "match": "type"
+ },
+ "$.body.target_branch_name": {
+ "match": "type"
+ },
+ "$.body.merge_request_diff.created_at": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body.merge_request_diff.commits_count": {
+ "match": "type"
+ },
+ "$.body.merge_request_diff.latest": {
+ "match": "type"
+ },
+ "$.body.merge_request_diff.short_commit_sha": {
+ "match": "type"
+ },
+ "$.body.merge_request_diff.base_version_path": {
+ "match": "type"
+ },
+ "$.body.merge_request_diff.head_version_path": {
+ "match": "type"
+ },
+ "$.body.merge_request_diff.version_path": {
+ "match": "type"
+ },
+ "$.body.merge_request_diff.compare_path": {
+ "match": "type"
+ },
+ "$.body.latest_diff": {
+ "match": "type"
+ },
+ "$.body.latest_version_path": {
+ "match": "type"
+ },
+ "$.body.added_lines": {
+ "match": "type"
+ },
+ "$.body.removed_lines": {
+ "match": "type"
+ },
+ "$.body.render_overflow_warning": {
+ "match": "type"
+ },
+ "$.body.email_patch_path": {
+ "match": "type"
+ },
+ "$.body.plain_diff_path": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs": {
+ "min": 1
+ },
+ "$.body.merge_request_diffs[*].*": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs[*].commits_count": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs[*].latest": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs[*].short_commit_sha": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs[*].base_version_path": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs[*].head_version_path": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs[*].version_path": {
+ "match": "type"
+ },
+ "$.body.merge_request_diffs[*].compare_path": {
+ "match": "type"
+ },
+ "$.body.definition_path_prefix": {
+ "match": "type"
+ },
+ "$.body.diff_files": {
+ "min": 1
+ },
+ "$.body.diff_files[*].*": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].added_lines": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].removed_lines": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].new_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].old_path": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].new_file": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].deleted_file": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].submodule": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].file_identifier_hash": {
+ "match": "type"
+ },
+ "$.body.diff_files[*].file_hash": {
+ "match": "type"
+ },
+ "$.body.has_conflicts": {
+ "match": "type"
+ },
+ "$.body.can_merge": {
+ "match": "type"
+ },
+ "$.body.project_path": {
+ "match": "type"
+ },
+ "$.body.project_name": {
+ "match": "type"
+ }
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+}
\ No newline at end of file
diff --git a/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_discussions_endpoint.json b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_discussions_endpoint.json
new file mode 100644
index 00000000000..ecaf9c123af
--- /dev/null
+++ b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_discussions_endpoint.json
@@ -0,0 +1,236 @@
+{
+ "consumer": {
+ "name": "MergeRequest#show"
+ },
+ "provider": {
+ "name": "Merge Request Discussions Endpoint"
+ },
+ "interactions": [
+ {
+ "description": "a request for discussions",
+ "providerState": "a merge request with discussions exists",
+ "request": {
+ "method": "GET",
+ "path": "/gitlab-org/gitlab-qa/-/merge_requests/1/discussions.json",
+ "headers": {
+ "Accept": "*/*"
+ }
+ },
+ "response": {
+ "status": 200,
+ "headers": {
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": [
+ {
+ "id": "fd73763cbcbf7b29eb8765d969a38f7d735e222a",
+ "reply_id": "fd73763cbcbf7b29eb8765d969a38f7d735e222a",
+ "project_id": 6954442,
+ "confidential": false,
+ "diff_discussion": false,
+ "expanded": false,
+ "for_commit": false,
+ "individual_note": true,
+ "resolvable": false,
+ "resolved_by_push": false,
+ "notes": [
+ {
+ "id": "76489845",
+ "author": {
+ "id": 1675733,
+ "username": "gitlab-qa-bot",
+ "name": "gitlab-qa-bot",
+ "state": "active",
+ "avatar_url": "https://secure.gravatar.com/avatar/8355ad0f2761367fae6b9c4fe80994b9?s=80&d=identicon",
+ "show_status": false,
+ "path": "/gitlab-qa-bot"
+ },
+ "created_at": "2022-02-22T07:06:55.038Z",
+ "updated_at": "2022-02-22T07:06:55.038Z",
+ "system": false,
+ "noteable_id": 8333422,
+ "noteable_type": "MergeRequest",
+ "resolvable": false,
+ "resolved": true,
+ "confidential": false,
+ "noteable_iid": 1,
+ "note": "This is a test comment",
+ "note_html": "<p data-sourcepos=\"1:1-1:22\" dir=\"auto\">This is a test comment</p>",
+ "current_user": {
+ "can_edit": true,
+ "can_award_emoji": true,
+ "can_resolve": false,
+ "can_resolve_discussion": false
+ },
+ "is_noteable_author": true,
+ "discussion_id": "fd73763cbcbf7b29eb8765d969a38f7d735e222a",
+ "emoji_awardable": true,
+ "report_abuse_path": "/gitlab-qa-bot/...",
+ "noteable_note_url": "https://staging.gitlab.com/gitlab-qa-bot/...",
+ "cached_markdown_version": 1900552,
+ "human_access": "Maintainer",
+ "is_contributor": false,
+ "project_name": "contract-testing",
+ "path": "/gitlab-qa-bot/..."
+ }
+ ],
+ "resolved": true
+ }
+ ],
+ "matchingRules": {
+ "$.body": {
+ "min": 1
+ },
+ "$.body[*].*": {
+ "match": "type"
+ },
+ "$.body[*].id": {
+ "match": "type"
+ },
+ "$.body[*].reply_id": {
+ "match": "type"
+ },
+ "$.body[*].project_id": {
+ "match": "type"
+ },
+ "$.body[*].confidential": {
+ "match": "type"
+ },
+ "$.body[*].diff_discussion": {
+ "match": "type"
+ },
+ "$.body[*].expanded": {
+ "match": "type"
+ },
+ "$.body[*].for_commit": {
+ "match": "type"
+ },
+ "$.body[*].individual_note": {
+ "match": "type"
+ },
+ "$.body[*].resolvable": {
+ "match": "type"
+ },
+ "$.body[*].resolved_by_push": {
+ "match": "type"
+ },
+ "$.body[*].notes": {
+ "min": 1
+ },
+ "$.body[*].notes[*].*": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].id": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].author.id": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].author.username": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].author.name": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].author.state": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].author.avatar_url": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].author.show_status": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].author.path": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].created_at": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body[*].notes[*].updated_at": {
+ "match": "regex",
+ "regex": "^\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+([+-][0-2]\\d(:?[0-5]\\d)?|Z)$"
+ },
+ "$.body[*].notes[*].system": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].noteable_id": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].noteable_type": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].resolvable": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].resolved": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].confidential": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].noteable_iid": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].note": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].note_html": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].current_user.can_edit": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].current_user.can_award_emoji": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].current_user.can_resolve": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].current_user.can_resolve_discussion": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].is_noteable_author": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].discussion_id": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].emoji_awardable": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].report_abuse_path": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].noteable_note_url": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].cached_markdown_version": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].human_access": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].is_contributor": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].project_name": {
+ "match": "type"
+ },
+ "$.body[*].notes[*].path": {
+ "match": "type"
+ },
+ "$.body[*].resolved": {
+ "match": "type"
+ }
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+}
\ No newline at end of file
diff --git a/spec/contracts/provider/environments/test.rb b/spec/contracts/provider/environments/test.rb
new file mode 100644
index 00000000000..6efb19508d8
--- /dev/null
+++ b/spec/contracts/provider/environments/test.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Provider
+ module Environments
+ class Test
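+      # Serves the full GitLab Rails application so Pact provider verification exercises the real routes.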
+ def self.app
+ Rack::Builder.app do
+ map "/" do
+ run Gitlab::Application
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/helpers/users_helper.rb b/spec/contracts/provider/helpers/users_helper.rb
new file mode 100644
index 00000000000..1982bd9cfd2
--- /dev/null
+++ b/spec/contracts/provider/helpers/users_helper.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+module Provider
+ module UsersHelper
+ CONTRACT_USER_NAME = "Contract Test User"
+ end
+end
diff --git a/spec/contracts/provider/pact_helpers/project/merge_request/diffs_batch_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_request/diffs_batch_helper.rb
new file mode 100644
index 00000000000..7d1fbe91e86
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/merge_request/diffs_batch_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../spec_helper'
+require_relative '../../../states/project/merge_request/diffs_batch_state'
+
+module Provider
+ module DiffsBatchHelper
+ Pact.service_provider "Merge Request Diffs Batch Endpoint" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'MergeRequest#show' do
+ pact_uri '../contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_batch_endpoint.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/pact_helpers/project/merge_request/diffs_metadata_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_request/diffs_metadata_helper.rb
new file mode 100644
index 00000000000..5f0c58d18d4
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/merge_request/diffs_metadata_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../spec_helper'
+require_relative '../../../states/project/merge_request/diffs_metadata_state'
+
+module Provider
+ module DiffsMetadataHelper
+ Pact.service_provider "Merge Request Diffs Metadata Endpoint" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'MergeRequest#show' do
+ pact_uri '../contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/pact_helpers/project/merge_request/discussions_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_request/discussions_helper.rb
new file mode 100644
index 00000000000..0f4244ba40a
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/merge_request/discussions_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../spec_helper'
+require_relative '../../../states/project/merge_request/discussions_state'
+
+module Provider
+ module DiscussionsHelper
+ Pact.service_provider "Merge Request Discussions Endpoint" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'MergeRequest#show' do
+ pact_uri '../contracts/project/merge_request/show/mergerequest#show-merge_request_discussions_endpoint.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/spec_helper.rb b/spec/contracts/provider/spec_helper.rb
new file mode 100644
index 00000000000..6009d6524e1
--- /dev/null
+++ b/spec/contracts/provider/spec_helper.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'zeitwerk'
+require_relative 'helpers/users_helper'
+
+RSpec.configure do |config|
+ config.include Devise::Test::IntegrationHelpers
+ config.include FactoryBot::Syntax::Methods
+
+ config.before do
+ user = create(:user, name: Provider::UsersHelper::CONTRACT_USER_NAME).tap do |user|
+ user.current_sign_in_at = Time.current
+ end
+
+ sign_in user
+ end
+end
+
+Pact.configure do |config|
+ config.include FactoryBot::Syntax::Methods
+end
+
+module SpecHelper
+ require_relative '../../../config/bundler_setup'
+ Bundler.require(:default)
+
+ root = File.expand_path('../', __dir__)
+
+ loader = Zeitwerk::Loader.new
+ loader.push_dir(root)
+
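+  # Keep the JS consumer suite and the generated contract files out of the Ruby autoloader.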
+ loader.ignore("#{root}/consumer")
+ loader.ignore("#{root}/contracts")
+
+ loader.collapse("#{root}/provider/spec")
+
+ loader.setup
+end
diff --git a/spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb b/spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb
new file mode 100644
index 00000000000..ac20c17c187
--- /dev/null
+++ b/spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "MergeRequest#show" do
+ provider_state "a merge request with diffs exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, :custom_repo, name: 'gitlab-qa', namespace: namespace, files: {})
+
+ project.add_maintainer(user)
+
+ merge_request = create(:merge_request_with_multiple_diffs, source_project: project)
+ merge_request_diff = create(:merge_request_diff, merge_request: merge_request)
+
+ create(:merge_request_diff_file, :new_file, merge_request_diff: merge_request_diff)
+ end
+ end
+end
diff --git a/spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb b/spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb
new file mode 100644
index 00000000000..8754232690c
--- /dev/null
+++ b/spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "MergeRequest#show" do
+ provider_state "a merge request exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, :custom_repo, name: 'gitlab-qa', namespace: namespace, files: {})
+
+ project.add_maintainer(user)
+
+ merge_request = create(:merge_request, source_project: project)
+ merge_request_diff = create(:merge_request_diff, merge_request: merge_request)
+
+ create(:merge_request_diff_file, :new_file, merge_request_diff: merge_request_diff)
+ end
+ end
+end
diff --git a/spec/contracts/provider/states/project/merge_request/discussions_state.rb b/spec/contracts/provider/states/project/merge_request/discussions_state.rb
new file mode 100644
index 00000000000..2d64f85eedf
--- /dev/null
+++ b/spec/contracts/provider/states/project/merge_request/discussions_state.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "MergeRequest#show" do
+ provider_state "a merge request with discussions exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, name: 'gitlab-qa', namespace: namespace)
+
+ project.add_maintainer(user)
+
+ merge_request = create(:merge_request_with_diffs, source_project: project, author: user)
+
+ create(:discussion_note_on_merge_request, noteable: merge_request, project: project, author: user)
+ end
+ end
+end
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index ca2b50b529c..c432adb6ae3 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -210,63 +210,6 @@ RSpec.describe Admin::ClustersController do
end
end
- describe 'POST authorize AWS role for EKS cluster' do
- let!(:role) { create(:aws_role, user: admin) }
-
- let(:role_arn) { 'arn:new-role' }
- let(:params) do
- {
- cluster: {
- role_arn: role_arn
- }
- }
- end
-
- def go
- post :authorize_aws_role, params: params
- end
-
- include_examples ':certificate_based_clusters feature flag controller responses' do
- let(:subject) { go }
- end
-
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_return(double(execute: double))
- end
-
- it 'updates the associated role with the supplied ARN' do
- go
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(role.reload.role_arn).to eq(role_arn)
- end
-
- context 'supplied role is invalid' do
- let(:role_arn) { 'invalid-role' }
-
- it 'does not update the associated role' do
- expect { go }.not_to change { role.role_arn }
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- describe 'security' do
- before do
- allow_next_instance_of(Clusters::Aws::AuthorizeRoleService) do |service|
- response = double(status: :ok, body: double)
-
- allow(service).to receive(:execute).and_return(response)
- end
- end
-
- it { expect { go }.to be_allowed_for(:admin) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
- end
-
describe 'DELETE clear cluster cache' do
let(:cluster) { create(:cluster, :instance) }
let!(:kubernetes_namespace) do
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 410bc0ddc1d..0e456858b49 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Admin::IntegrationsController do
end
describe '#update' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let(:integration) { create(:jira_integration, :instance) }
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index ddd80b67639..c5306fda0a5 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -105,10 +105,6 @@ RSpec.describe ApplicationController do
describe 'session expiration' do
controller(described_class) do
- # The anonymous controller will report 401 and fail to run any actions.
- # Normally, GitLab will just redirect you to sign in.
- skip_before_action :authenticate_user!, only: :index
-
def index
render html: 'authenticated'
end
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 0a809e80fcd..e874df62cd7 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -411,6 +411,7 @@ RSpec.describe AutocompleteController do
expect(json_response.count).to eq(1)
expect(json_response.first['title']).to eq(deploy_key.title)
expect(json_response.first['owner']['id']).to eq(deploy_key.user.id)
+ expect(json_response.first['deploy_keys_projects']).to be_nil
end
context 'with an unknown project' do
@@ -433,6 +434,7 @@ RSpec.describe AutocompleteController do
expect(json_response.count).to eq(1)
expect(json_response.first['title']).to eq(deploy_key.title)
expect(json_response.first['owner']).to be_nil
+ expect(json_response.first['deploy_keys_projects']).to be_nil
end
end
end
diff --git a/spec/controllers/chaos_controller_spec.rb b/spec/controllers/chaos_controller_spec.rb
index 26ae4a6b693..36ccf868d82 100644
--- a/spec/controllers/chaos_controller_spec.rb
+++ b/spec/controllers/chaos_controller_spec.rb
@@ -147,8 +147,8 @@ RSpec.describe ChaosController do
let(:gc_stat) { GC.stat.stringify_keys }
it 'runs a full GC on the current web worker' do
- expect(Prometheus::PidProvider).to receive(:worker_id).and_return('worker-0')
- expect(Gitlab::Chaos).to receive(:run_gc).and_return(gc_stat)
+ allow(Prometheus::PidProvider).to receive(:worker_id).and_return('worker-0')
+ allow(Gitlab::Chaos).to receive(:run_gc).and_return(gc_stat)
post :gc
diff --git a/spec/controllers/concerns/sorting_preference_spec.rb b/spec/controllers/concerns/sorting_preference_spec.rb
index c0091e8b694..82a920215ca 100644
--- a/spec/controllers/concerns/sorting_preference_spec.rb
+++ b/spec/controllers/concerns/sorting_preference_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe SortingPreference do
let(:user) { create(:user) }
+ let(:params) { {} }
let(:controller_class) do
Class.new do
@@ -23,6 +24,46 @@ RSpec.describe SortingPreference do
allow(controller).to receive(:sorting_field).and_return(:issues_sort)
end
+ describe '#set_sort_order' do
+ let(:group) { build(:group) }
+ let(:issue_weights_available) { true }
+
+ before do
+ allow(controller).to receive(:default_sort_order).and_return('updated_desc')
+ allow(controller).to receive(:action_name).and_return('issues')
+ allow(controller).to receive(:can_sort_by_issue_weight?).and_return(issue_weights_available)
+ user.user_preference.update!(issues_sort: sorting_field)
+ end
+
+ subject { controller.send(:set_sort_order) }
+
+ context 'when user preference contains allowed sorting' do
+ let(:sorting_field) { 'updated_asc' }
+
+ it 'sets sort order from user_preference' do
+ is_expected.to eq('updated_asc')
+ end
+ end
+
+ context 'when user preference contains weight sorting' do
+ let(:sorting_field) { 'weight_desc' }
+
+ context 'when user can sort by issue weight' do
+ it 'sets sort order from user_preference' do
+ is_expected.to eq('weight_desc')
+ end
+ end
+
+ context 'when user cannot sort by issue weight' do
+ let(:issue_weights_available) { false }
+
+ it 'sets default sort order' do
+ is_expected.to eq('updated_desc')
+ end
+ end
+ end
+ end
+
describe '#set_sort_order_from_user_preference' do
subject { controller.send(:set_sort_order_from_user_preference) }
@@ -49,8 +90,6 @@ RSpec.describe SortingPreference do
end
context 'when a user sorting preference exists' do
- let(:params) { {} }
-
before do
user.user_preference.update!(issues_sort: 'updated_asc')
end
@@ -81,7 +120,6 @@ RSpec.describe SortingPreference do
context 'when cookie exists' do
let(:cookies) { { 'issue_sort' => 'id_asc' } }
- let(:params) { {} }
it 'sets the cookie with the right values and flags' do
subject
diff --git a/spec/controllers/confirmations_controller_spec.rb b/spec/controllers/confirmations_controller_spec.rb
index 3b5afbcebca..5b137ada141 100644
--- a/spec/controllers/confirmations_controller_spec.rb
+++ b/spec/controllers/confirmations_controller_spec.rb
@@ -146,13 +146,26 @@ RSpec.describe ConfirmationsController do
stub_application_setting(recaptcha_enabled: true)
end
- it 'displays an error when the reCAPTCHA is not solved' do
- Recaptcha.configuration.skip_verify_env.delete('test')
+ context 'when the reCAPTCHA is not solved' do
+ before do
+ Recaptcha.configuration.skip_verify_env.delete('test')
+ end
- perform_request
+ it 'displays an error' do
+ perform_request
- expect(response).to render_template(:new)
- expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
+ expect(response).to render_template(:new)
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
+ end
+
+ it 'sets gon variables' do
+ Gon.clear
+
+ perform_request
+
+ expect(response).to render_template(:new)
+ expect(Gon.all_variables).not_to be_empty
+ end
end
it 'successfully sends password reset when reCAPTCHA is solved' do
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index 0818dce776d..e85f5b7a972 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -5,6 +5,9 @@ require 'spec_helper'
RSpec.describe GraphqlController do
include GraphqlHelpers
+ # two days is enough to make timezones irrelevant
+ let_it_be(:last_activity_on) { 2.days.ago.to_date }
+
before do
stub_feature_flags(graphql: true)
end
@@ -40,7 +43,7 @@ RSpec.describe GraphqlController do
describe 'POST #execute' do
context 'when user is logged in' do
- let(:user) { create(:user, last_activity_on: Date.yesterday) }
+ let(:user) { create(:user, last_activity_on: last_activity_on) }
before do
sign_in(user)
@@ -161,7 +164,7 @@ RSpec.describe GraphqlController do
end
context 'when 2FA is required for the user' do
- let(:user) { create(:user, last_activity_on: Date.yesterday) }
+ let(:user) { create(:user, last_activity_on: last_activity_on) }
before do
group = create(:group, require_two_factor_authentication: true)
@@ -186,14 +189,14 @@ RSpec.describe GraphqlController do
end
context 'when user uses an API token' do
- let(:user) { create(:user, last_activity_on: Date.yesterday) }
+ let(:user) { create(:user, last_activity_on: last_activity_on) }
let(:token) { create(:personal_access_token, user: user, scopes: [:api]) }
let(:query) { '{ __typename }' }
subject { post :execute, params: { query: query, access_token: token.token } }
context 'when the user is a project bot' do
- let(:user) { create(:user, :project_bot, last_activity_on: Date.yesterday) }
+ let(:user) { create(:user, :project_bot, last_activity_on: last_activity_on) }
it 'updates the users last_activity_on field' do
expect { subject }.to change { user.reload.last_activity_on }
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 4b82c5ceb1c..eb3fe4bc330 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -262,142 +262,6 @@ RSpec.describe Groups::ClustersController do
end
end
- describe 'POST #create_aws' do
- let(:params) do
- {
- cluster: {
- name: 'new-cluster',
- provider_aws_attributes: {
- key_name: 'key',
- role_arn: 'arn:role',
- region: 'region',
- vpc_id: 'vpc',
- instance_type: 'instance type',
- num_nodes: 3,
- security_group_id: 'security group',
- subnet_ids: %w(subnet1 subnet2)
- }
- }
- }
- end
-
- def post_create_aws
- post :create_aws, params: params.merge(group_id: group)
- end
-
- include_examples ':certificate_based_clusters feature flag controller responses' do
- let(:subject) { post_create_aws }
- end
-
- it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
- expect { post_create_aws }.to change { Clusters::Cluster.count }
- .and change { Clusters::Providers::Aws.count }
-
- cluster = group.clusters.first
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response.location).to eq(group_cluster_path(group, cluster))
- expect(cluster).to be_aws
- expect(cluster).to be_kubernetes
- end
-
- context 'params are invalid' do
- let(:params) do
- {
- cluster: { name: '' }
- }
- end
-
- it 'does not create a cluster' do
- expect { post_create_aws }.not_to change { Clusters::Cluster.count }
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(response.media_type).to eq('application/json')
- expect(response.body).to include('is invalid')
- end
- end
-
- describe 'security' do
- before do
- allow(WaitForClusterCreationWorker).to receive(:perform_in)
- end
-
- it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { expect { post_create_aws }.to be_allowed_for(:admin) }
- it('is denied for admin when admin mode is disabled') { expect { post_create_aws }.to be_denied_for(:admin) }
- it { expect { post_create_aws }.to be_allowed_for(:owner).of(group) }
- it { expect { post_create_aws }.to be_allowed_for(:maintainer).of(group) }
- it { expect { post_create_aws }.to be_denied_for(:developer).of(group) }
- it { expect { post_create_aws }.to be_denied_for(:reporter).of(group) }
- it { expect { post_create_aws }.to be_denied_for(:guest).of(group) }
- it { expect { post_create_aws }.to be_denied_for(:user) }
- it { expect { post_create_aws }.to be_denied_for(:external) }
- end
- end
-
- describe 'POST authorize AWS role for EKS cluster' do
- let!(:role) { create(:aws_role, user: user) }
-
- let(:role_arn) { 'arn:new-role' }
- let(:params) do
- {
- cluster: {
- role_arn: role_arn
- }
- }
- end
-
- def go
- post :authorize_aws_role, params: params.merge(group_id: group)
- end
-
- include_examples ':certificate_based_clusters feature flag controller responses' do
- let(:subject) { go }
- end
-
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_return(double(execute: double))
- end
-
- it 'updates the associated role with the supplied ARN' do
- go
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(role.reload.role_arn).to eq(role_arn)
- end
-
- context 'supplied role is invalid' do
- let(:role_arn) { 'invalid-role' }
-
- it 'does not update the associated role' do
- expect { go }.not_to change { role.role_arn }
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- describe 'security' do
- before do
- allow_next_instance_of(Clusters::Aws::AuthorizeRoleService) do |service|
- response = double(status: :ok, body: double)
-
- allow(service).to receive(:execute).and_return(response)
- end
- end
-
- it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { expect { go }.to be_allowed_for(:admin) }
- it('is denied for admin when admin mode is disabled') { expect { go }.to be_denied_for(:admin) }
- it { expect { go }.to be_allowed_for(:owner).of(group) }
- it { expect { go }.to be_allowed_for(:maintainer).of(group) }
- it { expect { go }.to be_denied_for(:developer).of(group) }
- it { expect { go }.to be_denied_for(:reporter).of(group) }
- it { expect { go }.to be_denied_for(:guest).of(group) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
- end
-
describe 'DELETE clear cluster cache' do
let(:cluster) { create(:cluster, :group, groups: [group]) }
let!(:kubernetes_namespace) do
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index 25d32436d58..c6fd184ede0 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -305,11 +305,37 @@ RSpec.describe Groups::GroupMembersController do
group.add_owner(user)
end
- it 'cannot removes himself from the group' do
+ it 'cannot remove themselves from the group' do
delete :leave, params: { group_id: group }
expect(response).to have_gitlab_http_status(:forbidden)
end
+
+ context 'and there is a group project bot owner' do
+ before do
+ create(:group_member, :owner, source: group, user: create(:user, :project_bot))
+ end
+
+ it 'cannot remove themselves from the group' do
+ delete :leave, params: { group_id: group }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'and there is another owner' do
+ before do
+ create(:group_member, :owner, source: group)
+ end
+
+ it 'removes user from members', :aggregate_failures do
+ delete :leave, params: { group_id: group }
+
+ expect(controller).to set_flash.to "You left the \"#{group.name}\" group."
+ expect(response).to redirect_to(dashboard_groups_path)
+ expect(group.users).not_to include user
+ end
+ end
end
context 'and is a requester' do
diff --git a/spec/controllers/groups/settings/integrations_controller_spec.rb b/spec/controllers/groups/settings/integrations_controller_spec.rb
index c070094babd..377c38ce087 100644
--- a/spec/controllers/groups/settings/integrations_controller_spec.rb
+++ b/spec/controllers/groups/settings/integrations_controller_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Groups::Settings::IntegrationsController do
end
describe '#update' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let(:integration) { create(:jira_integration, :group, group: group) }
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 4a74eff90dc..aabceda7187 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe GroupsController, factory_default: :keep do
let_it_be(:guest) { group.add_guest(create(:user)).user }
before do
- stub_feature_flags(vue_issues_list: true)
enable_admin_mode!(admin_with_admin_mode)
end
@@ -373,13 +372,26 @@ RSpec.describe GroupsController, factory_default: :keep do
end
end
- it 'displays an error when the reCAPTCHA is not solved' do
- allow(controller).to receive(:verify_recaptcha).and_return(false)
+ context 'when the reCAPTCHA is not solved' do
+ before do
+ allow(controller).to receive(:verify_recaptcha).and_return(false)
+ end
- post :create, params: { group: { name: 'new_group', path: "new_group" } }
+ it 'displays an error' do
+ post :create, params: { group: { name: 'new_group', path: "new_group" } }
+
+ expect(response).to render_template(:new)
+ expect(flash[:alert]).to eq(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ end
+
+ it 'sets gon variables' do
+ Gon.clear
+
+ post :create, params: { group: { name: 'new_group', path: "new_group" } }
- expect(response).to render_template(:new)
- expect(flash[:alert]).to eq(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ expect(response).to render_template(:new)
+ expect(Gon.all_variables).not_to be_empty
+ end
end
it 'allows creating a group when the reCAPTCHA is solved' do
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index 70dc710f604..26e65711e9f 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -4,34 +4,35 @@ require 'spec_helper'
RSpec.describe HelpController do
include StubVersion
+ include DocUrlHelper
let(:user) { create(:user) }
shared_examples 'documentation pages local render' do
it 'renders HTML' do
aggregate_failures do
- is_expected.to render_template('show.html.haml')
+ is_expected.to render_template('help/show')
expect(response.media_type).to eq 'text/html'
end
end
end
shared_examples 'documentation pages redirect' do |documentation_base_url|
- let(:gitlab_version) { '13.4.0-ee' }
+ let(:gitlab_version) { version }
before do
stub_version(gitlab_version, 'ignored_revision_value')
end
it 'redirects user to custom documentation url with a specified version' do
- is_expected.to redirect_to("#{documentation_base_url}/13.4/ee/#{path}.html")
+ is_expected.to redirect_to(doc_url(documentation_base_url))
end
context 'when it is a pre-release' do
let(:gitlab_version) { '13.4.0-pre' }
it 'redirects user to custom documentation url without a version' do
- is_expected.to redirect_to("#{documentation_base_url}/ee/#{path}.html")
+ is_expected.to redirect_to(doc_url_without_version(documentation_base_url))
end
end
end
@@ -43,7 +44,7 @@ RSpec.describe HelpController do
describe 'GET #index' do
context 'with absolute url' do
it 'keeps the URL absolute' do
- stub_readme("[API](/api/README.md)")
+ stub_doc_file_read(content: "[API](/api/README.md)")
get :index
@@ -53,7 +54,7 @@ RSpec.describe HelpController do
context 'with relative url' do
it 'prefixes it with /help/' do
- stub_readme("[API](api/README.md)")
+ stub_doc_file_read(content: "[API](api/README.md)")
get :index
@@ -63,7 +64,7 @@ RSpec.describe HelpController do
context 'when url is an external link' do
it 'does not change it' do
- stub_readme("[external](https://some.external.link)")
+ stub_doc_file_read(content: "[external](https://some.external.link)")
get :index
@@ -73,7 +74,7 @@ RSpec.describe HelpController do
context 'when relative url with external on same line' do
it 'prefixes it with /help/' do
- stub_readme("[API](api/README.md) [external](https://some.external.link)")
+ stub_doc_file_read(content: "[API](api/README.md) [external](https://some.external.link)")
get :index
@@ -83,7 +84,7 @@ RSpec.describe HelpController do
context 'when relative url with http:// in query' do
it 'prefixes it with /help/' do
- stub_readme("[API](api/README.md?go=https://example.com/)")
+ stub_doc_file_read(content: "[API](api/README.md?go=https://example.com/)")
get :index
@@ -93,7 +94,7 @@ RSpec.describe HelpController do
context 'when mailto URL' do
it 'does not change it' do
- stub_readme("[report bug](mailto:bugs@example.com)")
+ stub_doc_file_read(content: "[report bug](mailto:bugs@example.com)")
get :index
@@ -103,7 +104,7 @@ RSpec.describe HelpController do
context 'when protocol-relative link' do
it 'does not change it' do
- stub_readme("[protocol-relative](//example.com)")
+ stub_doc_file_read(content: "[protocol-relative](//example.com)")
get :index
@@ -146,7 +147,7 @@ RSpec.describe HelpController do
context 'when requested file exists' do
before do
- expect_file_read(File.join(Rails.root, 'doc/user/ssh.md'), content: fixture_file('blockquote_fence_after.md'))
+ stub_doc_file_read(file_name: 'user/ssh.md', content: fixture_file('blockquote_fence_after.md'))
subject
end
@@ -265,10 +266,6 @@ RSpec.describe HelpController do
end
end
- def stub_readme(content)
- expect_file_read(Rails.root.join('doc', 'index.md'), content: content)
- end
-
def stub_two_factor_required
allow(controller).to receive(:two_factor_authentication_required?).and_return(true)
allow(controller).to receive(:current_user_requires_two_factor?).and_return(true)
diff --git a/spec/controllers/import/fogbugz_controller_spec.rb b/spec/controllers/import/fogbugz_controller_spec.rb
index d351e1cc3f3..8f8cc9590a5 100644
--- a/spec/controllers/import/fogbugz_controller_spec.rb
+++ b/spec/controllers/import/fogbugz_controller_spec.rb
@@ -6,14 +6,14 @@ RSpec.describe Import::FogbugzController do
include ImportSpecHelper
let(:user) { create(:user) }
+ let(:token) { FFaker::Lorem.characters(8) }
+ let(:uri) { 'https://example.com' }
before do
sign_in(user)
end
describe 'POST #callback' do
- let(:token) { FFaker::Lorem.characters(8) }
- let(:uri) { 'https://example.com' }
let(:xml_response) { %Q(<?xml version=\"1.0\" encoding=\"UTF-8\"?><response><token><![CDATA[#{token}]]></token></response>) }
it 'attempts to contact Fogbugz server' do
@@ -97,6 +97,38 @@ RSpec.describe Import::FogbugzController do
end
describe 'POST create' do
+ let(:repo_id) { 'FOGBUGZ_REPO_ID' }
+ let(:project) { create(:project) }
+ let(:client) { instance_double(Gitlab::FogbugzImport::Client, user_map: {}) }
+
+ before do
+ allow(controller).to receive(:client).and_return(client)
+ end
+
+ it 'returns the new project' do
+ expect(Import::FogbugzService).to receive(:new).and_return(
+ instance_double(Import::FogbugzService, execute: ServiceResponse.success)
+ )
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'returns an error when service reports an error' do
+ message = 'Error message'
+ status = :unprocessable_entity
+
+ expect(Import::FogbugzService).to receive(:new).and_return(
+ instance_double(Import::FogbugzService, execute: ServiceResponse.error(message: message, http_status: status))
+ )
+
+ post :create, format: :json
+
+ expect(response).to have_gitlab_http_status(status)
+ expect(json_response).to eq({ 'errors' => message })
+ end
+
it_behaves_like 'project import rate limiter'
end
end
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index ef66124bff1..56e55c45e66 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -96,19 +96,6 @@ RSpec.describe Import::GithubController do
describe "POST personal_access_token" do
it_behaves_like 'a GitHub-ish import controller: POST personal_access_token'
-
- it 'passes namespace_id param as query param if it was present' do
- namespace_id = 5
- status_import_url = public_send("status_import_#{provider}_url", { namespace_id: namespace_id })
-
- allow_next_instance_of(Gitlab::LegacyGithubImport::Client) do |client|
- allow(client).to receive(:user).and_return(true)
- end
-
- post :personal_access_token, params: { personal_access_token: 'some-token', namespace_id: 5 }
-
- expect(controller).to redirect_to(status_import_url)
- end
end
describe "GET status" do
diff --git a/spec/controllers/metrics_controller_spec.rb b/spec/controllers/metrics_controller_spec.rb
index 9fa90dde997..6fffa607bf8 100644
--- a/spec/controllers/metrics_controller_spec.rb
+++ b/spec/controllers/metrics_controller_spec.rb
@@ -94,8 +94,8 @@ RSpec.describe MetricsController, :request_store do
end
it 'renders system stats JSON' do
- expect(Prometheus::PidProvider).to receive(:worker_id).and_return('worker-0')
- expect(Gitlab::Metrics::System).to receive(:summary).and_return(summary)
+ allow(Prometheus::PidProvider).to receive(:worker_id).and_return('worker-0')
+ allow(Gitlab::Metrics::System).to receive(:summary).and_return(summary)
get :system
diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb
index 7489f506674..fb90a70d91d 100644
--- a/spec/controllers/oauth/authorizations_controller_spec.rb
+++ b/spec/controllers/oauth/authorizations_controller_spec.rb
@@ -195,6 +195,24 @@ RSpec.describe Oauth::AuthorizationsController do
end
end
end
+
+ context 'when the user is not signed in' do
+ before do
+ sign_out(user)
+ end
+
+ it 'sets a lower session expiry and redirects to the sign in page' do
+ subject
+
+ expect(request.env['rack.session.options'][:expire_after]).to eq(
+ Settings.gitlab['unauthenticated_session_expire_delay']
+ )
+
+ expect(request.session['user_return_to']).to eq("/oauth/authorize?#{params.to_query}")
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
end
describe 'POST #create' do
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index e70b8af2068..9ecef8b7450 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -222,10 +222,36 @@ RSpec.describe OmniauthCallbacksController, type: :controller do
context 'sign up' do
include_context 'sign_up'
- it 'is allowed' do
- post provider
+ context 'when intent to register is added to omniauth params' do
+ before do
+ request.env['omniauth.params'] = { 'intent' => 'register' }
+ end
- expect(request.env['warden']).to be_authenticated
+ it 'is allowed' do
+ post provider
+
+ expect(request.env['warden']).to be_authenticated
+ end
+
+ it 'redirects to welcome path' do
+ post provider
+
+ expect(response).to redirect_to(users_sign_up_welcome_path)
+ end
+ end
+
+ context 'when intent to register is not added to omniauth params' do
+ it 'is allowed' do
+ post provider
+
+ expect(request.env['warden']).to be_authenticated
+ end
+
+ it 'redirects to root path' do
+ post provider
+
+ expect(response).to redirect_to(root_path)
+ end
end
end
diff --git a/spec/controllers/passwords_controller_spec.rb b/spec/controllers/passwords_controller_spec.rb
index 82014282c6e..e4be2fbef3c 100644
--- a/spec/controllers/passwords_controller_spec.rb
+++ b/spec/controllers/passwords_controller_spec.rb
@@ -115,13 +115,26 @@ RSpec.describe PasswordsController do
stub_application_setting(recaptcha_enabled: true)
end
- it 'displays an error when the reCAPTCHA is not solved' do
- Recaptcha.configuration.skip_verify_env.delete('test')
+ context 'when the reCAPTCHA is not solved' do
+ before do
+ Recaptcha.configuration.skip_verify_env.delete('test')
+ end
- perform_request
+ it 'displays an error' do
+ perform_request
+
+ expect(response).to render_template(:new)
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
+ end
- expect(response).to render_template(:new)
- expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
+ it 'sets gon variables' do
+ Gon.clear
+
+ perform_request
+
+ expect(response).to render_template(:new)
+ expect(Gon.all_variables).not_to be_empty
+ end
end
it 'successfully sends password reset when reCAPTCHA is solved' do
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
index 3859af66292..48c747bf074 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
@@ -39,30 +39,19 @@ RSpec.describe Profiles::PersonalAccessTokensController do
describe '#index' do
let!(:active_personal_access_token) { create(:personal_access_token, user: user) }
- let!(:inactive_personal_access_token) { create(:personal_access_token, :revoked, user: user) }
- let!(:impersonation_personal_access_token) { create(:personal_access_token, :impersonation, user: user) }
- let(:token_value) { 's3cr3t' }
before do
- PersonalAccessToken.redis_store!(user.id, token_value)
+ # Impersonation and inactive personal tokens are ignored
+ create(:personal_access_token, :impersonation, user: user)
+ create(:personal_access_token, :revoked, user: user)
get :index
end
- it "retrieves active personal access tokens" do
- expect(assigns(:active_personal_access_tokens)).to include(active_personal_access_token)
- end
-
- it "retrieves inactive personal access tokens" do
- expect(assigns(:inactive_personal_access_tokens)).to include(inactive_personal_access_token)
- end
-
- it "does not retrieve impersonation personal access tokens" do
- expect(assigns(:active_personal_access_tokens)).not_to include(impersonation_personal_access_token)
- expect(assigns(:inactive_personal_access_tokens)).not_to include(impersonation_personal_access_token)
- end
+ it "only includes details of the active personal access token" do
+ active_personal_access_tokens_detail = ::API::Entities::PersonalAccessTokenWithDetails
+ .represent([active_personal_access_token])
- it "retrieves newly created personal access token value" do
- expect(assigns(:new_personal_access_token)).to eql(token_value)
+ expect(assigns(:active_personal_access_tokens).to_json).to eq(active_personal_access_tokens_detail.to_json)
end
it "sets PAT name and scopes" do
diff --git a/spec/controllers/projects/autocomplete_sources_controller_spec.rb b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
index 79edc261809..a5274b6543e 100644
--- a/spec/controllers/projects/autocomplete_sources_controller_spec.rb
+++ b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
@@ -114,17 +114,5 @@ RSpec.describe Projects::AutocompleteSourcesController do
end
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'renders 404' do
- get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 01420e30d24..d45ea268e64 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -272,150 +272,6 @@ RSpec.describe Projects::ClustersController do
end
end
- describe 'POST #create_aws' do
- let(:params) do
- {
- cluster: {
- name: 'new-cluster',
- provider_aws_attributes: {
- key_name: 'key',
- role_arn: 'arn:role',
- region: 'region',
- vpc_id: 'vpc',
- instance_type: 'instance type',
- num_nodes: 3,
- security_group_id: 'security group',
- subnet_ids: %w(subnet1 subnet2)
- }
- }
- }
- end
-
- def post_create_aws
- post :create_aws, params: params.merge(namespace_id: project.namespace, project_id: project)
- end
-
- include_examples ':certificate_based_clusters feature flag controller responses' do
- let(:subject) { post_create_aws }
- end
-
- it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
- expect { post_create_aws }.to change { Clusters::Cluster.count }
- .and change { Clusters::Providers::Aws.count }
-
- cluster = project.clusters.first
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response.location).to eq(project_cluster_path(project, cluster))
- expect(cluster).to be_aws
- expect(cluster).to be_kubernetes
- end
-
- context 'params are invalid' do
- let(:params) do
- {
- cluster: { name: '' }
- }
- end
-
- it 'does not create a cluster' do
- expect { post_create_aws }.not_to change { Clusters::Cluster.count }
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(response.media_type).to eq('application/json')
- expect(response.body).to include('is invalid')
- end
- end
-
- describe 'security' do
- before do
- allow(WaitForClusterCreationWorker).to receive(:perform_in)
- end
-
- it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
- expect { post_create_aws }.to be_allowed_for(:admin)
- end
- it 'is disabled for admin when admin mode disabled' do
- expect { post_create_aws }.to be_denied_for(:admin)
- end
- it { expect { post_create_aws }.to be_allowed_for(:owner).of(project) }
- it { expect { post_create_aws }.to be_allowed_for(:maintainer).of(project) }
- it { expect { post_create_aws }.to be_denied_for(:developer).of(project) }
- it { expect { post_create_aws }.to be_denied_for(:reporter).of(project) }
- it { expect { post_create_aws }.to be_denied_for(:guest).of(project) }
- it { expect { post_create_aws }.to be_denied_for(:user) }
- it { expect { post_create_aws }.to be_denied_for(:external) }
- end
- end
-
- describe 'POST authorize AWS role for EKS cluster' do
- let!(:role) { create(:aws_role, user: user) }
-
- let(:role_arn) { 'arn:new-role' }
- let(:params) do
- {
- cluster: {
- role_arn: role_arn
- }
- }
- end
-
- def go
- post :authorize_aws_role, params: params.merge(namespace_id: project.namespace, project_id: project)
- end
-
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_return(double(execute: double))
- end
-
- include_examples ':certificate_based_clusters feature flag controller responses' do
- let(:subject) { go }
- end
-
- it 'updates the associated role with the supplied ARN' do
- go
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(role.reload.role_arn).to eq(role_arn)
- end
-
- context 'supplied role is invalid' do
- let(:role_arn) { 'invalid-role' }
-
- it 'does not update the associated role' do
- expect { go }.not_to change { role.role_arn }
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- describe 'security' do
- before do
- allow_next_instance_of(Clusters::Aws::AuthorizeRoleService) do |service|
- response = double(status: :ok, body: double)
-
- allow(service).to receive(:execute).and_return(response)
- end
- end
-
- it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
- expect { go }.to be_allowed_for(:admin)
- end
- it 'is disabled for admin when admin mode disabled' do
- expect { go }.to be_denied_for(:admin)
- end
- it { expect { go }.to be_allowed_for(:owner).of(project) }
- it { expect { go }.to be_allowed_for(:maintainer).of(project) }
- it { expect { go }.to be_denied_for(:developer).of(project) }
- it { expect { go }.to be_denied_for(:reporter).of(project) }
- it { expect { go }.to be_denied_for(:guest).of(project) }
- it { expect { go }.to be_denied_for(:user) }
- it { expect { go }.to be_denied_for(:external) }
- end
- end
-
describe 'DELETE clear cluster cache' do
let(:cluster) { create(:cluster, :project, projects: [project]) }
let!(:kubernetes_namespace) { create(:cluster_kubernetes_namespace, cluster: cluster) }
diff --git a/spec/controllers/projects/commits_controller_spec.rb b/spec/controllers/projects/commits_controller_spec.rb
index c7f98406201..26d4725656f 100644
--- a/spec/controllers/projects/commits_controller_spec.rb
+++ b/spec/controllers/projects/commits_controller_spec.rb
@@ -166,6 +166,14 @@ RSpec.describe Projects::CommitsController do
end
end
end
+
+ context 'with markdown cache' do
+ it 'preloads markdown cache for commits' do
+ expect(Commit).to receive(:preload_markdown_cache!).and_call_original
+
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: 'master/README.md' }
+ end
+ end
end
describe "GET /commits/:id/signatures" do
diff --git a/spec/controllers/projects/compare_controller_spec.rb b/spec/controllers/projects/compare_controller_spec.rb
index 9821618df8d..e6e0307d0ca 100644
--- a/spec/controllers/projects/compare_controller_spec.rb
+++ b/spec/controllers/projects/compare_controller_spec.rb
@@ -44,6 +44,14 @@ RSpec.describe Projects::CompareController do
expect(response).to be_successful
end
end
+
+ context 'with missing parameters' do
+ let(:params) { super().merge(from: '', to: '') }
+
+ it 'returns successfully' do
+ expect(response).to be_successful
+ end
+ end
end
describe 'GET show' do
@@ -102,6 +110,23 @@ RSpec.describe Projects::CompareController do
end
end
+ context 'when refs have CI::Pipeline' do
+ let(:from_project_id) { nil }
+ let(:from_ref) { '08f22f25' }
+ let(:to_ref) { '59e29889' }
+
+ before do
+ create(:ci_pipeline, project: project)
+ end
+
+ it 'avoids N+1 queries' do
+ control = ActiveRecord::QueryRecorder.new { show_request }
+
+ # Only 1 query to ci/pipeline.rb is allowed
+ expect(control.find_query(/pipeline\.rb/, 1)).to be_empty
+ end
+ end
+
context 'when the refs exist in different projects that the user can see' do
let(:from_project_id) { public_fork.id }
let(:from_ref) { 'improve%2Fmore-awesome' }
@@ -434,7 +459,7 @@ RSpec.describe Projects::CompareController do
expect(CompareService).to receive(:new).with(project, escaped_to_ref).and_return(compare_service)
expect(compare_service).to receive(:execute).with(project, escaped_from_ref).and_return(compare)
- expect(compare).to receive(:commits).and_return([signature_commit, non_signature_commit])
+ expect(compare).to receive(:commits).and_return(CommitCollection.new(project, [signature_commit, non_signature_commit]))
expect(non_signature_commit).to receive(:has_signature?).and_return(false)
end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index f63e0cea04c..f4cad5790a3 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -208,17 +208,6 @@ RSpec.describe Projects::EnvironmentsController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
-
- it_behaves_like 'avoids N+1 queries on environment detail page'
-
- def create_deployment_with_associations(sequence:)
- commit = project.commit("HEAD~#{sequence}")
- create(:user, email: commit.author_email)
-
- deployer = create(:user)
- build = create(:ci_build, environment: environment.name, pipeline: create(:ci_pipeline, project: environment.project), user: deployer)
- create(:deployment, :success, environment: environment, deployable: build, user: deployer, project: project, sha: commit.sha)
- end
end
describe 'GET edit' do
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
index 5288c0fcf21..3f149afbb02 100644
--- a/spec/controllers/projects/import/jira_controller_spec.rb
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Projects::Import::JiraController do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
diff --git a/spec/controllers/projects/incidents_controller_spec.rb b/spec/controllers/projects/incidents_controller_spec.rb
index 20cf0dcfd3a..460821634b0 100644
--- a/spec/controllers/projects/incidents_controller_spec.rb
+++ b/spec/controllers/projects/incidents_controller_spec.rb
@@ -43,7 +43,6 @@ RSpec.describe Projects::IncidentsController do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
- expect(Gon.features).to include('incidentEscalations' => true)
end
context 'when user is unauthorized' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 8a03c1e709b..1305693372c 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -12,10 +12,6 @@ RSpec.describe Projects::IssuesController do
let(:issue) { create(:issue, project: project) }
let(:spam_action_response_fields) { { 'stub_spam_action_response_fields' => true } }
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
describe "GET #index" do
context 'external issue tracker' do
before do
@@ -145,13 +141,104 @@ RSpec.describe Projects::IssuesController do
project.add_developer(user)
end
- it "returns issue_email_participants" do
+ it "returns issue attributes" do
participants = create_list(:issue_email_participant, 2, issue: issue)
get :show, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }, format: :json
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['issue_email_participants']).to contain_exactly({ "email" => participants[0].email }, { "email" => participants[1].email })
+ expect(json_response).to include(
+ 'issue_email_participants' => contain_exactly(
+ { "email" => participants[0].email }, { "email" => participants[1].email }
+ ),
+ 'type' => 'ISSUE'
+ )
+ end
+
+ context 'when issue is not a task and work items feature flag is enabled' do
+ it 'does not redirect to work items route' do
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
+
+ expect(response).to render_template(:show)
+ end
+ end
+
+ context 'when issue is of type task' do
+ let(:query) { {} }
+
+ let_it_be(:task) { create(:issue, :task, project: project) }
+
+ context 'when work_items feature flag is enabled' do
+ shared_examples 'redirects to show work item page' do
+ it 'redirects to work item page' do
+ expect(response).to redirect_to(project_work_items_path(project, task.id, query))
+ end
+ end
+
+ context 'show action' do
+ let(:query) { { query: 'any' } }
+
+ before do
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: task.iid, **query }
+ end
+
+ it_behaves_like 'redirects to show work item page'
+ end
+
+ context 'edit action' do
+ let(:query) { { query: 'any' } }
+
+ before do
+ get :edit, params: { namespace_id: project.namespace, project_id: project, id: task.iid, **query }
+ end
+
+ it_behaves_like 'redirects to show work item page'
+ end
+
+ context 'update action' do
+ before do
+ put :update, params: { namespace_id: project.namespace, project_id: project, id: task.iid, issue: { title: 'New title' } }
+ end
+
+ it_behaves_like 'redirects to show work item page'
+ end
+ end
+
+ context 'when work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ shared_examples 'renders 404' do
+ it 'renders 404 for show action' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'show action' do
+ before do
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: task.iid }
+ end
+
+ it_behaves_like 'renders 404'
+ end
+
+ context 'edit action' do
+ before do
+ get :edit, params: { namespace_id: project.namespace, project_id: project, id: task.iid }
+ end
+
+ it_behaves_like 'renders 404'
+ end
+
+ context 'update action' do
+ before do
+ put :update, params: { namespace_id: project.namespace, project_id: project, id: task.iid, issue: { title: 'New title' } }
+ end
+
+ it_behaves_like 'renders 404'
+ end
+ end
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index f0fbbb65fa5..107eb1ed3a3 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -1075,63 +1075,81 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
before do
project.add_role(user, role)
sign_in(user)
-
- post_erase
end
- shared_examples_for 'erases' do
- it 'redirects to the erased job page' do
- expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to(namespace_project_job_path(id: job.id))
+ context 'when project is not undergoing stats refresh' do
+ before do
+ post_erase
end
- it 'erases artifacts' do
- expect(job.artifacts_file.present?).to be_falsey
- expect(job.artifacts_metadata.present?).to be_falsey
- end
+ shared_examples_for 'erases' do
+ it 'redirects to the erased job page' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(namespace_project_job_path(id: job.id))
+ end
- it 'erases trace' do
- expect(job.trace.exist?).to be_falsey
+ it 'erases artifacts' do
+ expect(job.artifacts_file.present?).to be_falsey
+ expect(job.artifacts_metadata.present?).to be_falsey
+ end
+
+ it 'erases trace' do
+ expect(job.trace.exist?).to be_falsey
+ end
end
- end
- context 'when job is successful and has artifacts' do
- let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }
+ context 'when job is successful and has artifacts' do
+ let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }
- it_behaves_like 'erases'
- end
+ it_behaves_like 'erases'
+ end
- context 'when job has live trace and unarchived artifact' do
- let(:job) { create(:ci_build, :success, :trace_live, :unarchived_trace_artifact, pipeline: pipeline) }
+ context 'when job has live trace and unarchived artifact' do
+ let(:job) { create(:ci_build, :success, :trace_live, :unarchived_trace_artifact, pipeline: pipeline) }
- it_behaves_like 'erases'
- end
+ it_behaves_like 'erases'
+ end
- context 'when job is erased' do
- let(:job) { create(:ci_build, :erased, pipeline: pipeline) }
+ context 'when job is erased' do
+ let(:job) { create(:ci_build, :erased, pipeline: pipeline) }
- it 'returns unprocessable_entity' do
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ it 'returns unprocessable_entity' do
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
end
- end
- context 'when user is developer' do
- let(:role) { :developer }
- let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline, user: triggered_by) }
+ context 'when user is developer' do
+ let(:role) { :developer }
+ let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline, user: triggered_by) }
- context 'when triggered by same user' do
- let(:triggered_by) { user }
+ context 'when triggered by same user' do
+ let(:triggered_by) { user }
- it 'has successful status' do
- expect(response).to have_gitlab_http_status(:found)
+ it 'has successful status' do
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
+ context 'when triggered by different user' do
+ let(:triggered_by) { create(:user) }
+
+ it 'does not have successful status' do
+ expect(response).not_to have_gitlab_http_status(:found)
+ end
end
end
+ end
+
+ context 'when project is undergoing stats refresh' do
+ it_behaves_like 'preventing request because of ongoing project stats refresh' do
+ let(:job) { create(:ci_build, :erasable, :trace_artifact, pipeline: pipeline) }
+ let(:make_request) { post_erase }
- context 'when triggered by different user' do
- let(:triggered_by) { create(:user) }
+ it 'does not erase artifacts' do
+ make_request
- it 'does not have successful status' do
- expect(response).not_to have_gitlab_http_status(:found)
+ expect(job.artifacts_file).to be_present
+ expect(job.artifacts_metadata).to be_present
end
end
end
diff --git a/spec/controllers/projects/mattermosts_controller_spec.rb b/spec/controllers/projects/mattermosts_controller_spec.rb
index 596cd5c1a20..19a04654114 100644
--- a/spec/controllers/projects/mattermosts_controller_spec.rb
+++ b/spec/controllers/projects/mattermosts_controller_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Projects::MattermostsController do
subject
integration = project.integrations.last
- expect(subject).to redirect_to(edit_project_integration_path(project, integration))
+ expect(subject).to redirect_to(edit_project_settings_integration_path(project, integration))
end
end
end
diff --git a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
index 222bb977beb..b9ede84157d 100644
--- a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
@@ -385,6 +385,38 @@ RSpec.describe Projects::MergeRequests::DraftsController do
expect(discussion.resolved?).to eq(false)
end
end
+
+ context 'publish with note' do
+ before do
+ create(:draft_note, merge_request: merge_request, author: user)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(mr_review_submit_comment: false)
+ end
+
+ it 'does not create note' do
+ post :publish, params: params.merge!(note: 'Hello world')
+
+ expect(merge_request.notes.reload.size).to be(1)
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ it 'creates note' do
+ post :publish, params: params.merge!(note: 'Hello world')
+
+ expect(merge_request.notes.reload.size).to be(2)
+ end
+
+ it 'does not create note when note param is empty' do
+ post :publish, params: params.merge!(note: '')
+
+ expect(merge_request.notes.reload.size).to be(1)
+ end
+ end
+ end
end
describe 'DELETE #destroy' do
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index f6db809c2e3..8ccbc0d3fe2 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1094,7 +1094,7 @@ RSpec.describe Projects::MergeRequestsController do
end
context 'when processing coverage reports is completed' do
- let(:report) { { status: :parsed, data: pipeline.coverage_reports } }
+ let(:report) { { status: :parsed, data: { 'files' => {} } } }
it 'returns coverage reports' do
subject
@@ -1730,7 +1730,7 @@ RSpec.describe Projects::MergeRequestsController do
describe 'POST remove_wip' do
before do
- merge_request.title = merge_request.wip_title
+ merge_request.title = merge_request.draft_title
merge_request.save!
post :remove_wip,
@@ -1743,8 +1743,8 @@ RSpec.describe Projects::MergeRequestsController do
xhr: true
end
- it 'removes the wip status' do
- expect(merge_request.reload.title).to eq(merge_request.wipless_title)
+ it 'removes the draft status' do
+ expect(merge_request.reload.title).to eq(merge_request.draftless_title)
end
it 'renders MergeRequest as JSON' do
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 07874c8a8af..85e5de46afd 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -84,100 +84,6 @@ RSpec.describe Projects::NotesController do
end
end
- context 'for multiple pages of notes', :aggregate_failures do
- # 3 pages worth: 1 normal page, 1 oversized due to clashing updated_at,
- # and a final, short page
- let!(:page_1) { create_list(:note, 2, noteable: issue, project: project, updated_at: 3.days.ago) }
- let!(:page_2) { create_list(:note, 3, noteable: issue, project: project, updated_at: 2.days.ago) }
- let!(:page_3) { create_list(:note, 2, noteable: issue, project: project, updated_at: 1.day.ago) }
-
- # Include a resource event in the middle page as well
- let!(:resource_event) { create(:resource_state_event, issue: issue, user: user, created_at: 2.days.ago) }
-
- let(:page_1_boundary) { microseconds(page_1.last.updated_at + NotesFinder::FETCH_OVERLAP) }
- let(:page_2_boundary) { microseconds(page_2.last.updated_at + NotesFinder::FETCH_OVERLAP) }
-
- around do |example|
- freeze_time do
- example.run
- end
- end
-
- before do
- stub_const('Gitlab::UpdatedNotesPaginator::LIMIT', 2)
- end
-
- context 'feature flag enabled' do
- before do
- stub_feature_flags(paginated_notes: true)
- end
-
- it 'returns the first page of notes' do
- expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
-
- get :index, params: request_params
-
- expect(json_response['notes'].count).to eq(page_1.count)
- expect(json_response['more']).to be_truthy
- expect(json_response['last_fetched_at']).to eq(page_1_boundary)
- expect(response.headers['Poll-Interval'].to_i).to eq(1)
- end
-
- it 'returns the second page of notes' do
- expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
-
- request.headers['X-Last-Fetched-At'] = page_1_boundary
-
- get :index, params: request_params
-
- expect(json_response['notes'].count).to eq(page_2.count + 1) # resource event
- expect(json_response['more']).to be_truthy
- expect(json_response['last_fetched_at']).to eq(page_2_boundary)
- expect(response.headers['Poll-Interval'].to_i).to eq(1)
- end
-
- it 'returns the final page of notes' do
- expect(Gitlab::EtagCaching::Middleware).to receive(:skip!)
-
- request.headers['X-Last-Fetched-At'] = page_2_boundary
-
- get :index, params: request_params
-
- expect(json_response['notes'].count).to eq(page_3.count)
- expect(json_response['more']).to be_falsy
- expect(json_response['last_fetched_at']).to eq(microseconds(Time.zone.now))
- expect(response.headers['Poll-Interval'].to_i).to be > 1
- end
-
- it 'returns an empty page of notes' do
- expect(Gitlab::EtagCaching::Middleware).not_to receive(:skip!)
-
- request.headers['X-Last-Fetched-At'] = microseconds(Time.zone.now)
-
- get :index, params: request_params
-
- expect(json_response['notes']).to be_empty
- expect(json_response['more']).to be_falsy
- expect(json_response['last_fetched_at']).to eq(microseconds(Time.zone.now))
- expect(response.headers['Poll-Interval'].to_i).to be > 1
- end
- end
-
- context 'feature flag disabled' do
- before do
- stub_feature_flags(paginated_notes: false)
- end
-
- it 'returns all notes' do
- get :index, params: request_params
-
- expect(json_response['notes'].count).to eq((page_1 + page_2 + page_3).size + 1)
- expect(json_response['more']).to be_falsy
- expect(json_response['last_fetched_at']).to eq(microseconds(Time.zone.now))
- end
- end
- end
-
context 'for a discussion note' do
let(:project) { create(:project, :repository) }
let!(:note) { create(:discussion_note_on_merge_request, project: project) }
diff --git a/spec/controllers/projects/pipelines/tests_controller_spec.rb b/spec/controllers/projects/pipelines/tests_controller_spec.rb
index 113781bab7c..2db54dbe671 100644
--- a/spec/controllers/projects/pipelines/tests_controller_spec.rb
+++ b/spec/controllers/projects/pipelines/tests_controller_spec.rb
@@ -51,18 +51,6 @@ RSpec.describe Projects::Pipelines::TestsController do
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['errors']).to eq('Test report artifacts have expired')
end
-
- context 'when ci_test_report_artifacts_expired is disabled' do
- before do
- stub_feature_flags(ci_test_report_artifacts_expired: false)
- end
- it 'renders test suite', :aggregate_failures do
- get_tests_show_json(build_ids)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['name']).to eq('test')
- end
- end
end
context 'when artifacts are not expired' do
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 1be4177acd1..b3b803649d1 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -1289,6 +1289,18 @@ RSpec.describe Projects::PipelinesController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'and project is undergoing stats refresh' do
+ it_behaves_like 'preventing request because of ongoing project stats refresh' do
+ let(:make_request) { delete_pipeline }
+
+ it 'does not delete the pipeline' do
+ make_request
+
+ expect(Ci::Pipeline.exists?(pipeline.id)).to be_truthy
+ end
+ end
+ end
end
context 'when user has no privileges' do
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 20a114bbe8c..9bb34a38005 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -170,6 +170,46 @@ RSpec.describe Projects::ProjectMembersController do
expect(requester.reload.human_access).to eq(label)
end
end
+
+ describe 'managing project direct owners' do
+ context 'when a Maintainer tries to elevate another user to OWNER' do
+ it 'does not allow the operation' do
+ params = {
+ project_member: { access_level: Gitlab::Access::OWNER },
+ namespace_id: project.namespace,
+ project_id: project,
+ id: requester
+ }
+
+ put :update, params: params, xhr: true
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when a user with OWNER access tries to elevate another user to OWNER' do
+ # inherited owner role via personal project association
+ let(:user) { project.first_owner }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'returns success' do
+ params = {
+ project_member: { access_level: Gitlab::Access::OWNER },
+ namespace_id: project.namespace,
+ project_id: project,
+ id: requester
+ }
+
+ put :update, params: params, xhr: true
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(requester.reload.access_level).to eq(Gitlab::Access::OWNER)
+ end
+ end
+ end
end
context 'access expiry date' do
@@ -275,19 +315,40 @@ RSpec.describe Projects::ProjectMembersController do
context 'when member is found' do
context 'when user does not have enough rights' do
- before do
- project.add_developer(user)
+ context 'when user does not have rights to manage other members' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns 404', :aggregate_failures do
+ delete :destroy, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: member
+ }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(project.members).to include member
+ end
end
- it 'returns 404', :aggregate_failures do
- delete :destroy, params: {
- namespace_id: project.namespace,
- project_id: project,
- id: member
- }
+ context 'when user does not have rights to manage Owner members' do
+ let_it_be(:member) { create(:project_member, project: project, access_level: Gitlab::Access::OWNER) }
- expect(response).to have_gitlab_http_status(:not_found)
- expect(project.members).to include member
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns 403', :aggregate_failures do
+ delete :destroy, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: member
+ }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(project.members).to include member
+ end
end
end
@@ -434,7 +495,7 @@ RSpec.describe Projects::ProjectMembersController do
end
context 'when member is found' do
- context 'when user does not have enough rights' do
+ context 'when user does not have rights to manage other members' do
before do
project.add_developer(user)
end
diff --git a/spec/controllers/projects/prometheus/alerts_controller_spec.rb b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
index f42119e7811..2c2c8180143 100644
--- a/spec/controllers/projects/prometheus/alerts_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
@@ -53,112 +53,6 @@ RSpec.describe Projects::Prometheus::AlertsController do
end
end
- describe 'GET #index' do
- def make_request(opts = {})
- get :index, params: request_params(opts, environment_id: environment)
- end
-
- context 'when project has no prometheus alert' do
- it 'returns an empty response' do
- make_request
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_empty
- end
- end
-
- context 'when project has prometheus alerts' do
- let(:production) { create(:environment, project: project) }
- let(:staging) { create(:environment, project: project) }
- let(:json_alert_ids) { json_response.map { |alert| alert['id'] } }
-
- let!(:production_alerts) do
- create_list(:prometheus_alert, 2, project: project, environment: production)
- end
-
- let!(:staging_alerts) do
- create_list(:prometheus_alert, 1, project: project, environment: staging)
- end
-
- it 'contains prometheus alerts only for the production environment' do
- make_request(environment_id: production)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.count).to eq(2)
- expect(json_alert_ids).to eq(production_alerts.map(&:id))
- end
-
- it 'contains prometheus alerts only for the staging environment' do
- make_request(environment_id: staging)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.count).to eq(1)
- expect(json_alert_ids).to eq(staging_alerts.map(&:id))
- end
-
- it 'does not return prometheus alerts without environment' do
- make_request(environment_id: nil)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_empty
- end
- end
-
- it_behaves_like 'unprivileged'
- it_behaves_like 'project non-specific environment', :ok
- end
-
- describe 'GET #show' do
- let(:alert) do
- create(:prometheus_alert,
- :with_runbook_url,
- project: project,
- environment: environment,
- prometheus_metric: metric)
- end
-
- def make_request(opts = {})
- get :show, params: request_params(
- opts,
- id: alert.prometheus_metric_id,
- environment_id: environment
- )
- end
-
- context 'when alert does not exist' do
- it 'returns not_found' do
- make_request(id: 0)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when alert exists' do
- let(:alert_params) do
- {
- 'id' => alert.id,
- 'title' => alert.title,
- 'query' => alert.query,
- 'operator' => alert.computed_operator,
- 'threshold' => alert.threshold,
- 'runbook_url' => alert.runbook_url,
- 'alert_path' => alert_path(alert)
- }
- end
-
- it 'renders the alert' do
- make_request
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to include(alert_params)
- end
-
- it_behaves_like 'unprivileged'
- it_behaves_like 'project non-specific environment', :not_found
- it_behaves_like 'project non-specific metric', :not_found
- end
- end
-
describe 'POST #notify' do
let(:alert_1) { build(:alert_management_alert, :prometheus, project: project) }
let(:alert_2) { build(:alert_management_alert, :prometheus, project: project) }
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index 7dfa283195e..cd195b95100 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -141,7 +141,7 @@ RSpec.describe Projects::Prometheus::MetricsController do
expect(flash[:notice]).to include('Metric was successfully added.')
- expect(response).to redirect_to(edit_project_integration_path(project, ::Integrations::Prometheus))
+ expect(response).to redirect_to(edit_project_settings_integration_path(project, ::Integrations::Prometheus))
end
end
@@ -168,7 +168,7 @@ RSpec.describe Projects::Prometheus::MetricsController do
expect(metric.reload.title).to eq('new_title')
expect(flash[:notice]).to include('Metric was successfully updated.')
- expect(response).to redirect_to(edit_project_integration_path(project, ::Integrations::Prometheus))
+ expect(response).to redirect_to(edit_project_settings_integration_path(project, ::Integrations::Prometheus))
end
end
end
@@ -180,7 +180,7 @@ RSpec.describe Projects::Prometheus::MetricsController do
it 'destroys the metric' do
delete :destroy, params: project_params(id: metric.id)
- expect(response).to redirect_to(edit_project_integration_path(project, ::Integrations::Prometheus))
+ expect(response).to redirect_to(edit_project_settings_integration_path(project, ::Integrations::Prometheus))
expect(PrometheusMetric.find_by(id: metric.id)).to be_nil
end
end
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index 0dba7dab643..ad6682601f3 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -115,15 +115,6 @@ RSpec.describe Projects::ReleasesController do
expect(json_response.map { |release| release["id"] } ).to eq([release_2.id, release_1.id])
end
- # TODO: remove in https://gitlab.com/gitlab-org/gitlab/-/issues/360903
- it "returns release sha when remove_sha_from_releases_json is disabled" do
- stub_feature_flags(remove_sha_from_releases_json: false)
-
- get_index
-
- expect(json_response).to eq([release_2, release_1].as_json)
- end
-
it_behaves_like 'common access controls'
context 'when the project is private and the user is not logged in' do
@@ -157,19 +148,19 @@ RSpec.describe Projects::ReleasesController do
end
let(:release) { create(:release, project: project) }
- let(:tag) { CGI.escape(release.tag) }
+ let(:tag) { release.tag }
it_behaves_like 'successful request'
context 'when tag name contains slash' do
let(:release) { create(:release, project: project, tag: 'awesome/v1.0') }
- let(:tag) { CGI.escape(release.tag) }
+ let(:tag) { release.tag }
it_behaves_like 'successful request'
it 'is accessible at a URL encoded path' do
expect(edit_project_release_path(project, release))
- .to eq("/#{project.namespace.path}/#{project.name}/-/releases/awesome%252Fv1.0/edit")
+ .to eq("/#{project.namespace.path}/#{project.name}/-/releases/awesome%2Fv1.0/edit")
end
end
@@ -196,19 +187,19 @@ RSpec.describe Projects::ReleasesController do
end
let(:release) { create(:release, project: project) }
- let(:tag) { CGI.escape(release.tag) }
+ let(:tag) { release.tag }
it_behaves_like 'successful request'
context 'when tag name contains slash' do
let(:release) { create(:release, project: project, tag: 'awesome/v1.0') }
- let(:tag) { CGI.escape(release.tag) }
+ let(:tag) { release.tag }
it_behaves_like 'successful request'
it 'is accessible at a URL encoded path' do
expect(project_release_path(project, release))
- .to eq("/#{project.namespace.path}/#{project.name}/-/releases/awesome%252Fv1.0")
+ .to eq("/#{project.namespace.path}/#{project.name}/-/releases/awesome%2Fv1.0")
end
end
@@ -248,7 +239,7 @@ RSpec.describe Projects::ReleasesController do
end
let(:release) { create(:release, project: project) }
- let(:tag) { CGI.escape(release.tag) }
+ let(:tag) { release.tag }
context 'when user is a guest' do
let(:project) { private_project }
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
deleted file mode 100644
index 6802ebeb63e..00000000000
--- a/spec/controllers/projects/services_controller_spec.rb
+++ /dev/null
@@ -1,356 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::ServicesController do
- include JiraServiceHelper
- include AfterNextHelpers
-
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let_it_be(:jira_integration) { create(:jira_integration, project: project) }
-
- let(:integration) { jira_integration }
- let(:integration_params) { { username: 'username', password: 'password', url: 'http://example.com' } }
-
- before do
- sign_in(user)
- project.add_maintainer(user)
- end
-
- it_behaves_like Integrations::Actions do
- let(:integration_attributes) { { project: project } }
-
- let(:routing_params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- id: integration.to_param
- }
- end
- end
-
- describe '#test' do
- context 'when the integration is not testable' do
- it 'renders 404' do
- allow_any_instance_of(Integration).to receive(:testable?).and_return(false)
-
- put :test, params: project_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when validations fail' do
- let(:integration_params) { { active: 'true', url: '' } }
-
- it 'returns error messages in JSON response' do
- put :test, params: project_params(service: integration_params)
-
- expect(json_response['message']).to eq 'Validations failed.'
- expect(json_response['service_response']).to include "Url can't be blank"
- expect(response).to be_successful
- end
- end
-
- context 'when successful' do
- context 'with empty project' do
- let_it_be(:project) { create(:project) }
-
- context 'with chat notification integration' do
- let_it_be(:teams_integration) { project.create_microsoft_teams_integration(webhook: 'http://webhook.com') }
-
- let(:integration) { teams_integration }
-
- it 'returns success' do
- allow_next(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
-
- put :test, params: project_params
-
- expect(response).to be_successful
- end
- end
-
- it 'returns success' do
- stub_jira_integration_test
-
- expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
-
- put :test, params: project_params(service: integration_params)
-
- expect(response).to be_successful
- end
- end
-
- it 'returns success' do
- stub_jira_integration_test
-
- expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
-
- put :test, params: project_params(service: integration_params)
-
- expect(response).to be_successful
- end
-
- context 'when service is configured for the first time' do
- let(:integration_params) do
- {
- 'active' => '1',
- 'push_events' => '1',
- 'token' => 'token',
- 'project_url' => 'https://buildkite.com/organization/pipeline'
- }
- end
-
- before do
- allow_any_instance_of(ServiceHook).to receive(:execute).and_return(true)
- end
-
- it 'persist the object' do
- do_put
-
- expect(response).to be_successful
- expect(json_response).to be_empty
- expect(Integrations::Buildkite.first).to be_present
- end
-
- it 'creates the ServiceHook object' do
- do_put
-
- expect(response).to be_successful
- expect(json_response).to be_empty
- expect(Integrations::Buildkite.first.service_hook).to be_present
- end
-
- def do_put
- put :test, params: project_params(id: 'buildkite',
- service: integration_params)
- end
- end
- end
-
- context 'when unsuccessful' do
- it 'returns an error response when the integration test fails' do
- stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
- .to_return(status: 404)
-
- put :test, params: project_params(service: integration_params)
-
- expect(response).to be_successful
- expect(json_response).to eq(
- 'error' => true,
- 'message' => 'Connection failed. Please check your settings.',
- 'service_response' => '',
- 'test_failed' => true
- )
- end
-
- context 'with the Slack integration' do
- let_it_be(:integration) { build(:integrations_slack) }
-
- it 'returns an error response when the URL is blocked' do
- put :test, params: project_params(service: { webhook: 'http://127.0.0.1' })
-
- expect(response).to be_successful
- expect(json_response).to eq(
- 'error' => true,
- 'message' => 'Connection failed. Please check your settings.',
- 'service_response' => "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed",
- 'test_failed' => true
- )
- end
-
- it 'returns an error response when a network exception is raised' do
- expect_next(Integrations::Slack).to receive(:test).and_raise(Errno::ECONNREFUSED)
-
- put :test, params: project_params
-
- expect(response).to be_successful
- expect(json_response).to eq(
- 'error' => true,
- 'message' => 'Connection failed. Please check your settings.',
- 'service_response' => 'Connection refused',
- 'test_failed' => true
- )
- end
- end
- end
- end
-
- describe 'PUT #update' do
- describe 'as HTML' do
- let(:integration_params) { { active: true } }
- let(:params) { project_params(service: integration_params) }
-
- let(:message) { 'Jira settings saved and active.' }
- let(:redirect_url) { edit_project_integration_path(project, integration) }
-
- before do
- stub_jira_integration_test
-
- put :update, params: params
- end
-
- shared_examples 'integration update' do
- it 'redirects to the correct url with a flash message' do
- expect(response).to redirect_to(redirect_url)
- expect(flash[:notice]).to eq(message)
- end
- end
-
- context 'when param `active` is set to true' do
- let(:params) { project_params(service: integration_params, redirect_to: redirect) }
-
- context 'when redirect_to param is present' do
- let(:redirect) { '/redirect_here' }
- let(:redirect_url) { redirect }
-
- it_behaves_like 'integration update'
- end
-
- context 'when redirect_to is an external domain' do
- let(:redirect) { 'http://examle.com' }
-
- it_behaves_like 'integration update'
- end
-
- context 'when redirect_to param is an empty string' do
- let(:redirect) { '' }
-
- it_behaves_like 'integration update'
- end
- end
-
- context 'when param `active` is set to false' do
- let(:integration_params) { { active: false } }
- let(:message) { 'Jira settings saved, but not active.' }
-
- it_behaves_like 'integration update'
- end
-
- context 'when param `inherit_from_id` is set to empty string' do
- let(:integration_params) { { inherit_from_id: '' } }
-
- it 'sets inherit_from_id to nil' do
- expect(integration.reload.inherit_from_id).to eq(nil)
- end
- end
-
- context 'when param `inherit_from_id` is set to an instance integration' do
- let(:instance_integration) { create(:jira_integration, :instance, url: 'http://instance.com', password: 'instance') }
- let(:integration_params) { { inherit_from_id: instance_integration.id, url: 'http://custom.com', password: 'custom' } }
-
- it 'ignores submitted params and inherits instance settings' do
- expect(integration.reload).to have_attributes(
- inherit_from_id: instance_integration.id,
- url: instance_integration.url,
- password: instance_integration.password
- )
- end
- end
-
- context 'when param `inherit_from_id` is set to a group integration' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
- let_it_be(:jira_integration) { create(:jira_integration, project: project) }
-
- let(:group_integration) { create(:jira_integration, :group, group: group, url: 'http://group.com', password: 'group') }
- let(:integration_params) { { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' } }
-
- it 'ignores submitted params and inherits group settings' do
- expect(integration.reload).to have_attributes(
- inherit_from_id: group_integration.id,
- url: group_integration.url,
- password: group_integration.password
- )
- end
- end
-
- context 'when param `inherit_from_id` is set to an unrelated group' do
- let_it_be(:group) { create(:group) }
-
- let(:group_integration) { create(:jira_integration, :group, group: group, url: 'http://group.com', password: 'group') }
- let(:integration_params) { { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' } }
-
- it 'ignores the param and saves the submitted settings' do
- expect(integration.reload).to have_attributes(
- inherit_from_id: nil,
- url: 'http://custom.com',
- password: 'custom'
- )
- end
- end
- end
-
- describe 'as JSON' do
- before do
- stub_jira_integration_test
- put :update, params: project_params(service: integration_params, format: :json)
- end
-
- context 'when update succeeds' do
- let(:integration_params) { { url: 'http://example.com', password: 'password' } }
-
- it 'returns success response' do
- expect(response).to be_successful
- expect(json_response).to include(
- 'active' => true,
- 'errors' => {}
- )
- end
- end
-
- context 'when update fails with missing password' do
- let(:integration_params) { { url: 'http://example.com' } }
-
- it 'returns JSON response errors' do
- expect(response).not_to be_successful
- expect(json_response).to include(
- 'active' => true,
- 'errors' => {
- 'password' => ["can't be blank"]
- }
- )
- end
- end
-
- context 'when update fails with invalid URL' do
- let(:integration_params) { { url: '', password: 'password' } }
-
- it 'returns JSON response with errors' do
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response).to include(
- 'active' => true,
- 'errors' => { 'url' => ['must be a valid URL', "can't be blank"] }
- )
- end
- end
- end
- end
-
- describe 'GET #edit' do
- context 'with Jira service' do
- let(:integration_param) { 'jira' }
-
- before do
- get :edit, params: project_params(id: integration_param)
- end
-
- context 'with approved services' do
- it 'renders edit page' do
- expect(response).to be_successful
- end
- end
- end
- end
-
- private
-
- def project_params(opts = {})
- opts.reverse_merge(
- namespace_id: project.namespace,
- project_id: project,
- id: integration.to_param
- )
- end
-end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index 7e96e99640a..d50f1aa1dd8 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -25,19 +25,6 @@ RSpec.describe Projects::Settings::CiCdController do
expect(response).to render_template(:show)
end
- context 'when the FF ci_owned_runners_cross_joins_fix is disabled' do
- before do
- stub_feature_flags(ci_owned_runners_cross_joins_fix: false)
- end
-
- it 'renders show with 200 status code' do
- get :show, params: { namespace_id: project.namespace, project_id: project }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:show)
- end
- end
-
context 'with CI/CD disabled' do
before do
project.project_feature.update_attribute(:builds_access_level, ProjectFeature::DISABLED)
diff --git a/spec/controllers/projects/service_hook_logs_controller_spec.rb b/spec/controllers/projects/settings/integration_hook_logs_controller_spec.rb
index be78668aa88..8261461e8aa 100644
--- a/spec/controllers/projects/service_hook_logs_controller_spec.rb
+++ b/spec/controllers/projects/settings/integration_hook_logs_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::ServiceHookLogsController do
+RSpec.describe Projects::Settings::IntegrationHookLogsController do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:integration) { create(:drone_ci_integration, project: project) }
@@ -44,7 +44,8 @@ RSpec.describe Projects::ServiceHookLogsController do
it 'executes the hook and redirects to the service form' do
expect_any_instance_of(ServiceHook).to receive(:execute)
expect_any_instance_of(described_class).to receive(:set_hook_execution_notice)
- expect(subject).to redirect_to(edit_project_integration_path(project, integration))
+
+ expect(subject).to redirect_to(edit_project_settings_integration_path(project, integration))
end
it 'renders a 404 if the hook does not exist' do
diff --git a/spec/controllers/projects/settings/integrations_controller_spec.rb b/spec/controllers/projects/settings/integrations_controller_spec.rb
index 0652786c787..e6ca088a533 100644
--- a/spec/controllers/projects/settings/integrations_controller_spec.rb
+++ b/spec/controllers/projects/settings/integrations_controller_spec.rb
@@ -3,20 +3,388 @@
require 'spec_helper'
RSpec.describe Projects::Settings::IntegrationsController do
- let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
+ include JiraIntegrationHelpers
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
+
+ let(:integration) { jira_integration }
+ let(:integration_params) { { username: 'username', password: 'password', url: 'http://example.com' } }
before do
- project.add_maintainer(user)
sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like Integrations::Actions do
+ let(:integration_attributes) { { project: project } }
+
+ let(:routing_params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: integration.to_param
+ }
+ end
end
- describe 'GET show' do
- it 'renders show with 200 status code' do
- get :show, params: { namespace_id: project.namespace, project_id: project }
+ describe 'GET index' do
+ it 'renders index with 200 status code' do
+ get :index, params: { namespace_id: project.namespace, project_id: project }
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template(:show)
+ expect(response).to render_template(:index)
+ end
+ end
+
+ describe '#test' do
+ context 'when the integration is not testable' do
+ it 'renders 404' do
+ allow_any_instance_of(Integration).to receive(:testable?).and_return(false)
+
+ put :test, params: project_params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when validations fail' do
+ let(:integration_params) { { active: 'true', url: '' } }
+
+ it 'returns error messages in JSON response' do
+ put :test, params: project_params(service: integration_params)
+
+ expect(json_response['message']).to eq 'Validations failed.'
+ expect(json_response['service_response']).to include "Url can't be blank"
+ expect(response).to be_successful
+ end
+ end
+
+ context 'when successful' do
+ context 'with empty project' do
+ let_it_be(:project) { create(:project) }
+
+ context 'with chat notification integration' do
+ let_it_be(:teams_integration) { project.create_microsoft_teams_integration(webhook: 'http://webhook.com') }
+
+ let(:integration) { teams_integration }
+
+ it 'returns success' do
+ allow_next(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
+
+ put :test, params: project_params
+
+ expect(response).to be_successful
+ end
+ end
+
+ it 'returns success' do
+ stub_jira_integration_test
+
+ expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
+
+ put :test, params: project_params(service: integration_params)
+
+ expect(response).to be_successful
+ end
+ end
+
+ it 'returns success' do
+ stub_jira_integration_test
+
+ expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
+
+ put :test, params: project_params(service: integration_params)
+
+ expect(response).to be_successful
+ end
+
+ context 'when service is configured for the first time' do
+ let(:integration_params) do
+ {
+ 'active' => '1',
+ 'push_events' => '1',
+ 'token' => 'token',
+ 'project_url' => 'https://buildkite.com/organization/pipeline'
+ }
+ end
+
+ before do
+ allow_next(ServiceHook).to receive(:execute).and_return(true)
+ end
+
+      it 'persists the object' do
+ do_put
+
+ expect(response).to be_successful
+ expect(json_response).to be_empty
+ expect(Integrations::Buildkite.first).to be_present
+ end
+
+ it 'creates the ServiceHook object' do
+ do_put
+
+ expect(response).to be_successful
+ expect(json_response).to be_empty
+ expect(Integrations::Buildkite.first.service_hook).to be_present
+ end
+
+ def do_put
+ put :test, params: project_params(id: 'buildkite',
+ service: integration_params)
+ end
+ end
+ end
+
+ context 'when unsuccessful' do
+ it 'returns an error response when the integration test fails' do
+ stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
+ .to_return(status: 404)
+
+ put :test, params: project_params(service: integration_params)
+
+ expect(response).to be_successful
+ expect(json_response).to eq(
+ 'error' => true,
+ 'message' => 'Connection failed. Please check your settings.',
+ 'service_response' => '',
+ 'test_failed' => true
+ )
+ end
+
+ context 'with the Slack integration' do
+ let_it_be(:integration) { build(:integrations_slack) }
+
+ it 'returns an error response when the URL is blocked' do
+ put :test, params: project_params(service: { webhook: 'http://127.0.0.1' })
+
+ expect(response).to be_successful
+ expect(json_response).to eq(
+ 'error' => true,
+ 'message' => 'Connection failed. Please check your settings.',
+ 'service_response' => "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed",
+ 'test_failed' => true
+ )
+ end
+
+ it 'returns an error response when a network exception is raised' do
+ expect_next(Integrations::Slack).to receive(:test).and_raise(Errno::ECONNREFUSED)
+
+ put :test, params: project_params
+
+ expect(response).to be_successful
+ expect(json_response).to eq(
+ 'error' => true,
+ 'message' => 'Connection failed. Please check your settings.',
+ 'service_response' => 'Connection refused',
+ 'test_failed' => true
+ )
+ end
+ end
+ end
+ end
+
+ describe 'PUT #update' do
+ describe 'as HTML' do
+ let(:integration_params) { { active: true } }
+ let(:params) { project_params(service: integration_params) }
+
+ let(:message) { 'Jira settings saved and active.' }
+ let(:redirect_url) { edit_project_settings_integration_path(project, integration) }
+
+ before do
+ stub_jira_integration_test
+
+ put :update, params: params
+ end
+
+ shared_examples 'integration update' do
+ it 'redirects to the correct url with a flash message' do
+ expect(response).to redirect_to(redirect_url)
+ expect(flash[:notice]).to eq(message)
+ end
+ end
+
+ context 'when update fails' do
+ let(:integration_params) { { url: 'https://new.com', password: '' } }
+
+ it 'renders the edit form' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ expect(integration.reload.url).not_to eq('https://new.com')
+ end
+ end
+
+ context 'when param `active` is set to true' do
+ let(:params) { project_params(service: integration_params, redirect_to: redirect) }
+
+ context 'when redirect_to param is present' do
+ let(:redirect) { '/redirect_here' }
+ let(:redirect_url) { redirect }
+
+ it_behaves_like 'integration update'
+ end
+
+ context 'when redirect_to is an external domain' do
+        let(:redirect) { 'http://example.com' }
+
+ it_behaves_like 'integration update'
+ end
+
+ context 'when redirect_to param is an empty string' do
+ let(:redirect) { '' }
+
+ it_behaves_like 'integration update'
+ end
+ end
+
+ context 'when param `active` is set to false' do
+ let(:integration_params) { { active: false } }
+ let(:message) { 'Jira settings saved, but not active.' }
+
+ it_behaves_like 'integration update'
+ end
+
+ context 'when param `inherit_from_id` is set to empty string' do
+ let(:integration_params) { { inherit_from_id: '' } }
+
+ it 'sets inherit_from_id to nil' do
+ expect(integration.reload.inherit_from_id).to eq(nil)
+ end
+ end
+
+ context 'when param `inherit_from_id` is set to an instance integration' do
+ let(:instance_integration) do
+ create(:jira_integration, :instance, url: 'http://instance.com', password: 'instance')
+ end
+
+ let(:integration_params) do
+ { inherit_from_id: instance_integration.id, url: 'http://custom.com', password: 'custom' }
+ end
+
+ it 'ignores submitted params and inherits instance settings' do
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: instance_integration.id,
+ url: instance_integration.url,
+ password: instance_integration.password
+ )
+ end
+ end
+
+ context 'when param `inherit_from_id` is set to a group integration' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
+
+ let(:group_integration) do
+ create(:jira_integration, :group, group: group, url: 'http://group.com', password: 'group')
+ end
+
+ let(:integration_params) do
+ { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' }
+ end
+
+ it 'ignores submitted params and inherits group settings' do
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: group_integration.id,
+ url: group_integration.url,
+ password: group_integration.password
+ )
+ end
+ end
+
+ context 'when param `inherit_from_id` is set to an unrelated group' do
+ let_it_be(:group) { create(:group) }
+
+ let(:group_integration) do
+ create(:jira_integration, :group, group: group, url: 'http://group.com', password: 'group')
+ end
+
+ let(:integration_params) do
+ { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' }
+ end
+
+ it 'ignores the param and saves the submitted settings' do
+ expect(integration.reload).to have_attributes(
+ inherit_from_id: nil,
+ url: 'http://custom.com',
+ password: 'custom'
+ )
+ end
+ end
+ end
+
+ describe 'as JSON' do
+ before do
+ stub_jira_integration_test
+ put :update, params: project_params(service: integration_params, format: :json)
+ end
+
+ context 'when update succeeds' do
+ let(:integration_params) { { url: 'http://example.com', password: 'password' } }
+
+ it 'returns success response' do
+ expect(response).to be_successful
+ expect(json_response).to include(
+ 'active' => true,
+ 'errors' => {}
+ )
+ end
+ end
+
+ context 'when update fails with missing password' do
+ let(:integration_params) { { url: 'http://example.com' } }
+
+ it 'returns JSON response errors' do
+ expect(response).not_to be_successful
+ expect(json_response).to include(
+ 'active' => true,
+ 'errors' => {
+ 'password' => ["can't be blank"]
+ }
+ )
+ end
+ end
+
+ context 'when update fails with invalid URL' do
+ let(:integration_params) { { url: '', password: 'password' } }
+
+ it 'returns JSON response with errors' do
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response).to include(
+ 'active' => true,
+ 'errors' => { 'url' => ['must be a valid URL', "can't be blank"] }
+ )
+ end
+ end
+ end
+ end
+
+ describe 'GET #edit' do
+ context 'with Jira service' do
+ let(:integration_param) { 'jira' }
+
+ before do
+ get :edit, params: project_params(id: integration_param)
+ end
+
+ context 'with approved services' do
+ it 'renders edit page' do
+ expect(response).to be_successful
+ end
+ end
end
end
+
+ private
+
+ def project_params(opts = {})
+ opts.reverse_merge(
+ namespace_id: project.namespace,
+ project_id: project,
+ id: integration.to_param
+ )
+ end
end
diff --git a/spec/controllers/projects/static_site_editor_controller_spec.rb b/spec/controllers/projects/static_site_editor_controller_spec.rb
deleted file mode 100644
index e1f25589eeb..00000000000
--- a/spec/controllers/projects/static_site_editor_controller_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::StaticSiteEditorController do
- let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:user) { create(:user) }
-
- let(:data) { { key: 'value' } }
-
- describe 'GET index' do
- let(:default_params) do
- {
- namespace_id: project.namespace,
- project_id: project
- }
- end
-
- it 'responds with 404 page' do
- get :index, params: default_params
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- describe 'GET show' do
- render_views
-
- let(:default_params) do
- {
- namespace_id: project.namespace,
- project_id: project,
- id: 'master/README.md',
- return_url: 'http://example.com'
- }
- end
-
- let(:service_response) do
- ServiceResponse.success(payload: data)
- end
-
- before do
- allow_next_instance_of(::StaticSiteEditor::ConfigService) do |instance|
- allow(instance).to receive(:execute).and_return(service_response)
- end
- end
-
- context 'User roles' do
- context 'anonymous' do
- before do
- get :show, params: default_params
- end
-
- it 'redirects to sign in and returns' do
- expect(response).to redirect_to(new_user_session_path)
- end
- end
-
- context 'as guest' do
- before do
- project.add_guest(user)
- sign_in(user)
- get :show, params: default_params
- end
-
- it 'responds with 404 page' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context "as developer" do
- before do
- allow(Gitlab::UsageDataCounters::StaticSiteEditorCounter).to receive(:increment_views_count)
- project.add_role(user, 'developer')
- sign_in(user)
- get :show, params: default_params
- end
-
- it 'redirects to the Web IDE' do
- get :show, params: default_params
-
- expected_path_regex = %r[-/ide/project/#{project.full_path}/edit/master/-/README.md]
- expect(response).to redirect_to(expected_path_regex)
- end
-
- it 'assigns ref and path variables' do
- expect(assigns(:ref)).to eq('master')
- expect(assigns(:path)).to eq('README.md')
- end
-
- context 'when combination of ref and path is incorrect' do
- let(:default_params) { super().merge(id: 'unknown') }
-
- it 'responds with 404 page' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
- end
-end
diff --git a/spec/controllers/projects/tags_controller_spec.rb b/spec/controllers/projects/tags_controller_spec.rb
index d0971e96910..3d1f8c12022 100644
--- a/spec/controllers/projects/tags_controller_spec.rb
+++ b/spec/controllers/projects/tags_controller_spec.rb
@@ -205,15 +205,13 @@ RSpec.describe Projects::TagsController do
before do
project.add_developer(user)
sign_in(user)
- end
-
- it 'deletes tag' do
request
+ end
- expect(response).to be_successful
- expect(response.body).to include("Tag was removed")
-
+ it 'deletes tag and redirects to tags path' do
expect(project.repository.find_tag(tag.name)).not_to be_present
+ expect(controller).to set_flash[:notice].to(/Tag was removed/)
+ expect(response).to redirect_to(project_tags_path(project))
end
end
end
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
index c444875bf74..8a5a8490a23 100644
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ b/spec/controllers/registrations/welcome_controller_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe Registrations::WelcomeController do
context 'when role and setup_for_company is set' do
before do
+ stub_feature_flags(about_your_company_registration_flow: false)
user.update!(setup_for_company: false)
sign_in(user)
end
@@ -60,6 +61,10 @@ RSpec.describe Registrations::WelcomeController do
end
describe '#update' do
+ before do
+ stub_feature_flags(about_your_company_registration_flow: false)
+ end
+
subject(:update) do
patch :update, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index caff7bcfc7b..36b230103db 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -292,13 +292,26 @@ RSpec.describe RegistrationsController do
end
end
- it 'displays an error when the reCAPTCHA is not solved' do
- allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
+ context 'when the reCAPTCHA is not solved' do
+ before do
+ allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
+ end
- subject
+ it 'displays an error' do
+ subject
+
+ expect(response).to render_template(:new)
+ expect(flash[:alert]).to eq(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ end
+
+ it 'sets gon variables' do
+ Gon.clear
- expect(response).to render_template(:new)
- expect(flash[:alert]).to eq(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ subject
+
+ expect(response).to render_template(:new)
+ expect(Gon.all_variables).not_to be_empty
+ end
end
it 'redirects to the welcome page when the reCAPTCHA is solved' do
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index fb2637238ec..448587c937a 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -43,18 +43,6 @@ RSpec.describe Repositories::GitHttpController do
post :git_upload_pack, params: params
end
- context 'on a read-only instance' do
- before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
- end
-
- it 'does not update project statistics' do
- expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
-
- send_request
- end
- end
-
it 'updates project statistics sync for projects' do
stub_feature_flags(disable_git_http_fetch_writes: false)
@@ -83,7 +71,6 @@ RSpec.describe Repositories::GitHttpController do
it 'does not increment statistics' do
expect(Projects::FetchStatisticsIncrementService).not_to receive(:new)
- expect(ProjectDailyStatisticsWorker).not_to receive(:perform_async)
send_request
end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 877ca7cd6c6..0e0770fb94c 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -233,14 +233,23 @@ RSpec.describe SessionsController do
request.headers[described_class::CAPTCHA_HEADER] = '1'
end
- it 'displays an error when the reCAPTCHA is not solved' do
- # Without this, `verify_recaptcha` arbitrarily returns true in test env
+ context 'when the reCAPTCHA is not solved' do
+ it 'displays an error' do
+ unsuccesful_login(user_params)
- unsuccesful_login(user_params)
+ expect(response).to render_template(:new)
+ expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
+ expect(subject.current_user).to be_nil
+ end
- expect(response).to redirect_to new_user_session_path
- expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
- expect(subject.current_user).to be_nil
+ it 'sets gon variables' do
+ Gon.clear
+
+ unsuccesful_login(user_params)
+
+ expect(response).to render_template(:new)
+ expect(Gon.all_variables).not_to be_empty
+ end
end
it 'successfully logs in a user when reCAPTCHA is solved' do
@@ -262,7 +271,7 @@ RSpec.describe SessionsController do
it 'displays an error when the reCAPTCHA is not solved' do
unsuccesful_login(user_params, sesion_params: { failed_login_attempts: 6 })
- expect(response).to redirect_to new_user_session_path
+ expect(response).to render_template(:new)
expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
expect(subject.current_user).to be_nil
end
@@ -282,7 +291,7 @@ RSpec.describe SessionsController do
it 'displays an error when the reCAPTCHA is not solved' do
unsuccesful_login(user_params)
- expect(response).to redirect_to new_user_session_path
+ expect(response).to render_template(:new)
expect(flash[:alert]).to include _('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
expect(subject.current_user).to be_nil
end
diff --git a/spec/db/migration_spec.rb b/spec/db/migration_spec.rb
index ac649925751..7987c78b423 100644
--- a/spec/db/migration_spec.rb
+++ b/spec/db/migration_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Migrations Validation' do
let(:all_migration_classes) do
{
2022_01_26_21_06_58.. => Gitlab::Database::Migration[2.0],
- 2021_09_01_15_33_24.. => Gitlab::Database::Migration[1.0],
+ 2021_09_01_15_33_24..2022_04_25_12_06_03 => Gitlab::Database::Migration[1.0],
2021_05_31_05_39_16..2021_09_01_15_33_24 => ActiveRecord::Migration[6.1],
..2021_05_31_05_39_16 => ActiveRecord::Migration[6.0]
}
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index e21c73976a8..2d8454988d9 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -47,7 +47,6 @@ RSpec.describe 'Database schema' do
events: %w[target_id],
forked_project_links: %w[forked_from_project_id],
geo_event_log: %w[hashed_storage_attachments_event_id],
- geo_lfs_object_deleted_events: %w[lfs_object_id],
geo_node_statuses: %w[last_event_id cursor_last_event_id],
geo_nodes: %w[oauth_application_id],
geo_repository_deleted_events: %w[project_id],
@@ -94,7 +93,10 @@ RSpec.describe 'Database schema' do
vulnerability_identifiers: %w[external_id],
vulnerability_scanners: %w[external_id],
security_scans: %w[pipeline_id], # foreign key is not added as ci_pipeline table will be moved into different db soon
- vulnerability_reads: %w[cluster_agent_id]
+ vulnerability_reads: %w[cluster_agent_id],
+ # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87584
+ # Fixes performance issues with the deletion of web-hooks with many log entries
+ web_hook_logs: %w[web_hook_id]
}.with_indifferent_access.freeze
context 'for table' do
@@ -256,14 +258,6 @@ RSpec.describe 'Database schema' do
end
context 'primary keys' do
- let(:exceptions) do
- %i(
- elasticsearch_indexed_namespaces
- elasticsearch_indexed_projects
- merge_request_context_commit_diff_files
- )
- end
-
it 'expects every table to have a primary key defined' do
connection = ActiveRecord::Base.connection
@@ -271,7 +265,7 @@ RSpec.describe 'Database schema' do
!connection.primary_key(table).present?
end.map(&:to_sym)
- expect(problematic_tables - exceptions).to be_empty
+ expect(problematic_tables).to be_empty
end
end
diff --git a/spec/events/pages/page_deleted_event_spec.rb b/spec/events/pages/page_deleted_event_spec.rb
new file mode 100644
index 00000000000..ee05b770c48
--- /dev/null
+++ b/spec/events/pages/page_deleted_event_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::PageDeletedEvent do
+ where(:data, :valid) do
+ [
+ [{ project_id: 1, namespace_id: 2 }, true],
+ [{ project_id: 1 }, false],
+ [{ namespace_id: 1 }, false],
+ [{ project_id: 'foo', namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: 'foo' }, false],
+ [{ project_id: [], namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: [] }, false],
+ [{ project_id: {}, namespace_id: 2 }, false],
+ [{ project_id: 1, namespace_id: {} }, false],
+ ['foo', false],
+ [123, false],
+ [[], false]
+ ]
+ end
+
+ with_them do
+ it 'validates data' do
+ constructor = -> { described_class.new(data: data) }
+
+ if valid
+ expect { constructor.call }.not_to raise_error
+ else
+ expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+ end
+end
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 13c12afc15d..a39890dd530 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe ApplicationExperiment, :experiment do
before do
stub_feature_flag_definition(:namespaced_stub, feature_definition)
+ allow(Gitlab::FIPS).to receive(:enabled?).and_return(true)
allow(application_experiment).to receive(:enabled?).and_return(true)
end
@@ -137,7 +138,11 @@ RSpec.describe ApplicationExperiment, :experiment do
},
{
schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
- data: { experiment: 'namespaced/stub', key: '86208ac54ca798e11f127e8b23ec396a', variant: 'control' }
+ data: {
+ experiment: 'namespaced/stub',
+ key: '300b002687ba1f68591adb2f45ae67f1e56be05ad55f317cc00f1c4aa38f081a',
+ variant: 'control'
+ }
}
]
)
@@ -214,8 +219,18 @@ RSpec.describe ApplicationExperiment, :experiment do
end
describe "#key_for" do
- it "generates MD5 hashes" do
- expect(application_experiment.key_for(foo: :bar)).to eq('6f9ac12afdb9b58c2f19a136d09f9153')
+ it "generates FIPS compliant SHA2 hashes" do
+ expect(application_experiment.key_for(foo: :bar))
+ .to eq('1206febc4d022294fc639d68c2905079898ea4fee99290785b822e5010f1a9d1')
+ end
+
+ it "falls back to legacy MD5 when FIPS isn't forced" do
+ # Please see https://gitlab.com/gitlab-org/gitlab/-/issues/334590 about
+ # why this remains and why it hasn't been prioritized.
+
+ allow(Gitlab::FIPS).to receive(:enabled?).and_return(false)
+ expect(application_experiment.key_for(foo: :bar))
+ .to eq('6f9ac12afdb9b58c2f19a136d09f9153')
end
end
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index 7e9e58edc1e..443e43d5fd1 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -95,10 +95,6 @@ FactoryBot.define do
severity { 'unknown' }
end
- trait :threat_monitoring do
- domain { :threat_monitoring }
- end
-
trait :prometheus do
monitoring_tool { Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus] }
payload do
diff --git a/spec/factories/application_settings.rb b/spec/factories/application_settings.rb
index c28d3c20a86..844e21df60c 100644
--- a/spec/factories/application_settings.rb
+++ b/spec/factories/application_settings.rb
@@ -5,5 +5,9 @@ FactoryBot.define do
default_projects_limit { 42 }
import_sources { [] }
restricted_visibility_levels { [] }
+
+ after(:build) do |settings|
+ settings.ensure_key_restrictions!
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 56c12d73a3b..97ddbf21b99 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -33,6 +33,18 @@ FactoryBot.define do
name { generate(:job_name) }
end
+ trait :matrix do
+ sequence(:name) { |n| "job: [#{n}]" }
+ options do
+ {
+ parallel: {
+ total: 2,
+ matrix: [{ ID: %w[1 2] }]
+ }
+ }
+ end
+ end
+
trait :dependent do
scheduling_type { 'dag' }
@@ -494,13 +506,15 @@ FactoryBot.define do
trait :with_commit do
after(:build) do |build|
- allow(build).to receive(:commit).and_return build(:commit, :without_author)
+ commit = build(:commit, :without_author)
+ stub_method(build, :commit) { commit }
end
end
trait :with_commit_and_author do
after(:build) do |build|
- allow(build).to receive(:commit).and_return build(:commit)
+ commit = build(:commit)
+ stub_method(build, :commit) { commit }
end
end
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index 10fa739acc1..919b45e57e2 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -10,19 +10,18 @@ FactoryBot.define do
before(:create) do |_record, evaluator|
if evaluator.helm_installed
- allow(Gitlab::Kubernetes::Helm::V2::Certificate).to receive(:generate_root)
- .and_return(
- double(
- key_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_key.key')),
- cert_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem'))
- )
+ stub_method(Gitlab::Kubernetes::Helm::V2::Certificate, :generate_root) do
+ OpenStruct.new( # rubocop: disable Style/OpenStructUse
+ key_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_key.key')),
+ cert_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem'))
)
+ end
end
end
after(:create) do |_record, evaluator|
if evaluator.helm_installed
- allow(Gitlab::Kubernetes::Helm::V2::Certificate).to receive(:generate_root).and_call_original
+ restore_original_methods(Gitlab::Kubernetes::Helm::V2::Certificate)
end
end
diff --git a/spec/factories/clusters/cluster_enabled_grant.rb b/spec/factories/clusters/cluster_enabled_grant.rb
new file mode 100644
index 00000000000..f995bc876f3
--- /dev/null
+++ b/spec/factories/clusters/cluster_enabled_grant.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :cluster_enabled_grant, class: 'Clusters::ClusterEnabledGrant' do
+ namespace
+ end
+end
diff --git a/spec/factories/commit_signature/ssh_signature.rb b/spec/factories/commit_signature/ssh_signature.rb
new file mode 100644
index 00000000000..097415ba69c
--- /dev/null
+++ b/spec/factories/commit_signature/ssh_signature.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ssh_signature, class: 'CommitSignatures::SshSignature' do
+ commit_sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
+ project
+ key
+ verification_status { :verified }
+ end
+end
diff --git a/spec/factories/commits.rb b/spec/factories/commits.rb
index d006f9baf1f..4b1c74110ef 100644
--- a/spec/factories/commits.rb
+++ b/spec/factories/commits.rb
@@ -28,19 +28,20 @@ FactoryBot.define do
end
after(:build) do |commit, evaluator|
- allow(commit).to receive(:author).and_return(evaluator.author || build_stubbed(:author))
- allow(commit).to receive(:parent_ids).and_return([])
+ author = evaluator.author || build_stubbed(:author)
+ stub_method(commit, :author) { author }
+ stub_method(commit, :parent_ids) { [] }
end
trait :merge_commit do
after(:build) do |commit|
- allow(commit).to receive(:parent_ids).and_return(Array.new(2) { SecureRandom.hex(20) })
+ stub_method(commit, :parent_ids) { Array.new(2) { SecureRandom.hex(20) } }
end
end
trait :without_author do
after(:build) do |commit|
- allow(commit).to receive(:author).and_return nil
+ stub_method(commit, :author) { nil }
end
end
end
diff --git a/spec/factories/container_repositories.rb b/spec/factories/container_repositories.rb
index ce83e9e8006..210441430b0 100644
--- a/spec/factories/container_repositories.rb
+++ b/spec/factories/container_repositories.rb
@@ -85,13 +85,12 @@ FactoryBot.define do
tags = evaluator.tags
# convert Array into Hash
tags = tags.product(['sha256:4c8e63ca4cb663ce6c688cb06f1c372b088dac5b6d7ad7d49cd620d85cf72a15']).to_h unless tags.is_a?(Hash)
-
- allow(repository.client)
- .to receive(:repository_tags)
- .and_return({
+ stub_method(repository.client, :repository_tags) do |*args|
+ {
'name' => repository.path,
'tags' => tags.keys
- })
+ }
+ end
tags.each_pair do |tag, digest|
allow(repository.client)
diff --git a/spec/factories/deployments.rb b/spec/factories/deployments.rb
index ab1b794632a..204b917fa4a 100644
--- a/spec/factories/deployments.rb
+++ b/spec/factories/deployments.rb
@@ -15,7 +15,7 @@ FactoryBot.define do
deployment.user ||= deployment.project.creator
unless deployment.project.repository_exists?
- allow(deployment.project.repository).to receive(:create_ref)
+ stub_method(deployment.project.repository, :create_ref) { nil }
end
if deployment.cluster && deployment.cluster.project_type? && deployment.cluster.project.nil?
diff --git a/spec/factories/environments.rb b/spec/factories/environments.rb
index 0a9255e1abe..ccd2011eb8d 100644
--- a/spec/factories/environments.rb
+++ b/spec/factories/environments.rb
@@ -46,7 +46,7 @@ FactoryBot.define do
after(:create) do |environment, evaluator|
pipeline = create(:ci_pipeline, project: environment.project)
- deployable = create(:ci_build, name: "#{environment.name}:deploy",
+ deployable = create(:ci_build, :success, name: "#{environment.name}:deploy",
pipeline: pipeline)
deployment = create(:deployment,
diff --git a/spec/factories/gitlab/database/background_migration/batched_migrations.rb b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
index 5ff90ff44b9..ea0fb571cc4 100644
--- a/spec/factories/gitlab/database/background_migration/batched_migrations.rb
+++ b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
@@ -12,6 +12,7 @@ FactoryBot.define do
sequence(:job_arguments) { |n| [["column_#{n}"], ["column_#{n}_convert_to_bigint"]] }
total_tuple_count { 10_000 }
pause_ms { 100 }
+ gitlab_schema { :gitlab_main }
trait(:paused) do
status { 0 }
diff --git a/spec/factories/incident_management/timeline_events.rb b/spec/factories/incident_management/timeline_events.rb
index e2e216d24b8..831f78369b7 100644
--- a/spec/factories/incident_management/timeline_events.rb
+++ b/spec/factories/incident_management/timeline_events.rb
@@ -10,5 +10,14 @@ FactoryBot.define do
note { 'timeline created' }
note_html { '<strong>timeline created</strong>' }
action { 'comment' }
+ editable
+ end
+
+ trait :editable do
+ editable { true }
+ end
+
+ trait :non_editable do
+ editable { false }
end
end
diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb
index 8c714f7736f..88522737e06 100644
--- a/spec/factories/issues.rb
+++ b/spec/factories/issues.rb
@@ -41,6 +41,13 @@ FactoryBot.define do
end
end
+ trait :closed_as_duplicate do
+ closed
+ after(:create) do |issue|
+ issue.update!(duplicated_to: create(:issue, project: issue.project))
+ end
+ end
+
after(:build) do |issue, evaluator|
issue.state_id = Issue.available_states[evaluator.state]
end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index e897a5e022a..4941a31982f 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -25,10 +25,6 @@ FactoryBot.define do
title { generate(:draft_title) }
end
- trait :wip_merge_request do
- title { generate(:wip_title) }
- end
-
trait :jira_title do
title { generate(:jira_title) }
end
@@ -324,7 +320,7 @@ FactoryBot.define do
# Fake `fetch_ref!` if we don't have repository
# We have too many existing tests relying on this behaviour
unless [target_project, source_project].all?(&:repository_exists?)
- allow(merge_request).to receive(:fetch_ref!)
+ stub_method(merge_request, :fetch_ref!) { nil }
end
end
diff --git a/spec/factories/plan_limits.rb b/spec/factories/plan_limits.rb
index ad10629af05..1e4f70cd925 100644
--- a/spec/factories/plan_limits.rb
+++ b/spec/factories/plan_limits.rb
@@ -6,8 +6,10 @@ FactoryBot.define do
dast_profile_schedules { 50 }
- trait :default_plan do
- plan factory: :default_plan
+ Plan.all_plans.each do |plan|
+ trait :"#{plan}_plan" do
+ plan factory: :"#{plan}_plan"
+ end
end
trait :with_package_file_sizes do
diff --git a/spec/factories/project_members.rb b/spec/factories/project_members.rb
index c38257b06b6..ab1e45acc15 100644
--- a/spec/factories/project_members.rb
+++ b/spec/factories/project_members.rb
@@ -6,10 +6,11 @@ FactoryBot.define do
source { association(:project) }
maintainer
- trait(:guest) { access_level { ProjectMember::GUEST } }
- trait(:reporter) { access_level { ProjectMember::REPORTER } }
+ trait(:guest) { access_level { ProjectMember::GUEST } }
+ trait(:reporter) { access_level { ProjectMember::REPORTER } }
trait(:developer) { access_level { ProjectMember::DEVELOPER } }
trait(:maintainer) { access_level { ProjectMember::MAINTAINER } }
+ trait(:owner) { access_level { ProjectMember::OWNER } }
trait(:access_request) { requested_at { Time.now } }
trait(:invited) do
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index c3c02782578..86321350962 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -29,6 +29,7 @@ FactoryBot.define do
merge_requests_access_level { ProjectFeature::ENABLED }
repository_access_level { ProjectFeature::ENABLED }
analytics_access_level { ProjectFeature::ENABLED }
+ package_registry_access_level { ProjectFeature::ENABLED }
pages_access_level do
visibility_level == Gitlab::VisibilityLevel::PUBLIC ? ProjectFeature::ENABLED : ProjectFeature::PRIVATE
end
@@ -67,6 +68,7 @@ FactoryBot.define do
forking_access_level: evaluator.forking_access_level,
merge_requests_access_level: merge_requests_access_level,
repository_access_level: evaluator.repository_access_level,
+ package_registry_access_level: evaluator.package_registry_access_level,
pages_access_level: evaluator.pages_access_level,
metrics_dashboard_access_level: evaluator.metrics_dashboard_access_level,
operations_access_level: evaluator.operations_access_level,
@@ -301,8 +303,8 @@ FactoryBot.define do
trait :stubbed_repository do
after(:build) do |project|
- allow(project).to receive(:empty_repo?).and_return(false)
- allow(project.repository).to receive(:empty?).and_return(false)
+ stub_method(project, :empty_repo?) { false }
+ stub_method(project.repository, :empty?) { false }
end
end
diff --git a/spec/factories/releases.rb b/spec/factories/releases.rb
index 52a9341b955..a07d4ef6c2e 100644
--- a/spec/factories/releases.rb
+++ b/spec/factories/releases.rb
@@ -14,7 +14,11 @@ FactoryBot.define do
trait :legacy do
sha { nil }
- author { nil }
+
+ # Legacy releases, which are created during tag creation, have no author.
+ after(:create) do |release, _|
+ release.update_column(:author_id, nil)
+ end
end
trait :with_evidence do
diff --git a/spec/factories/sequences.rb b/spec/factories/sequences.rb
index 6b86154aa91..c10fab8588d 100644
--- a/spec/factories/sequences.rb
+++ b/spec/factories/sequences.rb
@@ -16,7 +16,6 @@ FactoryBot.define do
sequence(:oid) { |n| Digest::SHA2.hexdigest("oid-like-#{n}") }
sequence(:variable) { |n| "var#{n}" }
sequence(:draft_title) { |n| "Draft: #{n}" }
- sequence(:wip_title) { |n| "WIP: #{n}" }
sequence(:jira_title) { |n| "[PROJ-#{n}]: fix bug" }
sequence(:jira_description) { |n| "This is a description\n here is the description\n Related to: PROJ-#{n}" }
sequence(:jira_branch) { |n| "feature/PROJ-#{n}" }
diff --git a/spec/factories/terraform/state.rb b/spec/factories/terraform/state.rb
index fb63c845073..c40fa14b4f8 100644
--- a/spec/factories/terraform/state.rb
+++ b/spec/factories/terraform/state.rb
@@ -12,6 +12,10 @@ FactoryBot.define do
locked_by_user { association(:user) }
end
+ trait :deletion_in_progress do
+ deleted_at { Time.current }
+ end
+
trait :with_version do
after(:create) do |state|
create(:terraform_state_version, terraform_state: state)
diff --git a/spec/factories/time_tracking/timelog_categories.rb b/spec/factories/time_tracking/timelog_categories.rb
new file mode 100644
index 00000000000..80fd9b7901a
--- /dev/null
+++ b/spec/factories/time_tracking/timelog_categories.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :timelog_category, class: 'TimeTracking::TimelogCategory' do
+ namespace
+
+ name { generate(:name) }
+ end
+end
diff --git a/spec/factories/work_items.rb b/spec/factories/work_items.rb
index 6d9dcac6165..e80aa9cc008 100644
--- a/spec/factories/work_items.rb
+++ b/spec/factories/work_items.rb
@@ -9,5 +9,10 @@ FactoryBot.define do
relative_position { RelativePositioning::START_POSITION }
issue_type { :issue }
association :work_item_type, :default
+
+ trait :task do
+ issue_type { :task }
+ association :work_item_type, :default, :task
+ end
end
end
diff --git a/spec/factories/work_items/parent_links.rb b/spec/factories/work_items/parent_links.rb
new file mode 100644
index 00000000000..886aa449a57
--- /dev/null
+++ b/spec/factories/work_items/parent_links.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :parent_link, class: 'WorkItems::ParentLink' do
+ transient do
+ work_item { nil }
+ work_item_parent { nil }
+ end
+
+ after(:build) do |link, evaluator|
+ link.work_item = evaluator.work_item
+ link.work_item_parent = evaluator.work_item_parent
+
+ unless link.work_item && link.work_item_parent
+ project = link.work_item&.project || link.work_item_parent&.project || create(:project)
+ link.work_item ||= create(:work_item, :task, project: project)
+ link.work_item_parent ||= create(:work_item, project: project)
+ end
+ end
+ end
+end
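Note: the new parent_links factory relies on transient attributes plus an `after(:build)` callback. A minimal, database-free sketch of that pattern follows; the names here are illustrative, not GitLab's.

```ruby
require 'factory_bot'
require 'ostruct'

FactoryBot.define do
  factory :link_example, class: 'OpenStruct' do
    skip_create

    transient do
      child  { nil } # transient values never become attributes on the built object
      parent { nil }
    end

    after(:build) do |link, evaluator|
      link.child  = evaluator.child  || 'default-child'
      link.parent = evaluator.parent || 'default-parent'
    end
  end
end

# FactoryBot.build(:link_example, child: 'task-1').child # => "task-1"
```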
diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb
index 6cbe97fb3f3..34ab48f67a8 100644
--- a/spec/fast_spec_helper.rb
+++ b/spec/fast_spec_helper.rb
@@ -35,4 +35,11 @@ RSpec.configure do |config|
config.filter_run focus: true
config.run_all_when_everything_filtered = true
end
+
+ # Makes diffs show entire non-truncated values.
+ config.before(:each, unlimited_max_formatted_output_length: true) do |_example|
+ config.expect_with :rspec do |c|
+ c.max_formatted_output_length = nil
+ end
+ end
end
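Note: the fast_spec_helper hunk above scopes the untruncated-diff behaviour to examples tagged with the metadata key. A short usage sketch (the example group itself is made up):

```ruby
RSpec.describe 'comparing large payloads', unlimited_max_formatted_output_length: true do
  it 'prints the full, untruncated diff on failure' do
    expect('x' * 5_000).to eq('x' * 5_000)
  end
end
```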
diff --git a/spec/features/admin/admin_disables_two_factor_spec.rb b/spec/features/admin/admin_disables_two_factor_spec.rb
index f65e85b4cb6..4463dbb1eb0 100644
--- a/spec/features/admin/admin_disables_two_factor_spec.rb
+++ b/spec/features/admin/admin_disables_two_factor_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe 'Admin disables 2FA for a user' do
+ include Spec::Support::Helpers::ModalHelpers
+
it 'successfully', :js do
- stub_feature_flags(bootstrap_confirmation_modals: false)
admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
@@ -12,9 +13,11 @@ RSpec.describe 'Admin disables 2FA for a user' do
edit_user(user)
page.within('.two-factor-status') do
- accept_confirm { click_link 'Disable' }
+ click_link 'Disable'
end
+ accept_gl_confirm(button_text: 'Disable')
+
page.within('.two-factor-status') do
expect(page).to have_content 'Disabled'
expect(page).not_to have_button 'Disable'
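Note: this and several later specs in the diff replace Capybara's `accept_confirm` with `accept_gl_confirm` from `Spec::Support::Helpers::ModalHelpers`. That helper is not part of this diff; the following is a hedged sketch of what such a helper can look like — the real implementation and selectors may differ.

```ruby
module ModalHelpersSketch
  # Trigger the action in the block, then confirm the GlModal dialog.
  def accept_gl_confirm(button_text: 'OK')
    yield if block_given?
    within('.modal') do # assumed selector for the GlModal dialog
      click_button button_text
    end
  end
end

# accept_gl_confirm(button_text: 'Disable') { click_link 'Disable' }
```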
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 90dde7340d5..2d541a34f62 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Admin Groups' do
end
end
- describe 'create a group' do
+ describe 'create a group', :js do
describe 'with expected fields' do
it 'renders form as expected', :aggregate_failures do
visit new_admin_group_path
@@ -60,8 +60,7 @@ RSpec.describe 'Admin Groups' do
expect(page).to have_current_path admin_group_path(Group.find_by(path: path_component)), ignore_query: true
content = page.find('#content-body')
- h3_texts = content.all('h3').collect(&:text).join("\n")
- expect(h3_texts).to match group_name
+ expect(page).to have_content group_name
li_texts = content.all('li').collect(&:text).join("\n")
expect(li_texts).to match group_name
expect(li_texts).to match path_component
@@ -76,7 +75,7 @@ RSpec.describe 'Admin Groups' do
expect_selected_visibility(internal)
end
- it 'when entered in group name, it auto filled the group path', :js do
+ it 'when entered in group name, it auto filled the group path' do
visit admin_groups_path
click_link "New group"
group_name = 'gitlab'
@@ -85,7 +84,7 @@ RSpec.describe 'Admin Groups' do
expect(path_field.value).to eq group_name
end
- it 'auto populates the group path with the group name', :js do
+ it 'auto populates the group path with the group name' do
visit admin_groups_path
click_link "New group"
group_name = 'my gitlab project'
@@ -94,7 +93,7 @@ RSpec.describe 'Admin Groups' do
expect(path_field.value).to eq 'my-gitlab-project'
end
- it 'when entering in group path, group name does not change anymore', :js do
+ it 'when entering in group path, group name does not change anymore' do
visit admin_groups_path
click_link "New group"
group_path = 'my-gitlab-project'
diff --git a/spec/features/admin/admin_hook_logs_spec.rb b/spec/features/admin/admin_hook_logs_spec.rb
index fd51fd71fea..6caf2b24555 100644
--- a/spec/features/admin/admin_hook_logs_spec.rb
+++ b/spec/features/admin/admin_hook_logs_spec.rb
@@ -41,4 +41,18 @@ RSpec.describe 'Admin::HookLogs' do
expect(page).to have_current_path(edit_admin_hook_path(system_hook), ignore_query: true)
end
+
+ context 'response data is too large' do
+ let(:hook_log) { create(:web_hook_log, web_hook: system_hook, request_data: WebHookLog::OVERSIZE_REQUEST_DATA) }
+
+ it 'shows request data as too large and disables retry function' do
+ visit(admin_hook_hook_log_path(system_hook, hook_log))
+
+ expect(page).to have_content('Request data is too large')
+ expect(page).not_to have_button(
+ _('Resent request'),
+ disabled: true, class: 'has-tooltip', title: _("Request data is too large")
+ )
+ end
+ end
end
diff --git a/spec/features/admin/admin_hooks_spec.rb b/spec/features/admin/admin_hooks_spec.rb
index 388ab02d8e8..901315752d6 100644
--- a/spec/features/admin/admin_hooks_spec.rb
+++ b/spec/features/admin/admin_hooks_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Admin::Hooks' do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:user) { create(:admin) }
before do
@@ -79,7 +81,6 @@ RSpec.describe 'Admin::Hooks' do
let(:hook_url) { generate(:url) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
create(:system_hook, url: hook_url)
end
@@ -87,7 +88,7 @@ RSpec.describe 'Admin::Hooks' do
it 'from hooks list page' do
visit admin_hooks_path
- accept_confirm { click_link 'Delete' }
+ accept_gl_confirm(button_text: 'Delete webhook') { click_link 'Delete' }
expect(page).not_to have_content(hook_url)
end
@@ -95,7 +96,7 @@ RSpec.describe 'Admin::Hooks' do
visit admin_hooks_path
click_link 'Edit'
- accept_confirm { click_link 'Delete' }
+ accept_gl_confirm(button_text: 'Delete webhook') { click_link 'Delete' }
expect(page).not_to have_content(hook_url)
end
end
diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb
index ba0870a53ae..fa5c94aa66e 100644
--- a/spec/features/admin/admin_labels_spec.rb
+++ b/spec/features/admin/admin_labels_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe 'admin issues labels' do
describe 'list' do
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
visit admin_labels_path
end
@@ -38,11 +37,9 @@ RSpec.describe 'admin issues labels' do
end
it 'deletes all labels', :js do
- page.within '.labels' do
- page.all('.js-remove-label').each do |remove|
- accept_confirm { remove.click }
- wait_for_requests
- end
+ page.all('.labels .js-remove-label').each do |remove|
+ accept_gl_confirm(button_text: 'Delete label') { remove.click }
+ wait_for_requests
end
wait_for_requests
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index e1a1e2bbb2d..d312965f6cf 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -115,13 +115,17 @@ RSpec.describe "Admin Runners" do
expect(page).not_to have_content("runner-bar")
end
- it 'shows no runner when description does not match' do
- input_filtered_search_keys('runner-baz')
+ context 'when description does not match' do
+ before do
+ input_filtered_search_keys('runner-baz')
+ end
- expect(page).to have_link('All 0')
- expect(page).to have_link('Instance 0')
+ it_behaves_like 'shows no runners found'
- expect(page).to have_text 'No runners found'
+ it 'shows no runner' do
+ expect(page).to have_link('All 0')
+ expect(page).to have_link('Instance 0')
+ end
end
end
@@ -190,14 +194,6 @@ RSpec.describe "Admin Runners" do
expect(page).not_to have_content 'runner-never-contacted'
end
- it 'shows no runner when status does not match' do
- input_filtered_search_filter_is_only('Status', 'Stale')
-
- expect(page).to have_link('All 0')
-
- expect(page).to have_text 'No runners found'
- end
-
it 'shows correct runner when status is selected and search term is entered' do
input_filtered_search_filter_is_only('Status', 'Online')
input_filtered_search_keys('runner-1')
@@ -225,6 +221,18 @@ RSpec.describe "Admin Runners" do
expect(page).to have_selector '.badge', text: 'never contacted'
end
end
+
+ context 'when status does not match' do
+ before do
+ input_filtered_search_filter_is_only('Status', 'Stale')
+ end
+
+ it_behaves_like 'shows no runners found'
+
+ it 'shows no runner' do
+ expect(page).to have_link('All 0')
+ end
+ end
end
describe 'filter by type' do
@@ -273,21 +281,6 @@ RSpec.describe "Admin Runners" do
end
end
- it 'shows no runner when type does not match' do
- visit admin_runners_path
-
- page.within('[data-testid="runner-type-tabs"]') do
- click_on 'Instance'
-
- expect(page).to have_link('Instance', class: 'active')
- end
-
- expect(page).not_to have_content 'runner-project'
- expect(page).not_to have_content 'runner-group'
-
- expect(page).to have_text 'No runners found'
- end
-
it 'shows correct runner when type is selected and search term is entered' do
create(:ci_runner, :project, description: 'runner-2-project', projects: [project])
@@ -327,6 +320,24 @@ RSpec.describe "Admin Runners" do
expect(page).not_to have_content 'runner-group'
expect(page).not_to have_content 'runner-paused-project'
end
+
+ context 'when type does not match' do
+ before do
+ visit admin_runners_path
+ page.within('[data-testid="runner-type-tabs"]') do
+ click_on 'Instance'
+
+ expect(page).to have_link('Instance', class: 'active')
+ end
+ end
+
+ it_behaves_like 'shows no runners found'
+
+ it 'shows no runner' do
+ expect(page).not_to have_content 'runner-project'
+ expect(page).not_to have_content 'runner-group'
+ end
+ end
end
describe 'filter by tag' do
@@ -358,15 +369,6 @@ RSpec.describe "Admin Runners" do
expect(page).not_to have_content 'runner-red'
end
- it 'shows no runner when tag does not match' do
- visit admin_runners_path
-
- input_filtered_search_filter_is_only('Tags', 'green')
-
- expect(page).not_to have_content 'runner-blue'
- expect(page).to have_text 'No runners found'
- end
-
it 'shows correct runner when tag is selected and search term is entered' do
create(:ci_runner, :instance, description: 'runner-2-blue', tag_list: ['blue'])
@@ -384,6 +386,19 @@ RSpec.describe "Admin Runners" do
expect(page).not_to have_content 'runner-blue'
expect(page).not_to have_content 'runner-red'
end
+
+ context 'when tag does not match' do
+ before do
+ visit admin_runners_path
+ input_filtered_search_filter_is_only('Tags', 'green')
+ end
+
+ it_behaves_like 'shows no runners found'
+
+ it 'shows no runner' do
+ expect(page).not_to have_content 'runner-blue'
+ end
+ end
end
it 'sorts by last contact date' do
@@ -419,7 +434,7 @@ RSpec.describe "Admin Runners" do
visit admin_runners_path
end
- it_behaves_like "shows no runners"
+ it_behaves_like 'shows no runners registered'
it 'shows tabs with total counts equal to 0' do
expect(page).to have_link('All 0')
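Note: the runner specs are refactored onto two shared examples, 'shows no runners found' and 'shows no runners registered', whose definitions live in a support file outside this diff. A plausible sketch follows; the 'No runners found' text is taken from the removed assertions above, while the registered-empty-state copy is assumed.

```ruby
RSpec.shared_examples 'shows no runners found' do
  it 'shows a "no runners found" message' do
    expect(page).to have_text 'No runners found' # text from the removed examples above
  end
end

RSpec.shared_examples 'shows no runners registered' do
  it 'shows the empty-state message' do
    expect(page).to have_text 'No runners' # assumed empty-state copy
  end
end
```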
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 79b3f049047..8843e13026b 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe 'Admin updates settings' do
expect(current_settings.import_sources).to be_empty
page.within('.as-visibility-access') do
- check "Repo by URL"
+ check "Repository by URL"
click_button 'Save changes'
end
@@ -280,6 +280,18 @@ RSpec.describe 'Admin updates settings' do
expect(current_settings.gitpod_enabled).to be(true)
end
end
+
+ context 'GitLab for Jira App settings' do
+ it 'changes the setting' do
+ page.within('#js-jira_connect-settings') do
+ fill_in 'Jira Connect Application ID', with: '1234'
+ click_button 'Save changes'
+ end
+
+ expect(current_settings.jira_connect_application_key).to eq('1234')
+ expect(page).to have_content "Application settings saved successfully"
+ end
+ end
end
context 'Integrations page' do
diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
index 15bc2318022..7e57cffc791 100644
--- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb
+++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:admin) { create(:admin) }
let!(:user) { create(:user) }
@@ -74,10 +76,9 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
let!(:impersonation_token) { create(:personal_access_token, :impersonation, user: user) }
it "allows revocation of an active impersonation token" do
- stub_feature_flags(bootstrap_confirmation_modals: false)
visit admin_user_impersonation_tokens_path(user_id: user.username)
- accept_confirm { click_on "Revoke" }
+ accept_gl_confirm(button_text: 'Revoke') { click_on "Revoke" }
expect(page).to have_selector(".settings-message")
expect(no_personal_access_tokens_message).to have_text("This user has no active impersonation tokens.")
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index 4e6aae7c46f..2dffef93600 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -4,11 +4,11 @@ require 'spec_helper'
RSpec.describe 'Admin uses repository checks', :request_store do
include StubENV
+ include Spec::Support::Helpers::ModalHelpers
let(:admin) { create(:admin) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
end
@@ -57,7 +57,9 @@ RSpec.describe 'Admin uses repository checks', :request_store do
expect(RepositoryCheck::ClearWorker).to receive(:perform_async)
- accept_confirm { find(:link, 'Clear all repository checks').send_keys(:return) }
+ accept_gl_confirm(button_text: 'Clear repository checks') do
+ find(:link, 'Clear all repository checks').send_keys(:return)
+ end
expect(page).to have_content('Started asynchronous removal of all repository check states.')
end
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index 7e8dee9cc0b..18bb03f4617 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'Admin::Users::User' do
let_it_be(:current_user) { create(:admin) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(current_user)
gitlab_enable_admin_mode_sign_in(current_user)
end
@@ -354,7 +353,7 @@ RSpec.describe 'Admin::Users::User' do
expect(page).to have_content("Secondary email: #{secondary_email.email}")
- accept_confirm { find("#remove_email_#{secondary_email.id}").click }
+ accept_gl_confirm { find("#remove_email_#{secondary_email.id}").click }
expect(page).not_to have_content(secondary_email.email)
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index a05e1531949..e5df6cc0fd3 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'Admin::Users' do
let_it_be(:current_user) { create(:admin) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(current_user)
gitlab_enable_admin_mode_sign_in(current_user)
end
@@ -492,9 +491,7 @@ RSpec.describe 'Admin::Users' do
within(:css, '.gl-mb-3 + .card') do
click_link group.name
end
- within(:css, 'h3.page-title') do
- expect(page).to have_content "Group: #{group.name}"
- end
+ expect(page).to have_content "Group: #{group.name}"
expect(page).to have_content project.name
end
@@ -506,8 +503,11 @@ RSpec.describe 'Admin::Users' do
it 'allows group membership to be revoked', :js do
page.within(first('.group_member')) do
- accept_confirm { find('.btn[data-testid="remove-user"]').click }
+ find('.btn[data-testid="remove-user"]').click
end
+
+ accept_gl_confirm(button_text: 'Remove')
+
wait_for_requests
expect(page).not_to have_selector('.group_member')
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index bf976168bbe..e8321adeb42 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -521,7 +521,6 @@ RSpec.describe 'Project issue boards', :js do
let_it_be(:user_guest) { create(:user) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
project.add_guest(user_guest)
sign_in(user_guest)
visit project_board_path(project, board)
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 09e042b00cc..06e3e00db7d 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -36,20 +36,6 @@ RSpec.describe 'Clusterable > Show page' do
expect(page).not_to have_selector('[data-testid="cluster-environments-tab"]')
end
-
- context 'content-security policy' do
- it 'has AWS domains in the CSP' do
- visit cluster_path
-
- expect(response_headers['Content-Security-Policy']).to include(::Clusters::ClustersController::AWS_CSP_DOMAINS.join(' '))
- end
-
- it 'keeps existing connect-src in the CSP' do
- visit cluster_path
-
- expect(response_headers['Content-Security-Policy']).to include("connect-src #{Gitlab::ContentSecurityPolicy::Directives.connect_src}")
- end
- end
end
shared_examples 'editing a GCP cluster' do
diff --git a/spec/features/clusters/create_agent_spec.rb b/spec/features/clusters/create_agent_spec.rb
index c7326204bf6..b879ae645f7 100644
--- a/spec/features/clusters/create_agent_spec.rb
+++ b/spec/features/clusters/create_agent_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe 'Cluster agent registration', :js do
expect(page).to have_content('You cannot see this token again after you close this window.')
expect(page).to have_content('example-agent-token')
expect(page).to have_content('helm upgrade --install')
+ expect(page).to have_content('example-agent-2')
within find('.modal-footer') do
click_button('Close')
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index 4b38df175e2..db841ffc627 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'Commits' do
before do
sign_in(user)
stub_ci_pipeline_to_return_yaml_file
- stub_feature_flags(pipeline_tabs_vue: false)
end
let(:creator) { create(:user, developer_projects: [project]) }
@@ -94,7 +93,6 @@ RSpec.describe 'Commits' do
context 'Download artifacts', :js do
before do
- stub_feature_flags(pipeline_tabs_vue: false)
create(:ci_job_artifact, :archive, file: artifacts_file, job: build)
end
@@ -124,7 +122,6 @@ RSpec.describe 'Commits' do
context "when logged as reporter", :js do
before do
- stub_feature_flags(pipeline_tabs_vue: false)
project.add_reporter(user)
create(:ci_job_artifact, :archive, file: artifacts_file, job: build)
visit builds_project_pipeline_path(project, pipeline)
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 04e78b59ab4..adb43d60306 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -55,7 +55,8 @@ RSpec.describe 'Dashboard Todos' do
expect(link).not_to be_nil
expect(link['data-iid']).to eq(referenced_mr.iid.to_s)
expect(link['data-project-path']).to eq(referenced_mr.project.full_path)
- expect(link['data-mr-title']).to eq(referenced_mr.title)
+ expect(link['title']).to eq(referenced_mr.title)
+ expect(link['data-reference-type']).to eq('merge_request')
end
end
diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb
index c7d37205b71..9af9baeb5bb 100644
--- a/spec/features/group_variables_spec.rb
+++ b/spec/features/group_variables_spec.rb
@@ -12,8 +12,18 @@ RSpec.describe 'Group variables', :js do
group.add_owner(user)
gitlab_sign_in(user)
wait_for_requests
- visit page_path
end
- it_behaves_like 'variable list'
+ context 'with disabled ff `ci_variable_settings_graphql`' do
+ before do
+ stub_feature_flags(ci_variable_settings_graphql: false)
+ visit page_path
+ end
+
+ it_behaves_like 'variable list'
+ end
+
+ # TODO: Uncomment when the new GraphQL app for variable settings
+ # is enabled.
+ # it_behaves_like 'variable list'
end
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index 74ea72b238f..6b512323d4d 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -112,9 +112,9 @@ RSpec.describe 'User Cluster', :js do
context 'when user destroys the cluster' do
before do
click_link 'Advanced Settings'
- click_button 'Remove integration and resources'
+ find('[data-testid="remove-integration-button"]').click
fill_in 'confirm_cluster_name_input', with: cluster.name
- click_button 'Remove integration'
+ find('[data-testid="remove-integration-modal-button"]').click
end
it 'user sees creation form with the successful message' do
diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb
index 71f38401fa1..f11e5c56545 100644
--- a/spec/features/groups/empty_states_spec.rb
+++ b/spec/features/groups/empty_states_spec.rb
@@ -7,8 +7,6 @@ RSpec.describe 'Group empty states' do
let(:user) { create(:group_member, :developer, user: create(:user), group: group ).user }
before do
- stub_feature_flags(vue_issues_list: true)
-
sign_in(user)
end
diff --git a/spec/features/groups/group_runners_spec.rb b/spec/features/groups/group_runners_spec.rb
index 1d821edefa3..a60b8a60da0 100644
--- a/spec/features/groups/group_runners_spec.rb
+++ b/spec/features/groups/group_runners_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe "Group Runners" do
visit group_runners_path(group)
end
- it_behaves_like "shows no runners"
+ it_behaves_like 'shows no runners registered'
it 'shows tabs with total counts equal to 0' do
expect(page).to have_link('All 0')
@@ -70,6 +70,18 @@ RSpec.describe "Group Runners" do
expect(find_link('Edit')[:href]).to end_with(edit_group_runner_path(group, group_runner))
end
end
+
+ context 'when description does not match' do
+ before do
+ input_filtered_search_keys('runner-baz')
+ end
+
+ it_behaves_like 'shows no runners found'
+
+ it 'shows no runner' do
+ expect(page).not_to have_content 'runner-foo'
+ end
+ end
end
context "with an online project runner" do
@@ -137,31 +149,37 @@ RSpec.describe "Group Runners" do
create(:ci_runner, :group, groups: [group], description: 'runner-foo', contacted_at: Time.zone.now)
end
- it 'user edits the runner to be protected' do
- visit edit_group_runner_path(group, runner)
+ context 'when group_runner_view_ui is disabled' do
+ before do
+ stub_feature_flags(group_runner_view_ui: false)
+ end
- expect(page.find_field('runner[access_level]')).not_to be_checked
+ it 'user edits the runner to be protected' do
+ visit edit_group_runner_path(group, runner)
- check 'runner_access_level'
- click_button 'Save changes'
+ expect(page.find_field('runner[access_level]')).not_to be_checked
- expect(page).to have_content 'Protected Yes'
- end
+ check 'runner_access_level'
+ click_button 'Save changes'
- context 'when a runner has a tag' do
- before do
- runner.update!(tag_list: ['tag'])
+ expect(page).to have_content 'Protected Yes'
end
- it 'user edits runner not to run untagged jobs' do
- visit edit_group_runner_path(group, runner)
+ context 'when a runner has a tag' do
+ before do
+ runner.update!(tag_list: ['tag'])
+ end
- expect(page.find_field('runner[run_untagged]')).to be_checked
+ it 'user edits runner not to run untagged jobs' do
+ visit edit_group_runner_path(group, runner)
- uncheck 'runner_run_untagged'
- click_button 'Save changes'
+ expect(page.find_field('runner[run_untagged]')).to be_checked
- expect(page).to have_content 'Can run untagged jobs No'
+ uncheck 'runner_run_untagged'
+ click_button 'Save changes'
+
+ expect(page).to have_content 'Can run untagged jobs No'
+ end
end
end
end
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index 552b599a3f3..ae03e64cf59 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
click_on 'Connect instance'
- expect(page).to have_content 'Showing 1-1 of 42 groups from %{url}' % { url: source_url }
+ expect(page).to have_content 'Showing 1-1 of 42 groups that you own from %{url}' % { url: source_url }
expect(page).to have_content 'stub-group'
visit '/'
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index ef3346b9763..c86705832b1 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -11,10 +11,6 @@ RSpec.describe 'Group issues page' do
let(:project_with_issues_disabled) { create(:project, :issues_disabled, group: group) }
let(:path) { issues_group_path(group) }
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
context 'with shared examples', :js do
let(:issuable) { create(:issue, project: project, title: "this is my created issuable")}
@@ -140,8 +136,6 @@ RSpec.describe 'Group issues page' do
let!(:issue3) { create(:issue, project: project, title: 'Issue #3', relative_position: 3) }
before do
- stub_feature_flags(vue_issues_list: false)
-
sign_in(user_in_group)
end
@@ -164,45 +158,36 @@ RSpec.describe 'Group issues page' do
end
it 'issues should be draggable and persist order' do
- visit issues_group_path(group, sort: 'relative_position')
-
- wait_for_requests
+ visit issues_group_path(group)
+ select_manual_sort
- drag_to(selector: '.manual-ordering',
- from_index: 0,
- to_index: 2)
+ drag_to(selector: '.manual-ordering', from_index: 0, to_index: 2)
- wait_for_requests
+ expect_issue_order
- check_issue_order
-
- visit issues_group_path(group, sort: 'relative_position')
+ visit issues_group_path(group)
- check_issue_order
+ expect_issue_order
end
it 'issues should not be draggable when user is not logged in' do
sign_out(user_in_group)
-
- visit issues_group_path(group, sort: 'relative_position')
-
wait_for_requests
+ visit issues_group_path(group)
+ select_manual_sort
- drag_to(selector: '.manual-ordering',
- from_index: 0,
- to_index: 2)
+ drag_to(selector: '.manual-ordering', from_index: 0, to_index: 2)
- wait_for_requests
+ expect(page).to have_text 'An error occurred while reordering issues.'
+ end
- # Issue order should remain the same
- page.within('.manual-ordering') do
- expect(find('.issue:nth-child(1) .title')).to have_content('Issue #1')
- expect(find('.issue:nth-child(2) .title')).to have_content('Issue #2')
- expect(find('.issue:nth-child(3) .title')).to have_content('Issue #3')
- end
+ def select_manual_sort
+ click_button 'Created date'
+ click_button 'Manual'
+ wait_for_requests
end
- def check_issue_order
+ def expect_issue_order
expect(page).to have_css('.issue:nth-child(1) .title', text: 'Issue #2')
expect(page).to have_css('.issue:nth-child(2) .title', text: 'Issue #3')
expect(page).to have_css('.issue:nth-child(3) .title', text: 'Issue #1')
diff --git a/spec/features/groups/members/leave_group_spec.rb b/spec/features/groups/members/leave_group_spec.rb
index 50d5db46cee..66f251c859a 100644
--- a/spec/features/groups/members/leave_group_spec.rb
+++ b/spec/features/groups/members/leave_group_spec.rb
@@ -4,13 +4,13 @@ require 'spec_helper'
RSpec.describe 'Groups > Members > Leave group' do
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::ModalHelpers
let(:user) { create(:user) }
let(:other_user) { create(:user) }
let(:group) { create(:group) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
@@ -32,7 +32,7 @@ RSpec.describe 'Groups > Members > Leave group' do
visit group_path(group, leave: 1)
- page.accept_confirm
+ accept_gl_confirm(button_text: 'Leave group')
wait_for_all_requests
expect(page).to have_current_path(dashboard_groups_path, ignore_query: true)
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index e4b44d65438..b4faa3ce0dd 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe 'Group navbar' do
before do
insert_package_nav(_('Kubernetes'))
- stub_feature_flags(customer_relations: false)
stub_config(dependency_proxy: { enabled: false })
stub_config(registry: { enabled: false })
stub_feature_flags(harbor_registry_integration: false)
@@ -42,12 +41,10 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
- context 'when customer_relations feature and flag is enabled' do
+ context 'when customer_relations feature is enabled' do
let(:group) { create(:group, :crm_enabled) }
before do
- stub_feature_flags(customer_relations: true)
-
if Gitlab.ee?
insert_customer_relations_nav(_('Analytics'))
else
@@ -60,12 +57,10 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
- context 'when customer_relations feature and flag is enabled but subgroup' do
+ context 'when customer_relations feature is enabled but subgroup' do
let(:group) { create(:group, :crm_enabled, parent: create(:group)) }
before do
- stub_feature_flags(customer_relations: true)
-
visit group_path(group)
end
diff --git a/spec/features/groups/settings/access_tokens_spec.rb b/spec/features/groups/settings/access_tokens_spec.rb
index 20787c4c2f5..198d3a40df2 100644
--- a/spec/features/groups/settings/access_tokens_spec.rb
+++ b/spec/features/groups/settings/access_tokens_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Group > Settings > Access Tokens', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:bot_user) { create(:user, :project_bot) }
let_it_be(:group) { create(:group) }
@@ -13,7 +15,6 @@ RSpec.describe 'Group > Settings > Access Tokens', :js do
end
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index eb62b6fa8ee..fa8db1befb5 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -72,6 +72,58 @@ RSpec.describe 'Group show page' do
end
end
end
+
+ context 'subgroups and projects empty state', :js do
+ context 'when user has permissions to create new subgroups or projects' do
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ visit path
+ end
+
+ it 'shows `Create new subgroup` link' do
+ expect(page).to have_link(
+ s_('GroupsEmptyState|Create new subgroup'),
+ href: new_group_path(parent_id: group.id)
+ )
+ end
+
+ it 'shows `Create new project` link' do
+ expect(page).to have_link(
+ s_('GroupsEmptyState|Create new project'),
+ href: new_project_path(namespace_id: group.id)
+ )
+ end
+ end
+ end
+
+ context 'when user does not have permissions to create new subgroups or projects', :js do
+ before do
+ group.add_reporter(user)
+ sign_in(user)
+ visit path
+ end
+
+ it 'does not show `Create new subgroup` link' do
+ expect(page).not_to have_link(
+ s_('GroupsEmptyState|Create new subgroup'),
+ href: new_group_path(parent_id: group.id)
+ )
+ end
+
+ it 'does not show `Create new project` link' do
+ expect(page).not_to have_link(
+ s_('GroupsEmptyState|Create new project'),
+ href: new_project_path(namespace_id: group.id)
+ )
+ end
+
+ it 'shows empty state' do
+ expect(page).to have_content(s_('GroupsEmptyState|No subgroups or projects.'))
+ expect(page).to have_content(s_('GroupsEmptyState|You do not have necessary permissions to create a subgroup' \
+ ' or project in this group. Please contact an owner of this group to create a new subgroup or project.'))
+ end
+ end
end
context 'when signed out' do
diff --git a/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb b/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
index 9fe11070187..4e4c0e509b0 100644
--- a/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
+++ b/spec/features/groups/user_sees_users_dropdowns_in_issuables_list_spec.rb
@@ -2,23 +2,50 @@
require 'spec_helper'
-RSpec.describe 'Groups > User sees users dropdowns in issuables list' do
- let(:entity) { create(:group) }
+RSpec.describe 'Groups > User sees users dropdowns in issuables list', :js do
+ include FilteredSearchHelpers
+
+ let(:group) { create(:group) }
let(:user_in_dropdown) { create(:user) }
let!(:user_not_in_dropdown) { create(:user) }
- let!(:project) { create(:project, group: entity) }
+ let!(:project) { create(:project, group: group) }
before do
- entity.add_developer(user_in_dropdown)
+ group.add_developer(user_in_dropdown)
+ sign_in(user_in_dropdown)
end
- it_behaves_like 'issuable user dropdown behaviors' do
- let(:issuable) { create(:issue, project: project) }
- let(:issuables_path) { issues_group_path(entity) }
+ describe 'issues' do
+ let!(:issuable) { create(:issue, project: project) }
+
+ %w[Author Assignee].each do |dropdown|
+ describe "#{dropdown} dropdown" do
+ it 'only includes members of the project/group' do
+ visit issues_group_path(group)
+
+ select_tokens dropdown, '=', submit: false
+
+ expect_suggestion(user_in_dropdown.name)
+ expect_no_suggestion(user_not_in_dropdown.name)
+ end
+ end
+ end
end
- it_behaves_like 'issuable user dropdown behaviors' do
- let(:issuable) { create(:merge_request, source_project: project) }
- let(:issuables_path) { merge_requests_group_path(entity) }
+ describe 'merge requests' do
+ let!(:issuable) { create(:merge_request, source_project: project) }
+
+ %w[author assignee].each do |dropdown|
+ describe "#{dropdown} dropdown" do
+ it 'only includes members of the project/group' do
+ visit merge_requests_group_path(group)
+
+ filtered_search.set("#{dropdown}:=")
+
+ expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).to have_content(user_in_dropdown.name)
+ expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).not_to have_content(user_not_in_dropdown.name)
+ end
+ end
+ end
end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index ceb4af03f89..31390b110e7 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Group' do
end
describe 'as a non-admin' do
- it 'creates a group and persists visibility radio selection', :js do
+ it 'creates a group and persists visibility radio selection', :js, :saas do
stub_application_setting(default_group_visibility: :private)
fill_in 'Group name', with: 'test-group'
@@ -127,7 +127,7 @@ RSpec.describe 'Group' do
describe 'Mattermost team creation' do
before do
- stub_mattermost_setting(enabled: mattermost_enabled)
+ stub_mattermost_setting(enabled: mattermost_enabled, host: 'https://mattermost.test')
visit new_group_path
click_link 'Create group'
@@ -145,13 +145,14 @@ RSpec.describe 'Group' do
end
it 'updates the team URL on graph path update', :js do
- out_span = find('span[data-bind-out="create_chat_team"]', visible: false)
+ label = find('#group_create_chat_team ~ label[for=group_create_chat_team]')
+ url = 'https://mattermost.test/test-group'
- expect(out_span.text).to be_empty
+ expect(label.text).not_to match(url)
fill_in('group_path', with: 'test-group')
- expect(out_span.text).to eq('test-group')
+ expect(label.text).to match(url)
end
end
@@ -497,7 +498,9 @@ RSpec.describe 'Group' do
let_it_be(:group) { create(:group) }
let_it_be_with_refind(:user) { create(:user) }
- before_all do
+ before do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
+
group.add_owner(user)
sign_in(user)
end
diff --git a/spec/features/incidents/incidents_list_spec.rb b/spec/features/incidents/incidents_list_spec.rb
index 789cc89e083..3241e71f537 100644
--- a/spec/features/incidents/incidents_list_spec.rb
+++ b/spec/features/incidents/incidents_list_spec.rb
@@ -44,18 +44,5 @@ RSpec.describe 'Incident Management index', :js do
expect(table).to have_content('Date created')
expect(table).to have_content('Assignees')
end
-
- context 'when :incident_escalations feature is disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it 'does not include the Status columns' do
- visit project_incidents_path(project)
- wait_for_requests
-
- expect(page.find('.gl-table')).not_to have_content('Status')
- end
- end
end
end
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index 7edf5fdc5ff..0fa2d238b0a 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe 'issuable list', :js do
issuable_types = [:issue, :merge_request]
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_user(user, :developer)
sign_in(user)
issuable_types.each { |type| create_issuables(type) }
diff --git a/spec/features/issue_rebalancing_spec.rb b/spec/features/issue_rebalancing_spec.rb
index 8a05aeec7ec..686aa5eb1b6 100644
--- a/spec/features/issue_rebalancing_spec.rb
+++ b/spec/features/issue_rebalancing_spec.rb
@@ -15,10 +15,6 @@ RSpec.describe 'Issue rebalancing' do
group.add_developer(user)
end
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
context 'when issue rebalancing is in progress' do
before do
sign_in(user)
diff --git a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
index 18b70c9622a..05eb656461e 100644
--- a/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_assignee_spec.rb
@@ -9,10 +9,6 @@ RSpec.describe 'Dropdown assignee', :js do
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
describe 'behavior' do
before do
project.add_maintainer(user)
diff --git a/spec/features/issues/filtered_search/dropdown_author_spec.rb b/spec/features/issues/filtered_search/dropdown_author_spec.rb
index 07e2bd3b7e4..36a8f1f3902 100644
--- a/spec/features/issues/filtered_search/dropdown_author_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_author_spec.rb
@@ -10,8 +10,6 @@ RSpec.describe 'Dropdown author', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/issues/filtered_search/dropdown_base_spec.rb b/spec/features/issues/filtered_search/dropdown_base_spec.rb
index 5fdab288b2d..9e3e3d394cd 100644
--- a/spec/features/issues/filtered_search/dropdown_base_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_base_spec.rb
@@ -10,8 +10,6 @@ RSpec.describe 'Dropdown base', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
index d6d59b89a8c..78450a9c3f7 100644
--- a/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_emoji_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe 'Dropdown emoji', :js do
let_it_be(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: issue) }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_maintainer(user)
create_list(:award_emoji, 2, user: user, name: 'thumbsup')
create_list(:award_emoji, 1, user: user, name: 'thumbsdown')
diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
index c64247b2b15..dcbab308efa 100644
--- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
@@ -10,8 +10,6 @@ RSpec.describe 'Dropdown hint', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_maintainer(user)
end
diff --git a/spec/features/issues/filtered_search/dropdown_label_spec.rb b/spec/features/issues/filtered_search/dropdown_label_spec.rb
index 67e3792a04c..0ff56909ad1 100644
--- a/spec/features/issues/filtered_search/dropdown_label_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_label_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe 'Dropdown label', :js do
let_it_be(:label) { create(:label, project: project, title: 'bug-label') }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
index 19a4c8853f1..37d604106f1 100644
--- a/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_milestone_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe 'Dropdown milestone', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/issues/filtered_search/dropdown_release_spec.rb b/spec/features/issues/filtered_search/dropdown_release_spec.rb
index 50ac9068b26..08e20563c8e 100644
--- a/spec/features/issues/filtered_search/dropdown_release_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_release_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe 'Dropdown release', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index 13bce49e6d1..8d96bbc38cb 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -19,7 +19,6 @@ RSpec.describe 'Filter issues', :js do
end
before do
- stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
create(:issue, project: project, author: user2, title: "Bug report 1")
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
index bb5964258be..cb17349dd43 100644
--- a/spec/features/issues/filtered_search/recent_searches_spec.rb
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -13,8 +13,6 @@ RSpec.describe 'Recent searches', :js do
let(:project_1_local_storage_key) { "#{project_1.full_path}-issue-recent-searches" }
before do
- stub_feature_flags(vue_issues_list: true)
-
# Visit any fast-loading page so we can clear local storage without a DOM exception
visit '/404'
remove_recent_searches
diff --git a/spec/features/issues/filtered_search/search_bar_spec.rb b/spec/features/issues/filtered_search/search_bar_spec.rb
index 8639ec2a227..e075547e326 100644
--- a/spec/features/issues/filtered_search/search_bar_spec.rb
+++ b/spec/features/issues/filtered_search/search_bar_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'Search bar', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(vue_issues_list: true)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index 9fb6a4cc2af..7a367723609 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -15,7 +15,6 @@ RSpec.describe 'Visual tokens', :js do
let_it_be(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(vue_issues_list: true)
project.add_user(user, :maintainer)
project.add_user(user_rock, :maintainer)
sign_in(user)
diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb
index a2519a44604..d6ec7f1c539 100644
--- a/spec/features/issues/incident_issue_spec.rb
+++ b/spec/features/issues/incident_issue_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe 'Incident Detail', :js do
context 'when user displays the incident' do
before do
+ stub_feature_flags(incident_timeline: project)
project.add_developer(user)
sign_in(user)
@@ -72,6 +73,12 @@ RSpec.describe 'Incident Detail', :js do
expect(hidden_items).to all(be_visible)
end
end
+
+ it 'shows the edit title and description button' do
+ edit_button = find_all('[aria-label="Edit title and description"]')
+
+ expect(edit_button).to all(be_visible)
+ end
end
context 'when on alert details tab' do
@@ -87,6 +94,61 @@ RSpec.describe 'Incident Detail', :js do
expect(hidden_items.count).to eq(0)
end
end
+
+ it 'does not show the edit title and description button' do
+ edit_button = find_all('[aria-label="Edit title and description"]')
+
+ expect(edit_button.count).to eq(0)
+ end
+ end
+
+ context 'when on timeline events tab from incident route' do
+ before do
+ visit project_issues_incident_path(project, incident)
+ wait_for_requests
+ click_link 'Timeline'
+ end
+
+ it 'does not show the linked issues and notes/comment components' do
+ page.within('.issuable-details') do
+ hidden_items = find_all('.js-issue-widgets')
+
+ # Linked Issues/MRs and comment box are hidden on page
+ expect(hidden_items.count).to eq(0)
+ end
+ end
+ end
+
+ context 'when on timeline events tab from issue route' do
+ before do
+ visit project_issue_path(project, incident)
+ wait_for_requests
+ click_link 'Timeline'
+ end
+
+ it 'does not show the linked issues and notes/comment components' do
+ page.within('.issuable-details') do
+ hidden_items = find_all('.js-issue-widgets')
+
+ # Linked Issues/MRs and comment box are hidden on page
+ expect(hidden_items.count).to eq(0)
+ end
+ end
+ end
+
+ context 'when incident_timeline feature flag is disabled' do
+ before do
+ stub_feature_flags(incident_timeline: false)
+
+ visit project_issue_path(project, incident)
+ wait_for_requests
+ end
+
+ it 'does not show Timeline tab' do
+ tabs = find('[data-testid="incident-tabs"]')
+
+ expect(tabs).not_to have_content('Timeline')
+ end
end
end
end
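Note: the incident spec above stubs `incident_timeline` with a project as the value. In GitLab's test helper, passing an actor like this generally enables the flag for that actor only; a brief usage sketch (helper internals not shown, behaviour hedged):

```ruby
stub_feature_flags(incident_timeline: false)   # disabled for everyone
stub_feature_flags(incident_timeline: true)    # enabled for everyone
stub_feature_flags(incident_timeline: project) # enabled only when checked against `project`
```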
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index 88709d66887..2af54e51bb7 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -140,13 +140,9 @@ RSpec.describe 'Issue Detail', :js do
end
context 'by non-member author' do
- it 'routes the user to the issue details page when the `issue_type` is set to issue' do
- open_issue_edit_form
-
- page.within('[data-testid="issuable-form"]') do
- update_type_select('Incident', 'Issue')
-
- expect(page).to have_current_path(project_issue_path(project, incident))
+ it 'cannot edit issuable' do
+ page.within('.content') do
+ expect(page).to have_no_button('Edit title and description')
end
end
end
diff --git a/spec/features/issues/rss_spec.rb b/spec/features/issues/rss_spec.rb
index bdc5f282875..e3faed81c73 100644
--- a/spec/features/issues/rss_spec.rb
+++ b/spec/features/issues/rss_spec.rb
@@ -13,10 +13,6 @@ RSpec.describe 'Project Issues RSS', :js do
group.add_developer(user)
end
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
context 'when signed in' do
let_it_be(:user) { create(:user) }
diff --git a/spec/features/issues/user_bulk_edits_issues_labels_spec.rb b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
index 27377f6e1fd..4837d13574c 100644
--- a/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
@@ -15,10 +15,6 @@ RSpec.describe 'Issues > Labels bulk assignment' do
let(:issue_1_selector) { "#issuable_#{issue1.id}" }
let(:issue_2_selector) { "#issuable_#{issue2.id}" }
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
context 'as an allowed user', :js do
before do
project.add_maintainer(user)
diff --git a/spec/features/issues/user_bulk_edits_issues_spec.rb b/spec/features/issues/user_bulk_edits_issues_spec.rb
index 625303f89e4..0533f1688e2 100644
--- a/spec/features/issues/user_bulk_edits_issues_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_spec.rb
@@ -107,10 +107,6 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
describe 'select all issues' do
let!(:issue_2) { create(:issue, project: project) }
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
it 'after selecting all issues, unchecking one issue only unselects that one issue' do
visit project_issues_path(project)
diff --git a/spec/features/issues/user_comments_on_issue_spec.rb b/spec/features/issues/user_comments_on_issue_spec.rb
index a719263f092..a1e7c007b90 100644
--- a/spec/features/issues/user_comments_on_issue_spec.rb
+++ b/spec/features/issues/user_comments_on_issue_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe "User comments on issue", :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(sandboxed_mermaid: false)
project.add_guest(user)
sign_in(user)
@@ -42,17 +41,6 @@ RSpec.describe "User comments on issue", :js do
expect(page.find('pre code').text).to eq code_block_content
end
- it "renders HTML content as text in Mermaid" do
- html_content = "<img onerror=location=`javascript\\u003aalert\\u0028document.domain\\u0029` src=x>"
- mermaid_content = "graph LR\n B-->D(#{html_content});"
- comment = "```mermaid\n#{mermaid_content}\n```"
-
- add_note(comment)
-
- expect(page.find('svg.mermaid')).not_to have_content 'javascript'
- within('svg.mermaid') { expect(page).not_to have_selector('img') }
- end
-
it 'opens autocomplete menu for quick actions and have `/label` first choice' do
project.add_maintainer(user)
create(:label, project: project, title: 'label')
@@ -67,7 +55,7 @@ RSpec.describe "User comments on issue", :js do
it "edits comment" do
add_note("# Comment with a header")
- page.within(".note-body > .note-text") do
+ page.within(".note-body .note-text") do
expect(page).to have_content("Comment with a header").and have_no_css("#comment-with-a-header")
end
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 3bba041dab7..151d3c60fa2 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -8,10 +8,6 @@ RSpec.describe "User creates issue" do
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:user) { create(:user) }
- before do
- stub_feature_flags(vue_issues_list: true)
- end
-
context "when unauthenticated" do
before do
sign_out(:user)
diff --git a/spec/features/issues/user_filters_issues_spec.rb b/spec/features/issues/user_filters_issues_spec.rb
index 42c2b5d32c1..2941ea6ec36 100644
--- a/spec/features/issues/user_filters_issues_spec.rb
+++ b/spec/features/issues/user_filters_issues_spec.rb
@@ -7,8 +7,6 @@ RSpec.describe 'User filters issues', :js do
let_it_be(:project) { create(:project_empty_repo, :public) }
before do
- stub_feature_flags(vue_issues_list: true)
-
%w[foobar barbaz].each do |title|
create(:issue,
author: user,
diff --git a/spec/features/issues/user_sees_breadcrumb_links_spec.rb b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
index 1577d7d5ce8..4ec13533a8d 100644
--- a/spec/features/issues/user_sees_breadcrumb_links_spec.rb
+++ b/spec/features/issues/user_sees_breadcrumb_links_spec.rb
@@ -8,8 +8,6 @@ RSpec.describe 'New issue breadcrumb' do
let(:user) { project.creator }
before do
- stub_feature_flags(vue_issues_list: true)
-
sign_in(user)
visit(new_project_issue_path(project))
end
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index 4af313576ed..7add6c782f7 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -16,8 +16,6 @@ RSpec.describe "User sorts issues" do
let_it_be(:later_due_milestone) { create(:milestone, project: project, due_date: '2013-12-12') }
before do
- stub_feature_flags(vue_issues_list: true)
-
create_list(:award_emoji, 2, :upvote, awardable: issue1)
create_list(:award_emoji, 2, :downvote, awardable: issue2)
create(:award_emoji, :downvote, awardable: issue1)
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index ea888d4b254..2f22ac8b395 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -17,8 +17,6 @@ RSpec.describe 'Labels Hierarchy', :js do
let!(:project_label_1) { create(:label, project: project_1, title: 'Label_4') }
before do
- stub_feature_flags(vue_issues_list: true)
-
grandparent.add_owner(user)
sign_in(user)
diff --git a/spec/features/markdown/math_spec.rb b/spec/features/markdown/math_spec.rb
index fa23fac2f96..1f219886818 100644
--- a/spec/features/markdown/math_spec.rb
+++ b/spec/features/markdown/math_spec.rb
@@ -67,4 +67,24 @@ RSpec.describe 'Math rendering', :js do
expect(page).to have_selector('.js-lazy-render-math')
end
end
+
+ it 'renders without any limits on wiki page', :js do
+ description = <<~MATH
+ ```math
+ \Huge \sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{\sqrt{}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}
+ ```
+ MATH
+
+ wiki_page = build(:wiki_page, { container: project, content: description })
+ wiki_page.create message: 'math test commit' # rubocop:disable Rails/SaveBang
+ wiki_page = project.wiki.find_page(wiki_page.slug)
+
+ visit project_wiki_path(project, wiki_page)
+
+ wait_for_requests
+
+ page.within '.js-wiki-page-content' do
+ expect(page).not_to have_selector('.js-lazy-render-math')
+ end
+ end
end
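For reference, the wiki setup added above can be factored into a small helper. A minimal sketch, assuming the same :wiki_page factory, project_wiki_path helper, and wait_for_requests available in GitLab feature specs (the helper name is illustrative):

# Hypothetical helper mirroring the setup in the added example above.
def create_and_visit_wiki_page(project, content)
  wiki_page = build(:wiki_page, container: project, content: content)
  wiki_page.create message: 'test commit' # rubocop:disable Rails/SaveBang
  wiki_page = project.wiki.find_page(wiki_page.slug)

  visit project_wiki_path(project, wiki_page)
  wait_for_requests
end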
diff --git a/spec/features/markdown/mermaid_spec.rb b/spec/features/markdown/mermaid_spec.rb
deleted file mode 100644
index 322b5306a00..00000000000
--- a/spec/features/markdown/mermaid_spec.rb
+++ /dev/null
@@ -1,361 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Mermaid rendering', :js do
- let_it_be(:project) { create(:project, :public) }
-
- let(:is_mac) { page.evaluate_script('navigator.platform').include?('Mac') }
- let(:modifier_key) { is_mac ? :command : :control }
-
- before do
- stub_feature_flags(sandboxed_mermaid: false)
- end
-
- it 'renders Mermaid diagrams correctly' do
- description = <<~MERMAID
- ```mermaid
- graph TD;
- A-->B;
- A-->C;
- B-->D;
- C-->D;
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- %w[A B C D].each do |label|
- expect(page).to have_selector('svg text', text: label)
- end
- end
-
- it 'renders linebreaks in Mermaid diagrams' do
- description = <<~MERMAID
- ```mermaid
- graph TD;
- A(Line 1<br>Line 2)-->B(Line 1<br/>Line 2);
- C(Line 1<br />Line 2)-->D(Line 1<br />Line 2);
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- # From https://github.com/mermaid-js/mermaid/blob/170ed89e9ef3e33dc84f8656eed1725379d505df/src/dagre-wrapper/createLabel.js#L39-L42
- expected = %(<div style="display: inline-block; white-space: nowrap;" xmlns="http://www.w3.org/1999/xhtml">Line 1<br>Line 2</div>)
- expect(page.html.scan(expected).count).to be(4)
- end
-
- it 'does not allow XSS in HTML labels' do
- description = <<~MERMAID
- ```mermaid
- graph LR;
- A-->CLICK_HERE_AND_GET_BONUS;
- click A alert "aaa"
- click CLICK_HERE_AND_GET_BONUS "javascript:alert%28%64%6f%63%75%6d%65%6e%74%2e%64%6f%6d%61%69%6e%29" "Here is the XSS"
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- # From https://github.com/mermaid-js/mermaid/blob/170ed89e9ef3e33dc84f8656eed1725379d505df/src/dagre-wrapper/createLabel.js#L39-L42
- expected = %(<div style="display: inline-block; white-space: nowrap;" xmlns="http://www.w3.org/1999/xhtml">CLICK_HERE_AND_GET_BONUS</div>)
- expect(page.html).to include(expected)
- end
-
- it 'renders only 2 Mermaid blocks', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/234081' do
- description = <<~MERMAID
- ```mermaid
- graph LR
- A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A
-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;
- ```
- ```mermaid
- graph LR
- A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A
-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;
- ```
- ```mermaid
- graph LR
- A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A
-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;B-->A;A-->B;
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- page.within('.description') do
- expect(page).to have_selector('svg')
- expect(page).to have_selector('pre.mermaid')
- end
- end
-
- it 'correctly sizes mermaid diagram inside <details> block' do
- description = <<~MERMAID
- <details>
- <summary>Click to show diagram</summary>
-
- ```mermaid
- graph TD;
- A-->B;
- A-->C;
- B-->D;
- C-->D;
- ```
-
- </details>
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- page.within('.description') do
- page.find('summary').click
- svg = page.find('svg.mermaid')
-
- expect(svg[:style]).to match(/max-width/)
- expect(svg[:width].to_i).to eq(100)
- expect(svg[:height].to_i).to be_within(5).of(236)
- end
- end
-
- it 'renders V2 state diagrams' do
- description = <<~MERMAID
- ```mermaid
- stateDiagram-v2
- [*] --> Idle
- Idle --> Active : CONTINUE
- state Active {
- [*] --> Run
- Run--> Stop: CONTINUE
- Stop--> Run: CONTINUE
-
- Run: Run
- Run: entry/start
- Run: check
- }
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- page.within('.description') do
- expect(page).to have_selector('svg')
- end
- end
-
- it 'correctly sizes mermaid diagram block' do
- description = <<~MERMAID
- ```mermaid
- graph TD;
- A-->B;
- A-->C;
- B-->D;
- C-->D;
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- expect(page).to have_css('svg.mermaid[style*="max-width"][width="100%"]')
- end
-
- it 'displays button when diagram exceeds length', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/287806' do
- graph_edges = "A-->B;B-->A;" * 420
-
- description = <<~MERMAID
- ```mermaid
- graph LR
- #{graph_edges}
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- page.within('.description') do
- expect(page).not_to have_selector('svg')
-
- expect(page).to have_selector('pre.mermaid')
-
- expect(page).to have_selector('.lazy-alert-shown')
-
- expect(page).to have_selector('.js-lazy-render-mermaid-container')
- end
-
- wait_for_requests
- wait_for_mermaid
-
- find('.js-lazy-render-mermaid').click
-
- page.within('.description') do
- expect(page).to have_selector('svg')
-
- expect(page).not_to have_selector('.js-lazy-render-mermaid-container')
- end
- end
-
- it 'does not render more than 50 mermaid blocks', :js, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/234081' } do
- graph_edges = "A-->B;B-->A;"
-
- description = <<~MERMAID
- ```mermaid
- graph LR
- #{graph_edges}
- ```
- MERMAID
-
- description *= 51
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- page.within('.description') do
- expect(page).to have_selector('svg')
-
- expect(page).to have_selector('.lazy-alert-shown')
-
- expect(page).to have_selector('.js-lazy-render-mermaid-container')
- end
- end
-
- it 'renders without any limits on wiki page', :js do
- graph_edges = "A-->B;B-->A;"
-
- description = <<~MERMAID
- ```mermaid
- graph LR
- #{graph_edges}
- ```
- MERMAID
-
- description *= 51
-
- wiki_page = build(:wiki_page, { container: project, content: description })
- wiki_page.create message: 'mermaid test commit' # rubocop:disable Rails/SaveBang
- wiki_page = project.wiki.find_page(wiki_page.slug)
-
- visit project_wiki_path(project, wiki_page)
-
- wait_for_requests
- wait_for_mermaid
-
- page.within('.js-wiki-page-content') do
- expect(page).not_to have_selector('.lazy-alert-shown')
-
- expect(page).not_to have_selector('.js-lazy-render-mermaid-container')
- end
- end
-
- it 'does not allow HTML injection' do
- description = <<~MERMAID
- ```mermaid
- %%{init: {"flowchart": {"htmlLabels": "false"}} }%%
- flowchart
- A["<iframe></iframe>"]
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- page.within('.description') do
- expect(page).not_to have_xpath("//iframe")
- end
- end
-
- it 'correctly copies and pastes to/from the clipboard' do
- stub_feature_flags(sandboxed_mermaid: true)
-
- description = <<~MERMAID
- ```mermaid
- graph TD;
- A-->B;
- A-->C;
- ```
- MERMAID
-
- issue = create(:issue, project: project, description: description)
-
- user = create(:user)
- sign_in(user)
- visit project_issue_path(project, issue)
-
- wait_for_requests
- wait_for_mermaid
-
- find('pre.language-mermaid').hover
- find('copy-code button').click
-
- sleep 2
-
- find('#note-body').send_keys [modifier_key, 'v']
-
- wait_for_requests
-
- # The codefences do actually get included, but we can't get spec to pass
- # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/83202#note_880621264
- expect(find('#note-body').value.strip).to eq("graph TD;\n A-->B;\n A-->C;")
- end
-end
-
-def wait_for_mermaid
- run_idle_callback = <<~RUN_IDLE_CALLBACK
- window.requestIdleCallback(() => {
- window.__CAPYBARA_IDLE_CALLBACK_EXEC__ = 1;
- })
- RUN_IDLE_CALLBACK
-
- page.evaluate_script(run_idle_callback)
-
- Timeout.timeout(Capybara.default_max_wait_time) do
- loop until finished_rendering?
- end
-end
-
-def finished_rendering?
- check_idle_callback = <<~CHECK_IDLE_CALLBACK
- window.__CAPYBARA_IDLE_CALLBACK_EXEC__
- CHECK_IDLE_CALLBACK
- page.evaluate_script(check_idle_callback) == 1
-end
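The deleted helper above is the general idle-callback wait pattern; a sketch of it in isolation, assuming Capybara's page and default_max_wait_time are in scope (the flag and method names are illustrative):

# Set a flag from requestIdleCallback, then poll it from Capybara until
# the browser reports it has gone idle (i.e. rendering has settled).
def wait_for_idle_rendering
  page.evaluate_script(<<~RUN_IDLE_CALLBACK)
    window.requestIdleCallback(() => {
      window.__IDLE_CALLBACK_DONE__ = 1;
    })
  RUN_IDLE_CALLBACK

  Timeout.timeout(Capybara.default_max_wait_time) do
    loop until page.evaluate_script('window.__IDLE_CALLBACK_DONE__') == 1
  end
end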
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index 9b54d95be6b..f03c812ebb5 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -13,224 +13,221 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
before do
- stub_feature_flags(paginated_notes: false)
-
project.add_maintainer(user)
sign_in(user)
+
+ visit_diffs
end
- context 'Feature is enabled' do
- before do
- visit_diffs
- end
+ it 'adds draft note' do
+ write_diff_comment
- it 'adds draft note' do
- write_diff_comment
+ expect(find('.draft-note-component')).to have_content('Line is wrong')
- expect(find('.draft-note-component')).to have_content('Line is wrong')
+ expect(page).to have_selector('[data-testid="review_bar_component"]')
- expect(page).to have_selector('[data-testid="review_bar_component"]')
+ expect(find('[data-testid="review_bar_component"] .gl-badge')).to have_content('1')
+ end
+
+ it 'publishes review' do
+ write_diff_comment
- expect(find('[data-testid="review_bar_component"] .btn-confirm')).to have_content('1')
+ page.within('.review-bar-content') do
+ click_button 'Finish review'
+ click_button 'Submit review'
end
- it 'publishes review' do
- write_diff_comment
+ wait_for_requests
- page.within('.review-bar-content') do
- click_button 'Submit review'
- end
+ expect(page).not_to have_selector('.draft-note-component', text: 'Line is wrong')
- wait_for_requests
+ expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
+ end
- expect(page).not_to have_selector('.draft-note-component', text: 'Line is wrong')
+ it 'publishes single comment' do
+ write_diff_comment
- expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
- end
+ click_button 'Add comment now'
- it 'publishes single comment' do
- write_diff_comment
+ wait_for_requests
- click_button 'Add comment now'
+ expect(page).not_to have_selector('.draft-note-component', text: 'Line is wrong')
- wait_for_requests
+ expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
+ end
+
+ it 'deletes draft note' do
+ write_diff_comment
- expect(page).not_to have_selector('.draft-note-component', text: 'Line is wrong')
+ find('.js-note-delete').click
- expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
+ page.within('.modal') do
+ click_button('Delete Comment', match: :first)
end
- it 'deletes draft note' do
- write_diff_comment
+ wait_for_requests
- find('.js-note-delete').click
+ expect(page).not_to have_selector('.draft-note-component', text: 'Line is wrong')
+ end
- page.within('.modal') do
- click_button('Delete Comment', match: :first)
- end
+ it 'edits draft note' do
+ write_diff_comment
- wait_for_requests
+ find('.js-note-edit').click
- expect(page).not_to have_selector('.draft-note-component', text: 'Line is wrong')
- end
+ # make sure comment form is in view
+ execute_script("window.scrollBy(0, 200)")
- it 'edits draft note' do
- write_diff_comment
+ write_comment(text: 'Testing update', button_text: 'Save comment')
- find('.js-note-edit').click
+ expect(page).to have_selector('.draft-note-component', text: 'Testing update')
+ end
- # make sure comment form is in view
- execute_script("window.scrollBy(0, 200)")
+ context 'with image and file draft note' do
+ let(:merge_request) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project) }
+ let!(:draft_on_text) { create(:draft_note_on_text_diff, merge_request: merge_request, author: user, path: 'README.md', note: 'Lorem ipsum on text...') }
+ let!(:draft_on_image) { create(:draft_note_on_image_diff, merge_request: merge_request, author: user, path: 'files/images/ee_repo_logo.png', note: 'Lorem ipsum on an image...') }
- write_comment(text: 'Testing update', button_text: 'Save comment')
+ it 'does not show in overview' do
+ visit_overview
- expect(page).to have_selector('.draft-note-component', text: 'Testing update')
+ expect(page).to have_no_text(draft_on_text.note)
+ expect(page).to have_no_text(draft_on_image.note)
end
+ end
- context 'with image and file draft note' do
- let(:merge_request) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project) }
- let!(:draft_on_text) { create(:draft_note_on_text_diff, merge_request: merge_request, author: user, path: 'README.md', note: 'Lorem ipsum on text...') }
- let!(:draft_on_image) { create(:draft_note_on_image_diff, merge_request: merge_request, author: user, path: 'files/images/ee_repo_logo.png', note: 'Lorem ipsum on an image...') }
-
- it 'does not show in overview' do
- visit_overview
+ context 'adding single comment to review' do
+ before do
+ visit_overview
+ end
- expect(page).to have_no_text(draft_on_text.note)
- expect(page).to have_no_text(draft_on_image.note)
- end
+ it 'at first does not show `Add to review` and `Add comment now` buttons' do
+ expect(page).to have_no_button('Add to review')
+ expect(page).to have_no_button('Add comment now')
end
- context 'adding single comment to review' do
+ context 'when review has started' do
before do
- visit_overview
- end
-
- it 'at first does not show `Add to review` and `Add comment now` buttons' do
- expect(page).to have_no_button('Add to review')
- expect(page).to have_no_button('Add comment now')
- end
-
- context 'when review has started' do
- before do
- visit_diffs
+ visit_diffs
- write_diff_comment
+ write_diff_comment
- visit_overview
- end
+ visit_overview
+ end
- it 'can add comment to review' do
- write_comment(selector: '.js-main-target-form', field: 'note-body', text: 'Its a draft comment', button_text: 'Add to review')
+ it 'can add comment to review' do
+ write_comment(selector: '.js-main-target-form', field: 'note-body', text: 'Its a draft comment', button_text: 'Add to review')
- expect(page).to have_selector('.draft-note-component', text: 'Its a draft comment')
+ expect(page).to have_selector('.draft-note-component', text: 'Its a draft comment')
- click_button('Pending comments')
+ click_button('Pending comments')
- expect(page).to have_text('2 pending comments')
- end
+ expect(page).to have_text('2 pending comments')
+ end
- it 'can add comment right away' do
- write_comment(selector: '.js-main-target-form', field: 'note-body', text: 'Its a regular comment', button_text: 'Add comment now')
+ it 'can add comment right away' do
+ write_comment(selector: '.js-main-target-form', field: 'note-body', text: 'Its a regular comment', button_text: 'Add comment now')
- expect(page).to have_selector('.note:not(.draft-note)', text: 'Its a regular comment')
+ expect(page).to have_selector('.note:not(.draft-note)', text: 'Its a regular comment')
- click_button('Pending comments')
+ click_button('Pending comments')
- expect(page).to have_text('1 pending comment')
- end
+ expect(page).to have_text('1 pending comment')
end
end
+ end
- context 'in parallel diff' do
- before do
- find('.js-show-diff-settings').click
- click_button 'Side-by-side'
- find('.js-show-diff-settings').click
- end
+ context 'in parallel diff' do
+ before do
+ find('.js-show-diff-settings').click
+ click_button 'Side-by-side'
+ find('.js-show-diff-settings').click
+ end
- it 'adds draft comments to both sides' do
- write_parallel_comment('2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9')
- write_parallel_comment('2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9', button_text: 'Add to review', text: 'Another wrong line')
+ it 'adds draft comments to both sides' do
+ write_parallel_comment('2f6fcd96b88b36ce98c38da085c795a27d92a3dd_10_9')
+ write_parallel_comment('2f6fcd96b88b36ce98c38da085c795a27d92a3dd_9_9', button_text: 'Add to review', text: 'Another wrong line')
- expect(find('.new .draft-note-component')).to have_content('Line is wrong')
- expect(find('.old .draft-note-component')).to have_content('Another wrong line')
+ expect(find('.new .draft-note-component')).to have_content('Line is wrong')
+ expect(find('.old .draft-note-component')).to have_content('Another wrong line')
- expect(find('.review-bar-content .btn-confirm')).to have_content('2')
- end
+ expect(find('.review-bar-content .gl-badge')).to have_content('2')
end
+ end
- context 'thread is unresolved' do
- let!(:active_discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
+ context 'thread is unresolved' do
+ let!(:active_discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
- before do
- visit_diffs
- end
+ before do
+ visit_diffs
+ end
- it 'publishes comment right away and resolves the thread' do
- expect(active_discussion.resolved?).to eq(false)
+ it 'publishes comment right away and resolves the thread' do
+ expect(active_discussion.resolved?).to eq(false)
- write_reply_to_discussion(button_text: 'Add comment now', resolve: true)
+ write_reply_to_discussion(button_text: 'Add comment now', resolve: true)
- page.within '.discussions-counter' do
- expect(page).to have_content('All threads resolved')
- end
+ page.within '.discussions-counter' do
+ expect(page).to have_content('All threads resolved')
end
+ end
- it 'publishes review and resolves the thread' do
- expect(active_discussion.resolved?).to eq(false)
+ it 'publishes review and resolves the thread' do
+ expect(active_discussion.resolved?).to eq(false)
- write_reply_to_discussion(resolve: true)
+ write_reply_to_discussion(resolve: true)
- page.within('.review-bar-content') do
- click_button 'Submit review'
- end
+ page.within('.review-bar-content') do
+ click_button 'Finish review'
+ click_button 'Submit review'
+ end
- wait_for_requests
+ wait_for_requests
- page.within '.discussions-counter' do
- expect(page).to have_content('All threads resolved')
- end
+ page.within '.discussions-counter' do
+ expect(page).to have_content('All threads resolved')
end
end
+ end
- context 'thread is resolved' do
- let!(:active_discussion) { create(:diff_note_on_merge_request, :resolved, noteable: merge_request, project: project).to_discussion }
+ context 'thread is resolved' do
+ let!(:active_discussion) { create(:diff_note_on_merge_request, :resolved, noteable: merge_request, project: project).to_discussion }
- before do
- active_discussion.resolve!(@current_user)
+ before do
+ active_discussion.resolve!(@current_user)
- visit_diffs
+ visit_diffs
- page.find('.js-diff-comment-avatar').click
- end
+ page.find('.js-diff-comment-avatar').click
+ end
- it 'publishes comment right away and unresolves the thread' do
- expect(active_discussion.resolved?).to eq(true)
+ it 'publishes comment right away and unresolves the thread' do
+ expect(active_discussion.resolved?).to eq(true)
- write_reply_to_discussion(button_text: 'Add comment now', unresolve: true)
+ write_reply_to_discussion(button_text: 'Add comment now', unresolve: true)
- page.within '.discussions-counter' do
- expect(page).to have_content('1 unresolved thread')
- end
+ page.within '.discussions-counter' do
+ expect(page).to have_content('1 unresolved thread')
end
+ end
- it 'publishes review and unresolves the thread' do
- expect(active_discussion.resolved?).to eq(true)
+ it 'publishes review and unresolves the thread' do
+ expect(active_discussion.resolved?).to eq(true)
- wait_for_requests
+ wait_for_requests
- write_reply_to_discussion(button_text: 'Start a review', unresolve: true)
+ write_reply_to_discussion(button_text: 'Start a review', unresolve: true)
- page.within('.review-bar-content') do
- click_button 'Submit review'
- end
+ page.within('.review-bar-content') do
+ click_button 'Finish review'
+ click_button 'Submit review'
+ end
- wait_for_requests
+ wait_for_requests
- page.within '.discussions-counter' do
- expect(page).to have_content('1 unresolved thread')
- end
+ page.within '.discussions-counter' do
+ expect(page).to have_content('1 unresolved thread')
end
end
end
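The rewritten batch comments spec above submits reviews through a two-step flow. A sketch of a shared helper for that flow, assuming the same button labels and .review-bar-content markup shown in the hunks:

# Hypothetical helper: finish and submit the pending review from the review bar.
def submit_review
  page.within('.review-bar-content') do
    click_button 'Finish review'
    click_button 'Submit review'
  end

  wait_for_requests
end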
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 99756da51e4..06b29969775 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe 'User comments on a diff', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/merge_request/user_creates_merge_request_spec.rb b/spec/features/merge_request/user_creates_merge_request_spec.rb
index a3dc3079374..2bf8e9ba6a4 100644
--- a/spec/features/merge_request/user_creates_merge_request_spec.rb
+++ b/spec/features/merge_request/user_creates_merge_request_spec.rb
@@ -30,7 +30,6 @@ RSpec.describe "User creates a merge request", :js do
it "shows merge request form" do
page.within('.merge-request-form') do
- expect(page.find('#merge_request_title')['placeholder']).to eq 'Title'
expect(page.find('#merge_request_description')['placeholder']).to eq 'Describe the goal of the changes and what reviewers should be aware of.'
end
end
@@ -100,7 +99,7 @@ RSpec.describe "User creates a merge request", :js do
click_button("Compare branches and continue")
- expect(page).to have_css("h3.page-title", text: "New merge request")
+ expect(page).to have_text _('New merge request')
page.within("form#new_merge_request") do
fill_in("Title", with: title)
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 64715f9234a..8a310aba77b 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
project.add_developer(user)
sign_in(user)
- stub_feature_flags(bootstrap_confirmation_modals: false)
+ stub_const('Gitlab::QueryLimiting::Transaction::THRESHOLD', 104)
end
context 'when hovering over a parallel view diff file' do
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index ad602afe68a..844ef6133c8 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe 'Merge request > User posts notes', :js do
end
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
project.add_maintainer(user)
sign_in(user)
@@ -158,7 +157,7 @@ RSpec.describe 'Merge request > User posts notes', :js do
page.within("#note_#{note.id}") do
expect(find('.current-note-edit-form', visible: true)).to be_visible
expect(find('.note-edit-form', visible: true)).to be_visible
- expect(find(:css, '.note-body > .note-text', visible: false)).not_to be_visible
+ expect(find(:css, '.note-body .note-text', visible: false)).not_to be_visible
end
end
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index 982e75760d7..a04ca4e789c 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -178,23 +178,6 @@ RSpec.describe 'Merge request > User resolves conflicts', :js do
end
end
- context 'sidebar' do
- let(:merge_request) { create_merge_request('conflict-resolvable') }
-
- before do
- project.add_developer(user)
- sign_in(user)
-
- visit conflicts_project_merge_request_path(project, merge_request)
- end
-
- it 'displays reviewers' do
- page.within '.issuable-sidebar' do
- expect(page).to have_selector('[data-testid="reviewer"]', count: 1)
- end
- end
- end
-
unresolvable_conflicts = {
'conflict-too-large' => 'when the conflicts contain a large file',
'conflict-binary-file' => 'when the conflicts contain a binary file',
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index fca40dc7edc..0e9ff98c3e1 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -83,7 +83,6 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do
%w(parallel).each do |view|
context "#{view} view" do
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
visit diffs_project_merge_request_path(project, merge_request, view: view)
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index 01cc58777ba..81034caaee2 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -109,10 +109,12 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
end
context 'with stop action' do
- let(:manual) { create(:ci_build, :manual, pipeline: pipeline, name: 'close_app') }
+ let(:manual) do
+ create(:ci_build, :manual, pipeline: pipeline,
+ name: 'close_app', environment: environment.name)
+ end
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
build.success!
deployment.update!(on_stop: manual.name)
visit project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_discussions_spec.rb b/spec/features/merge_request/user_sees_discussions_spec.rb
index d79763ba5e0..cc477e363a4 100644
--- a/spec/features/merge_request/user_sees_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_discussions_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe 'Merge request > User sees threads', :js do
within(".discussion[data-discussion-id='#{discussion_id}']") do
find_field('Reply…').click
fill_in 'note[note]', with: 'Test!'
- click_button 'Comment'
+ click_button 'Reply'
expect(page).to have_css('.note', count: 2)
end
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 2b856811e02..4465d7e29be 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -101,6 +101,7 @@ RSpec.describe 'Merge request > User sees versions', :js do
outdated_diff_note.save!
refresh
+ wait_for_requests
expect(page).to have_css(".diffs .notes[data-discussion-id='#{outdated_diff_note.discussion_id}']")
end
diff --git a/spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb b/spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb
index f637186ec67..b214486b3c1 100644
--- a/spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb
@@ -10,16 +10,6 @@ RSpec.describe 'Project > Merge request > View user status' do
subject { visit merge_request_path(merge_request) }
- describe 'the status of the merge request author' do
- before do
- stub_feature_flags(updated_mr_header: false)
- end
-
- it_behaves_like 'showing user status' do
- let(:user_with_status) { merge_request.author }
- end
- end
-
context 'for notes', :js do
describe 'the status of the author of a note on a merge request' do
let(:note) { create(:note, noteable: merge_request, project: project, author: create(:user)) }
diff --git a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
index 459145d3ef0..4a124299c61 100644
--- a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
@@ -17,8 +17,6 @@ RSpec.describe 'User sorts merge requests', :js do
let_it_be(:project) { create(:project, :public, group: group) }
before do
- stub_feature_flags(vue_issues_list: true)
-
sign_in(user)
visit(project_merge_requests_path(project))
diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb
index b9594293996..5bbd89f1b88 100644
--- a/spec/features/milestone_spec.rb
+++ b/spec/features/milestone_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe 'Milestone' do
end
find('input[name="commit"]').click
- expect(find('.alert-danger')).to have_content('already being used for another group or project milestone.')
+ expect(find('.gl-alert-danger')).to have_content('already being used for another group or project milestone.')
end
end
@@ -122,8 +122,8 @@ RSpec.describe 'Milestone' do
click_link 'Reopen Milestone'
- expect(page).not_to have_selector('.status-box-closed')
- expect(page).to have_selector('.status-box-open')
+ expect(page).not_to have_selector('.gl-bg-red-500')
+ expect(page).to have_selector('.gl-bg-green-500')
end
end
@@ -133,8 +133,8 @@ RSpec.describe 'Milestone' do
click_link 'Reopen Milestone'
- expect(page).not_to have_selector('.status-box-closed')
- expect(page).to have_selector('.status-box-open')
+ expect(page).not_to have_selector('.gl-bg-red-500')
+ expect(page).to have_selector('.gl-bg-green-500')
end
end
end
diff --git a/spec/features/nav/top_nav_responsive_spec.rb b/spec/features/nav/top_nav_responsive_spec.rb
index 5c6a12a37a3..d571327e4b5 100644
--- a/spec/features/nav/top_nav_responsive_spec.rb
+++ b/spec/features/nav/top_nav_responsive_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe 'top nav responsive', :js do
end
it 'has new dropdown', :aggregate_failures do
- click_button('New...')
+ click_button('Create new')
expect(page).to have_link('New project', href: new_project_path)
expect(page).to have_link('New group', href: new_group_path)
diff --git a/spec/features/nav/top_nav_tooltip_spec.rb b/spec/features/nav/top_nav_tooltip_spec.rb
new file mode 100644
index 00000000000..58bfe1caf65
--- /dev/null
+++ b/spec/features/nav/top_nav_tooltip_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'top nav tooltips', :js do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ visit explore_projects_path
+ end
+
+ it 'clicking new dropdown hides tooltip', :aggregate_failures do
+ btn = '#js-onboarding-new-project-link'
+
+ page.find(btn).hover
+
+ expect(page).to have_content('Create new')
+
+ page.find(btn).click
+
+ expect(page).not_to have_content('Create new')
+ end
+end
diff --git a/spec/features/oauth_registration_spec.rb b/spec/features/oauth_registration_spec.rb
new file mode 100644
index 00000000000..18dd10755b1
--- /dev/null
+++ b/spec/features/oauth_registration_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection do
+ include DeviseHelpers
+ include LoginHelpers
+ include TermsHelper
+ using RSpec::Parameterized::TableSyntax
+
+ around do |example|
+ with_omniauth_full_host { example.run }
+ end
+
+ context 'when the user registers using single-sign on provider' do
+ let(:uid) { 'my-uid' }
+ let(:email) { 'user@example.com' }
+
+ where(:provider, :additional_info) do
+ :github | {}
+ :twitter | {}
+ :bitbucket | {}
+ :gitlab | {}
+ :google_oauth2 | {}
+ :facebook | {}
+ :cas3 | {}
+ :auth0 | {}
+ :authentiq | {}
+ :salesforce | { extra: { email_verified: true } }
+ :dingtalk | {}
+ :alicloud | {}
+ end
+
+ with_them do
+ before do
+ stub_omniauth_provider(provider)
+ stub_feature_flags(update_oauth_registration_flow: true)
+ end
+
+ context 'when block_auto_created_users is true' do
+ before do
+ stub_omniauth_setting(block_auto_created_users: true)
+ end
+
+ it 'redirects back to the sign-in page' do
+ register_via(provider, uid, email, additional_info: additional_info)
+
+ expect(page).to have_current_path new_user_session_path
+ expect(page).to have_content('Your account is pending approval')
+ end
+ end
+
+ context 'when block_auto_created_users is false' do
+ before do
+ stub_omniauth_setting(block_auto_created_users: false)
+ end
+
+ it 'redirects to the initial welcome path' do
+ register_via(provider, uid, email, additional_info: additional_info)
+
+ expect(page).to have_current_path users_sign_up_welcome_path
+ expect(page).to have_content('Welcome to GitLab, mockuser!')
+ end
+
+ context 'when terms are enforced' do
+ before do
+ enforce_terms
+ end
+
+ it 'auto accepts terms and redirects to the initial welcome path' do
+ register_via(provider, uid, email, additional_info: additional_info)
+
+ expect(page).to have_current_path users_sign_up_welcome_path
+ expect(page).to have_content('Welcome to GitLab, mockuser!')
+ end
+ end
+
+ context 'when provider does not send a verified email address' do
+ let(:email) { 'temp-email-for-oauth@email.com' }
+
+ it 'redirects to the profile path' do
+ register_via(provider, uid, email, additional_info: additional_info)
+
+ expect(page).to have_current_path profile_path
+ expect(page).to have_content('Please complete your profile with email address')
+ end
+ end
+ end
+ end
+ end
+end
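The new OAuth spec drives its providers through RSpec::Parameterized::TableSyntax. A minimal sketch of that pattern, assuming the rspec-parameterized gem is loaded by spec_helper:

RSpec.describe 'parameterized providers' do
  using RSpec::Parameterized::TableSyntax

  # Each table row becomes its own generated example.
  where(:provider, :additional_info) do
    :github     | {}
    :salesforce | { extra: { email_verified: true } }
  end

  with_them do
    it 'exposes the row values as methods' do
      expect(provider).to be_a(Symbol)
      expect(additional_info).to be_a(Hash)
    end
  end
end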
diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb
index 36657406303..1013937ebb9 100644
--- a/spec/features/profile_spec.rb
+++ b/spec/features/profile_spec.rb
@@ -3,10 +3,11 @@
require 'spec_helper'
RSpec.describe 'Profile account page', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:user) { create(:user) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
@@ -65,11 +66,17 @@ RSpec.describe 'Profile account page', :js do
it 'allows resetting of feed token' do
visit profile_personal_access_tokens_path
+ previous_token = ''
+
within('[data-testid="feed-token-container"]') do
previous_token = find_field('Feed token').value
- accept_confirm { click_link('reset this token') }
+ click_link('reset this token')
+ end
+ accept_gl_confirm
+
+ within('[data-testid="feed-token-container"]') do
click_button('Click to reveal')
expect(find_field('Feed token').value).not_to eq(previous_token)
@@ -81,11 +88,17 @@ RSpec.describe 'Profile account page', :js do
visit profile_personal_access_tokens_path
+ previous_token = ''
+
within('[data-testid="incoming-email-token-container"]') do
previous_token = find_field('Incoming email token').value
- accept_confirm { click_link('reset this token') }
+ click_link('reset this token')
+ end
+
+ accept_gl_confirm
+ within('[data-testid="incoming-email-token-container"]') do
click_button('Click to reveal')
expect(find_field('Incoming email token').value).not_to eq(previous_token)
diff --git a/spec/features/profiles/active_sessions_spec.rb b/spec/features/profiles/active_sessions_spec.rb
index a515c7b1c1f..24c9225532b 100644
--- a/spec/features/profiles/active_sessions_spec.rb
+++ b/spec/features/profiles/active_sessions_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:user) do
create(:user).tap do |user|
user.current_sign_in_at = Time.current
@@ -11,10 +13,6 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
let(:admin) { create(:admin) }
- before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
- end
-
it 'user sees their active sessions' do
travel_to(Time.zone.parse('2018-03-12 09:06')) do
Capybara::Session.new(:session1)
@@ -101,7 +99,9 @@ RSpec.describe 'Profile > Active Sessions', :clean_gitlab_redis_shared_state do
expect(page).to have_link('Revoke', count: 1)
- accept_confirm { click_on 'Revoke' }
+ accept_gl_confirm(button_text: 'Revoke') do
+ click_on 'Revoke'
+ end
expect(page).not_to have_link('Revoke')
end
diff --git a/spec/features/profiles/oauth_applications_spec.rb b/spec/features/profiles/oauth_applications_spec.rb
index 9d79041dc9d..ee1daf69f62 100644
--- a/spec/features/profiles/oauth_applications_spec.rb
+++ b/spec/features/profiles/oauth_applications_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe 'Profile > Applications' do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:user) { create(:user) }
let(:application) { create(:oauth_application, owner: user) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
@@ -25,9 +26,11 @@ RSpec.describe 'Profile > Applications' do
page.within('.oauth-applications') do
expect(page).to have_content('Your applications (1)')
- accept_confirm { click_button 'Destroy' }
+ click_button 'Destroy'
end
+ accept_gl_confirm(button_text: 'Destroy')
+
expect(page).to have_content('The application was deleted successfully')
expect(page).to have_content('Your applications (0)')
expect(page).to have_content('Authorized applications (0)')
@@ -39,9 +42,11 @@ RSpec.describe 'Profile > Applications' do
page.within('.oauth-authorized-applications') do
expect(page).to have_content('Authorized applications (1)')
- accept_confirm { click_button 'Revoke' }
+ click_button 'Revoke'
end
+ accept_gl_confirm(button_text: 'Revoke application')
+
expect(page).to have_content('The application was revoked access.')
expect(page).to have_content('Your applications (0)')
expect(page).to have_content('Authorized applications (0)')
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb
index 8cbc0491441..bca1bc4df4d 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/profiles/personal_access_tokens_spec.rb
@@ -3,34 +3,24 @@
require 'spec_helper'
RSpec.describe 'Profile > Personal Access Tokens', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:user) { create(:user) }
let(:pat_create_service) { double('PersonalAccessTokens::CreateService', execute: ServiceResponse.error(message: 'error', payload: { personal_access_token: PersonalAccessToken.new })) }
def active_personal_access_tokens
- find(".table.active-tokens")
- end
-
- def no_personal_access_tokens_message
- find(".settings-message")
+ find("[data-testid='active-tokens']")
end
def created_personal_access_token
- find("#created-personal-access-token").value
+ find_field('new-access-token').value
end
def feed_token_description
"Your feed token authenticates you when your RSS reader loads a personalized RSS feed or when your calendar application loads a personalized calendar. It is visible in those feed URLs."
end
- def disallow_personal_access_token_saves!
- allow(PersonalAccessTokens::CreateService).to receive(:new).and_return(pat_create_service)
-
- errors = ActiveModel::Errors.new(PersonalAccessToken.new).tap { |e| e.add(:name, "cannot be nil") }
- allow_any_instance_of(PersonalAccessToken).to receive(:errors).and_return(errors)
- end
-
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
@@ -51,6 +41,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
check "read_user"
click_on "Create personal access token"
+ wait_for_all_requests
expect(active_personal_access_tokens).to have_text(name)
expect(active_personal_access_tokens).to have_text('in')
@@ -61,13 +52,16 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
context "when creation fails" do
it "displays an error message" do
- disallow_personal_access_token_saves!
+ number_tokens_before = PersonalAccessToken.count
visit profile_personal_access_tokens_path
fill_in "Token name", with: 'My PAT'
- expect { click_on "Create personal access token" }.not_to change { PersonalAccessToken.count }
- expect(page).to have_content("Name cannot be nil")
- expect(page).not_to have_selector("#created-personal-access-token")
+ click_on "Create personal access token"
+ wait_for_all_requests
+
+ expect(number_tokens_before).to equal(PersonalAccessToken.count)
+ expect(page).to have_content(_("Scopes can't be blank"))
+ expect(page).not_to have_selector("[data-testid='new-access-tokens']")
end
end
end
@@ -101,31 +95,27 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
it "allows revocation of an active token" do
visit profile_personal_access_tokens_path
- accept_confirm { click_on "Revoke" }
+ accept_gl_confirm(button_text: 'Revoke') { click_on "Revoke" }
- expect(page).to have_selector(".settings-message")
- expect(no_personal_access_tokens_message).to have_text("This user has no active personal access tokens.")
+ expect(active_personal_access_tokens).to have_text("This user has no active personal access tokens.")
end
it "removes expired tokens from 'active' section" do
personal_access_token.update!(expires_at: 5.days.ago)
visit profile_personal_access_tokens_path
- expect(page).to have_selector(".settings-message")
- expect(no_personal_access_tokens_message).to have_text("This user has no active personal access tokens.")
+ expect(active_personal_access_tokens).to have_text("This user has no active personal access tokens.")
end
context "when revocation fails" do
it "displays an error message" do
- visit profile_personal_access_tokens_path
-
allow_next_instance_of(PersonalAccessTokens::RevokeService) do |instance|
allow(instance).to receive(:revocation_permitted?).and_return(false)
end
+ visit profile_personal_access_tokens_path
- accept_confirm { click_on "Revoke" }
+ accept_gl_confirm(button_text: "Revoke") { click_on "Revoke" }
expect(active_personal_access_tokens).to have_text(personal_access_token.name)
- expect(page).to have_content("Not permitted to revoke")
end
end
end
diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb
index 7d545711997..8b1af283765 100644
--- a/spec/features/profiles/user_visits_profile_spec.rb
+++ b/spec/features/profiles/user_visits_profile_spec.rb
@@ -89,6 +89,10 @@ RSpec.describe 'User visits their profile' do
end
describe 'storage_enforcement_banner', :js do
+ before do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
+ end
+
context 'with storage_enforcement_date set' do
let_it_be(:storage_enforcement_date) { Date.today + 30 }
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
index cc59fea173b..89dbd1afc6b 100644
--- a/spec/features/project_variables_spec.rb
+++ b/spec/features/project_variables_spec.rb
@@ -12,28 +12,36 @@ RSpec.describe 'Project variables', :js do
sign_in(user)
project.add_maintainer(user)
project.variables << variable
- visit page_path
end
- it_behaves_like 'variable list'
-
- it 'adds a new variable with an environment scope' do
- click_button('Add variable')
+ # TODO: Add the same tests in a context with the FF enabled once
+ # the new GraphQL app for variable settings is enabled.
+ context 'with disabled ff `ci_variable_settings_graphql`' do
+ before do
+ stub_feature_flags(ci_variable_settings_graphql: false)
+ visit page_path
+ end
- page.within('#add-ci-variable') do
- fill_in 'Key', with: 'akey'
- find('#ci-variable-value').set('akey_value')
- find('[data-testid="environment-scope"]').click
- find('[data-testid="ci-environment-search"]').set('review/*')
- find('[data-testid="create-wildcard-button"]').click
+ it_behaves_like 'variable list'
+ it 'adds a new variable with an environment scope' do
click_button('Add variable')
- end
- wait_for_requests
+ page.within('#add-ci-variable') do
+ fill_in 'Key', with: 'akey'
+ find('#ci-variable-value').set('akey_value')
+ find('[data-testid="environment-scope"]').click
+ find('[data-testid="ci-environment-search"]').set('review/*')
+ find('[data-testid="create-wildcard-button"]').click
+
+ click_button('Add variable')
+ end
+
+ wait_for_requests
- page.within('[data-testid="ci-variable-table"]') do
- expect(find('.js-ci-variable-row:first-child [data-label="Environments"]').text).to eq('review/*')
+ page.within('[data-testid="ci-variable-table"]') do
+ expect(find('.js-ci-variable-row:first-child [data-label="Environments"]').text).to eq('review/*')
+ end
end
end
end
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index 0d08e7ea10d..a89fed3a78a 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -3,6 +3,8 @@
require "spec_helper"
RSpec.describe "User deletes branch", :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let_it_be(:user) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -24,9 +26,7 @@ RSpec.describe "User deletes branch", :js do
find('.js-delete-branch-button').click
end
- page.within '.modal-footer' do
- click_button 'Yes, delete branch'
- end
+ accept_gl_confirm(button_text: 'Yes, delete branch')
wait_for_requests
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index 2f960c09936..8197fe46c7b 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe 'Pipeline Editor', :js do
let(:other_branch) { 'test' }
before do
- stub_feature_flags(pipeline_editor_file_tree: false)
-
sign_in(user)
project.add_developer(user)
@@ -70,14 +68,8 @@ RSpec.describe 'Pipeline Editor', :js do
expect(page).to have_content('Pipeline Editor')
end
- describe 'Branch Switcher (pipeline_editor_file_tree disabled)' do
- it_behaves_like 'default branch switcher behavior'
- end
-
- describe 'Branch Switcher (pipeline_editor_file_tree enabled)' do
+ describe 'Branch Switcher' do
before do
- stub_feature_flags(pipeline_editor_file_tree: true)
-
visit project_ci_pipeline_editor_path(project)
wait_for_requests
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index a8a23ba1c85..5c54b7fda7c 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -66,9 +66,9 @@ RSpec.describe 'Gcp Cluster', :js do
context 'when user destroys the cluster' do
before do
click_link 'Advanced Settings'
- click_button 'Remove integration and resources'
+ find('[data-testid="remove-integration-button"]').click
fill_in 'confirm_cluster_name_input', with: cluster.name
- click_button 'Remove integration'
+ find('[data-testid="remove-integration-modal-button"]').click
click_link 'Certificate'
end
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index b6bfaa3a9b9..527d038f975 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -100,9 +100,9 @@ RSpec.describe 'User Cluster', :js do
context 'when user destroys the cluster' do
before do
click_link 'Advanced Settings'
- click_button 'Remove integration and resources'
+ find('[data-testid="remove-integration-button"]').click
fill_in 'confirm_cluster_name_input', with: cluster.name
- click_button 'Remove integration'
+ find('[data-testid="remove-integration-modal-button"]').click
click_link 'Certificate'
end
diff --git a/spec/features/projects/commit/comments/user_deletes_comments_spec.rb b/spec/features/projects/commit/comments/user_deletes_comments_spec.rb
index 67d3276fc14..9059f9e4857 100644
--- a/spec/features/projects/commit/comments/user_deletes_comments_spec.rb
+++ b/spec/features/projects/commit/comments/user_deletes_comments_spec.rb
@@ -4,6 +4,7 @@ require "spec_helper"
RSpec.describe "User deletes comments on a commit", :js do
include Spec::Support::Helpers::Features::NotesHelpers
+ include Spec::Support::Helpers::ModalHelpers
include RepoHelpers
let(:comment_text) { "XML attached" }
@@ -11,7 +12,6 @@ RSpec.describe "User deletes comments on a commit", :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
project.add_developer(user)
@@ -32,9 +32,11 @@ RSpec.describe "User deletes comments on a commit", :js do
find(".more-actions").click
find(".more-actions .dropdown-menu li", match: :first)
- accept_confirm { find(".js-note-delete").click }
+ find(".js-note-delete").click
end
+ accept_gl_confirm(button_text: 'Delete comment')
+
expect(page).not_to have_css(".note")
end
end
diff --git a/spec/features/projects/commit/user_comments_on_commit_spec.rb b/spec/features/projects/commit/user_comments_on_commit_spec.rb
index b0be6edb245..a7f23f093a3 100644
--- a/spec/features/projects/commit/user_comments_on_commit_spec.rb
+++ b/spec/features/projects/commit/user_comments_on_commit_spec.rb
@@ -4,6 +4,7 @@ require "spec_helper"
RSpec.describe "User comments on commit", :js do
include Spec::Support::Helpers::Features::NotesHelpers
+ include Spec::Support::Helpers::ModalHelpers
include RepoHelpers
let_it_be(:project) { create(:project, :repository) }
@@ -93,8 +94,6 @@ RSpec.describe "User comments on commit", :js do
context "when deleting comment" do
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
-
visit(project_commit_path(project, sample_commit.id))
add_note(comment_text)
@@ -112,9 +111,11 @@ RSpec.describe "User comments on commit", :js do
find(".more-actions").click
find(".more-actions .dropdown-menu li", match: :first)
- accept_confirm { find(".js-note-delete").click }
+ find(".js-note-delete").click
end
+ accept_gl_confirm(button_text: 'Delete comment')
+
expect(page).not_to have_css(".note")
end
end
diff --git a/spec/features/projects/commits/multi_view_diff_spec.rb b/spec/features/projects/commits/multi_view_diff_spec.rb
index 009dd05c6d1..282112a3767 100644
--- a/spec/features/projects/commits/multi_view_diff_spec.rb
+++ b/spec/features/projects/commits/multi_view_diff_spec.rb
@@ -18,71 +18,77 @@ RSpec.describe 'Multiple view Diffs', :js do
let(:feature_flag_on) { false }
before do
- stub_feature_flags(rendered_diffs_viewer: feature_flag_on ? project : false)
-
visit path
wait_for_all_requests
end
- context 'when :rendered_diffs_viewer is off' do
- context 'and diff does not have ipynb' do
- it_behaves_like "no multiple viewers", 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ context 'diff does not include ipynb' do
+ it_behaves_like "no multiple viewers", 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+
+ context 'and in inline diff' do
+ let(:ref) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+
+ it 'does not change display for non-ipynb' do
+ expect(page).to have_selector line_with_content('new', 1)
+ end
end
- context 'and diff has ipynb' do
- it_behaves_like "no multiple viewers", '5d6ed1503801ca9dc28e95eeb85a7cf863527aee'
+ context 'and in parallel diff' do
+ let(:ref) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+
+ it 'does not change display for non-ipynb' do
+ page.find('#parallel-diff-btn').click
+
+ expect(page).to have_selector line_with_content('new', 1)
+ end
end
end
- context 'when :rendered_diffs_viewer is on' do
- let(:feature_flag_on) { true }
+ context 'opening a diff with ipynb' do
+ it 'loads the rendered diff as hidden' do
+ diff = page.find('.diff-file, .file-holder', match: :first)
- context 'and diff does not include ipynb' do
- it_behaves_like "no multiple viewers", 'ddd0f15ae83993f5cb66a927a28673882e99100b'
+ expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
+ expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
- context 'and in inline diff' do
- let(:ref) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+ expect(classes_for_element(diff, 'toHide', visible: false)).to include('hidden')
+ expect(classes_for_element(diff, 'toShow')).not_to include('hidden')
- it 'does not change display for non-ipynb' do
- expect(page).to have_selector line_with_content('new', 1)
- end
- end
+ expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
+ expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
+ end
- context 'and in parallel diff' do
- let(:ref) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
+ it 'displays the rendered diff and hides after selection changes' do
+ diff = page.find('.diff-file, .file-holder', match: :first)
+ diff.find('[data-diff-toggle-entity="toShowBtn"]').click
- it 'does not change display for non-ipynb' do
- page.find('#parallel-diff-btn').click
+ expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
+ expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
- expect(page).to have_selector line_with_content('new', 1)
- end
- end
+ expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
+ expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
end
- context 'and opening a diff with ipynb' do
- it 'loads the rendered diff as hidden' do
- diff = page.find('.diff-file, .file-holder', match: :first)
-
- expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
- expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
+ it 'transforms the diff' do
+ diff = page.find('.diff-file, .file-holder', match: :first)
- expect(classes_for_element(diff, 'toHide', visible: false)).to include('hidden')
- expect(classes_for_element(diff, 'toShow')).not_to include('hidden')
+ expect(diff['innerHTML']).to include('%% Cell type:markdown id:0aac5da7-745c-4eda-847a-3d0d07a1bb9b tags:')
+ end
- expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
- expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
+ context 'on parallel view' do
+ before do
+ page.find('#parallel-diff-btn').click
end
- it 'displays the rendered diff and hides after selection changes' do
- diff = page.find('.diff-file, .file-holder', match: :first)
- diff.find('[data-diff-toggle-entity="toShowBtn"]').click
-
- expect(diff).to have_selector '[data-diff-toggle-entity="toShow"]'
- expect(diff).not_to have_selector '[data-diff-toggle-entity="toHide"]'
+ it 'lines without mapping cannot receive comments' do
+ expect(page).not_to have_selector('td.line_content.nomappinginraw ~ td.diff-line-num > .add-diff-note')
+ expect(page).to have_selector('td.line_content:not(.nomappinginraw) ~ td.diff-line-num > .add-diff-note')
+ end
- expect(classes_for_element(diff, 'toHideBtn')).not_to include('selected')
- expect(classes_for_element(diff, 'toShowBtn')).to include('selected')
+ it 'line numbers without mapping are empty' do
+ expect(page).not_to have_selector('td.nomappinginraw + td.diff-line-num')
+ expect(page).to have_selector('td.nomappinginraw + td.diff-line-num', visible: false)
end
it 'transforms the diff' do
@@ -90,40 +96,18 @@ RSpec.describe 'Multiple view Diffs', :js do
expect(diff['innerHTML']).to include('%% Cell type:markdown id:0aac5da7-745c-4eda-847a-3d0d07a1bb9b tags:')
end
+ end
- context 'on parallel view' do
- before do
- page.find('#parallel-diff-btn').click
- end
-
- it 'lines without mapping cannot receive comments' do
- expect(page).not_to have_selector('td.line_content.nomappinginraw ~ td.diff-line-num > .add-diff-note')
- expect(page).to have_selector('td.line_content:not(.nomappinginraw) ~ td.diff-line-num > .add-diff-note')
- end
-
- it 'lines numbers without mapping are empty' do
- expect(page).not_to have_selector('td.nomappinginraw + td.diff-line-num')
- expect(page).to have_selector('td.nomappinginraw + td.diff-line-num', visible: false)
- end
-
- it 'transforms the diff' do
- diff = page.find('.diff-file, .file-holder', match: :first)
-
- expect(diff['innerHTML']).to include('%% Cell type:markdown id:0aac5da7-745c-4eda-847a-3d0d07a1bb9b tags:')
- end
+ context 'on inline view' do
+ it 'lines without mapping cannot receive comments' do
+ expect(page).not_to have_selector('tr.line_holder[class$="nomappinginraw"] > td.diff-line-num > .add-diff-note')
+ expect(page).to have_selector('tr.line_holder:not([class$="nomappinginraw"]) > td.diff-line-num > .add-diff-note')
end
- context 'on inline view' do
- it 'lines without mapping cannot receive comments' do
- expect(page).not_to have_selector('tr.line_holder[class$="nomappinginraw"] > td.diff-line-num > .add-diff-note')
- expect(page).to have_selector('tr.line_holder:not([class$="nomappinginraw"]) > td.diff-line-num > .add-diff-note')
- end
-
- it 'lines numbers without mapping are empty' do
- elements = page.all('tr.line_holder[class$="nomappinginraw"] > td.diff-line-num').map { |e| e.text(:all) }
+ it 'line numbers without mapping are empty' do
+ elements = page.all('tr.line_holder[class$="nomappinginraw"] > td.diff-line-num').map { |e| e.text(:all) }
- expect(elements).to all(be == "")
- end
+ expect(elements).to all(be == "")
end
end
end
diff --git a/spec/features/projects/container_registry_spec.rb b/spec/features/projects/container_registry_spec.rb
index 17eb421191f..54685441300 100644
--- a/spec/features/projects/container_registry_spec.rb
+++ b/spec/features/projects/container_registry_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe 'Container Registry', :js do
it 'renders the tags list correctly' do
expect(page).to have_content('latest')
expect(page).to have_content('stable')
- expect(page).to have_content('Digest: N/A')
+ expect(page).to have_content('Digest: Not applicable.')
end
end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index bfd54b9c6da..951b24eafac 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -157,7 +157,7 @@ RSpec.describe 'Environment' do
context 'with related deployable present' do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
let(:deployment) do
create(:deployment, :success, environment: environment, deployable: build)
@@ -261,9 +261,12 @@ RSpec.describe 'Environment' do
context 'when environment is available' do
context 'with stop action' do
+ let(:build) { create(:ci_build, :success, pipeline: pipeline, environment: environment.name) }
+
let(:action) do
create(:ci_build, :manual, pipeline: pipeline,
- name: 'close_app')
+ name: 'close_app',
+ environment: environment.name)
end
let(:deployment) do
@@ -283,7 +286,6 @@ RSpec.describe 'Environment' do
click_button('Stop')
click_button('Stop environment') # confirm modal
wait_for_all_requests
- expect(page).to have_button('Delete')
end
end
@@ -361,8 +363,6 @@ RSpec.describe 'Environment' do
end
visit_environment(environment)
-
- expect(page).not_to have_button('Stop')
end
##
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 6cf59394af7..9ec41cd8f8d 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -132,8 +132,6 @@ RSpec.describe 'Environments page', :js do
create(:environment, project: project, state: :available)
end
- stub_feature_flags(bootstrap_confirmation_modals: false)
-
context 'when there are no deployments' do
before do
visit_environments(project)
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index 23fcc1fe444..649c21d4459 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -189,7 +189,7 @@ RSpec.describe 'Edit Project Settings' do
click_button "Save changes"
end
- expect(find(".sharing-permissions")).to have_selector(".gl-toggle.is-disabled", minimum: 4)
+ expect(find(".sharing-permissions")).to have_selector(".gl-toggle.is-disabled", minimum: 3)
end
it "shows empty features project homepage" do
diff --git a/spec/features/projects/hook_logs/user_reads_log_spec.rb b/spec/features/projects/hook_logs/user_reads_log_spec.rb
index 8513a9374d1..9b7ec14c36f 100644
--- a/spec/features/projects/hook_logs/user_reads_log_spec.rb
+++ b/spec/features/projects/hook_logs/user_reads_log_spec.rb
@@ -3,21 +3,80 @@
require 'spec_helper'
RSpec.describe 'Hook logs' do
- let(:web_hook_log) { create(:web_hook_log, response_body: '<script>') }
- let(:project) { web_hook_log.web_hook.project }
+ let(:project) { create(:project) }
+ let(:project_hook) { create(:project_hook, project: project) }
+ let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook, response_body: 'Hello World') }
let(:user) { create(:user) }
before do
+ web_hook_log
project.add_maintainer(user)
sign_in(user)
end
- it 'user reads log without getting XSS' do
- visit(
- project_hook_hook_log_path(
- project, web_hook_log.web_hook, web_hook_log))
+ it 'shows list of hook logs' do
+ visit edit_project_hook_path(project, project_hook)
- expect(page).to have_content('<script>')
+ expect(page).to have_content('Recent events')
+ expect(page).to have_link('View details', href: project_hook_hook_log_path(project, project_hook, web_hook_log))
+ end
+
+ it 'shows hook log details' do
+ visit edit_project_hook_path(project, project_hook)
+ click_link 'View details'
+
+ expect(page).to have_content("POST #{web_hook_log.url}")
+ expect(page).to have_content(web_hook_log.response_body)
+ expect(page).to have_content('Resend Request')
+ end
+
+ it 'retries hook log' do
+ WebMock.stub_request(:post, project_hook.url)
+
+ visit edit_project_hook_path(project, project_hook)
+ click_link 'View details'
+ click_link 'Resend Request'
+
+ expect(page).to have_current_path(edit_project_hook_path(project, project_hook), ignore_query: true)
+ end
+
+ context 'request gets internal error' do
+ let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook, internal_error_message: 'Some error') }
+
+ it 'shows hook log details with internal error message' do
+ visit edit_project_hook_path(project, project_hook)
+ click_link 'View details'
+
+ expect(page).to have_content("POST #{web_hook_log.url}")
+ expect(page).to have_content(web_hook_log.internal_error_message)
+ expect(page).to have_content('Resend Request')
+ end
+ end
+
+ context 'response body contains XSS string' do
+ let(:web_hook_log) { create(:web_hook_log, web_hook: project_hook, response_body: '<script>') }
+
+ it 'displays log without getting XSS' do
+ visit(project_hook_hook_log_path(project, project_hook, web_hook_log))
+
+ expect(page).to have_content('<script>')
+ end
+ end
+
+ context 'response data is too large' do
+ let(:web_hook_log) do
+ create(:web_hook_log, web_hook: project_hook, request_data: WebHookLog::OVERSIZE_REQUEST_DATA)
+ end
+
+ it 'shows request data as too large and disables retry function' do
+ visit(project_hook_hook_log_path(project, project_hook, web_hook_log))
+
+ expect(page).to have_content('Request data is too large')
+ expect(page).not_to have_button(
+ _('Resent request'),
+ disabled: true, class: 'has-tooltip', title: _("Request data is too large")
+ )
+ end
end
end
diff --git a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
index 2821f35f6a6..e7d4ed58549 100644
--- a/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
+++ b/spec/features/projects/integrations/user_activates_issue_tracker_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'User activates issue tracker', :js do
it 'activates the integration' do
expect(page).to have_content("#{tracker} settings saved and active.")
- expect(page).to have_current_path(edit_project_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
+ expect(page).to have_current_path(edit_project_settings_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
end
it 'shows the link in the menu' do
@@ -58,7 +58,7 @@ RSpec.describe 'User activates issue tracker', :js do
end
expect(page).to have_content("#{tracker} settings saved and active.")
- expect(page).to have_current_path(edit_project_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
+ expect(page).to have_current_path(edit_project_settings_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
end
end
end
@@ -73,7 +73,7 @@ RSpec.describe 'User activates issue tracker', :js do
it 'saves but does not activate the integration' do
expect(page).to have_content("#{tracker} settings saved, but not active.")
- expect(page).to have_current_path(edit_project_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
+ expect(page).to have_current_path(edit_project_settings_integration_path(project, tracker.parameterize(separator: '_')), ignore_query: true)
end
it 'does not show the external tracker link in the menu' do
diff --git a/spec/features/projects/integrations/user_activates_jira_spec.rb b/spec/features/projects/integrations/user_activates_jira_spec.rb
index f855d6befe7..dad201ffbb6 100644
--- a/spec/features/projects/integrations/user_activates_jira_spec.rb
+++ b/spec/features/projects/integrations/user_activates_jira_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'User activates Jira', :js do
it 'activates the Jira integration' do
expect(page).to have_content('Jira settings saved and active.')
- expect(page).to have_current_path(edit_project_integration_path(project, :jira), ignore_query: true)
+ expect(page).to have_current_path(edit_project_settings_integration_path(project, :jira), ignore_query: true)
end
unless Gitlab.ee?
@@ -55,13 +55,13 @@ RSpec.describe 'User activates Jira', :js do
click_test_then_save_integration
expect(page).to have_content('Jira settings saved and active.')
- expect(page).to have_current_path(edit_project_integration_path(project, :jira), ignore_query: true)
+ expect(page).to have_current_path(edit_project_settings_integration_path(project, :jira), ignore_query: true)
end
end
end
describe 'user disables the Jira integration' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
before do
stub_jira_integration_test
@@ -72,7 +72,7 @@ RSpec.describe 'User activates Jira', :js do
it 'saves but does not activate the Jira integration' do
expect(page).to have_content('Jira settings saved, but not active.')
- expect(page).to have_current_path(edit_project_integration_path(project, :jira), ignore_query: true)
+ expect(page).to have_current_path(edit_project_settings_integration_path(project, :jira), ignore_query: true)
end
it 'does not show the Jira link in the menu' do
diff --git a/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb b/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
index ed0877ab0e9..54c9ec0f62e 100644
--- a/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
+++ b/spec/features/projects/integrations/user_activates_mattermost_slash_command_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Set up Mattermost slash commands', :js do
let(:mattermost_enabled) { true }
describe 'activation' do
- let(:edit_path) { edit_project_integration_path(project, :mattermost_slash_commands) }
+ let(:edit_path) { edit_project_settings_integration_path(project, :mattermost_slash_commands) }
include_examples 'user activates the Mattermost Slash Command integration'
end
diff --git a/spec/features/projects/integrations/user_activates_slack_notifications_spec.rb b/spec/features/projects/integrations/user_activates_slack_notifications_spec.rb
index 616469c5df8..e89f6e309ea 100644
--- a/spec/features/projects/integrations/user_activates_slack_notifications_spec.rb
+++ b/spec/features/projects/integrations/user_activates_slack_notifications_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'User activates Slack notifications', :js do
pipeline_channel: 6,
wiki_page_channel: 7)
- visit(edit_project_integration_path(project, integration))
+ visit(edit_project_settings_integration_path(project, integration))
end
it 'filters events by channel' do
diff --git a/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb b/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
index 0b4c9620bdf..df8cd84ffdb 100644
--- a/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
+++ b/spec/features/projects/integrations/user_activates_slack_slash_command_spec.rb
@@ -24,7 +24,11 @@ RSpec.describe 'Slack slash commands', :js do
click_active_checkbox
click_on 'Save'
- expect(page).to have_current_path(edit_project_integration_path(project, :slack_slash_commands), ignore_query: true)
+ expect(page).to have_current_path(
+ edit_project_settings_integration_path(project, :slack_slash_commands),
+ ignore_query: true
+ )
+
expect(page).to have_content('Slack slash commands settings saved, but not active.')
end
@@ -32,7 +36,11 @@ RSpec.describe 'Slack slash commands', :js do
fill_in 'Token', with: 'token'
click_on 'Save'
- expect(page).to have_current_path(edit_project_integration_path(project, :slack_slash_commands), ignore_query: true)
+ expect(page).to have_current_path(
+ edit_project_settings_integration_path(project, :slack_slash_commands),
+ ignore_query: true
+ )
+
expect(page).to have_content('Slack slash commands settings saved and active.')
end
diff --git a/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb b/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
index fcb04c338a9..8a2881c95dc 100644
--- a/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
+++ b/spec/features/projects/integrations/user_uses_inherited_settings_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User uses inherited settings', :js do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
include_context 'project integration activation'
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index e2dc760beda..6a2d2c36521 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'User browses a job', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:user) { create(:user) }
let(:user_access_level) { :developer }
let(:project) { create(:project, :repository, namespace: user.namespace) }
@@ -12,7 +14,6 @@ RSpec.describe 'User browses a job', :js do
before do
project.add_maintainer(user)
project.enable_ci
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
@@ -26,7 +27,11 @@ RSpec.describe 'User browses a job', :js do
# scroll to the top of the page first
execute_script "window.scrollTo(0,0)"
- accept_confirm { find('[data-testid="job-log-erase-link"]').click }
+ accept_gl_confirm(button_text: 'Erase job log') do
+ find('[data-testid="job-log-erase-link"]').click
+ end
+
+ wait_for_requests
expect(page).to have_no_css('.artifacts')
expect(build).not_to have_trace
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index befaf85fc1e..f0d41c1dd11 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -987,7 +987,9 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'renders message about job being stuck because of no runners with the specified tags' do
expect(page).to have_selector('[data-testid="job-stuck-with-tags"')
- expect(page).to have_content("This job is stuck because you don't have any active runners online or available with any of these tags assigned to them:")
+ expect(page).to have_content("This job is stuck because of one of the following problems. There are no active runners online, no runners for the ")
+ expect(page).to have_content("protected branch")
+ expect(page).to have_content(", or no runners that match all of the job's tags:")
end
end
@@ -997,7 +999,9 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'renders message about job being stuck because of no runners with the specified tags' do
expect(page).to have_selector('[data-testid="job-stuck-with-tags"')
- expect(page).to have_content("This job is stuck because you don't have any active runners online or available with any of these tags assigned to them:")
+ expect(page).to have_content("This job is stuck because of one of the following problems. There are no active runners online, no runners for the ")
+ expect(page).to have_content("protected branch")
+ expect(page).to have_content(", or no runners that match all of the job's tags:")
end
end
diff --git a/spec/features/projects/members/manage_members_spec.rb b/spec/features/projects/members/manage_members_spec.rb
index 0f4120e88e0..8d229530ef5 100644
--- a/spec/features/projects/members/manage_members_spec.rb
+++ b/spec/features/projects/members/manage_members_spec.rb
@@ -48,20 +48,48 @@ RSpec.describe 'Projects > Members > Manage members', :js do
end
end
- it 'uses ProjectMember access_level_roles for the invite members modal access option', :aggregate_failures do
- visit_members_page
+ context 'when owner' do
+ it 'uses ProjectMember access_level_roles for the invite members modal access option', :aggregate_failures do
+ visit_members_page
- click_on 'Invite members'
+ click_on 'Invite members'
- click_on 'Guest'
- wait_for_requests
+ click_on 'Guest'
+ wait_for_requests
- page.within '.dropdown-menu' do
- expect(page).to have_button('Guest')
- expect(page).to have_button('Reporter')
- expect(page).to have_button('Developer')
- expect(page).to have_button('Maintainer')
- expect(page).not_to have_button('Owner')
+ page.within '.dropdown-menu' do
+ expect(page).to have_button('Guest')
+ expect(page).to have_button('Reporter')
+ expect(page).to have_button('Developer')
+ expect(page).to have_button('Maintainer')
+ expect(page).to have_button('Owner')
+ end
+ end
+ end
+
+ context 'when maintainer' do
+ let(:maintainer) { create(:user) }
+
+ before do
+ project.add_maintainer(maintainer)
+ sign_in(maintainer)
+ end
+
+ it 'uses ProjectMember access_level_roles for the invite members modal access option', :aggregate_failures do
+ visit_members_page
+
+ click_on 'Invite members'
+
+ click_on 'Guest'
+ wait_for_requests
+
+ page.within '.dropdown-menu' do
+ expect(page).to have_button('Guest')
+ expect(page).to have_button('Reporter')
+ expect(page).to have_button('Developer')
+ expect(page).to have_button('Maintainer')
+ expect(page).not_to have_button('Owner')
+ end
end
end
diff --git a/spec/features/projects/members/member_leaves_project_spec.rb b/spec/features/projects/members/member_leaves_project_spec.rb
index 67c40c1dbee..db227f3701d 100644
--- a/spec/features/projects/members/member_leaves_project_spec.rb
+++ b/spec/features/projects/members/member_leaves_project_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Projects > Members > Member leaves project' do
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::ModalHelpers
let(:user) { create(:user) }
let(:project) { create(:project, :repository, :with_namespace_settings) }
@@ -11,7 +12,6 @@ RSpec.describe 'Projects > Members > Member leaves project' do
before do
project.add_developer(user)
sign_in(user)
- stub_feature_flags(bootstrap_confirmation_modals: false)
end
it 'user leaves project' do
@@ -26,7 +26,7 @@ RSpec.describe 'Projects > Members > Member leaves project' do
it 'user leaves project by url param', :js do
visit project_path(project, leave: 1)
- page.accept_confirm
+ accept_gl_confirm(button_text: 'Leave project')
wait_for_all_requests
expect(page).to have_current_path(dashboard_projects_path, ignore_query: true)
diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb
index 370d7b49832..be124502c32 100644
--- a/spec/features/projects/members/user_requests_access_spec.rb
+++ b/spec/features/projects/members/user_requests_access_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Projects > Members > User requests access', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
@@ -13,7 +15,6 @@ RSpec.describe 'Projects > Members > User requests access', :js do
sign_in(user)
project.add_maintainer(maintainer)
visit project_path(project)
- stub_feature_flags(bootstrap_confirmation_modals: false)
end
it 'request access feature is disabled' do
@@ -67,7 +68,7 @@ RSpec.describe 'Projects > Members > User requests access', :js do
expect(project.requesters.exists?(user_id: user)).to be_truthy
- accept_confirm { click_link 'Withdraw Access Request' }
+ accept_gl_confirm { click_link 'Withdraw Access Request' }
expect(page).not_to have_content 'Withdraw Access Request'
expect(page).to have_content 'Request Access'
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 0046dfe436f..c323e60bb71 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -57,10 +57,37 @@ RSpec.describe 'New project', :js do
expect(page).to have_link('GitHub')
expect(page).to have_link('Bitbucket')
expect(page).to have_link('GitLab.com')
- expect(page).to have_button('Repo by URL')
+ expect(page).to have_button('Repository by URL')
expect(page).to have_link('GitLab export')
end
+ describe 'github import option' do
+ context 'with user namespace' do
+ before do
+ visit new_project_path
+ click_link 'Import project'
+ end
+
+ it 'renders link to github importer' do
+ expect(page).to have_link(href: new_import_github_path)
+ end
+ end
+
+ context 'with group namespace' do
+ let(:group) { create(:group, :private) }
+
+ before do
+ group.add_owner(user)
+ visit new_project_path(namespace_id: group.id)
+ click_link 'Import project'
+ end
+
+ it 'renders link to github importer including namespace id' do
+ expect(page).to have_link(href: new_import_github_path(namespace_id: group.id))
+ end
+ end
+ end
+
describe 'manifest import option' do
before do
visit new_project_path
@@ -175,7 +202,7 @@ RSpec.describe 'New project', :js do
it 'does not show the initialize with Readme checkbox on "Import project" tab' do
visit new_project_path
click_link 'Import project'
- click_button 'Repo by URL'
+ click_button 'Repository by URL'
page.within '#import-project-pane' do
expect(page).not_to have_css('input#project_initialize_with_readme')
@@ -277,7 +304,7 @@ RSpec.describe 'New project', :js do
click_link 'Import project'
end
- context 'from git repository url, "Repo by URL"' do
+ context 'from git repository url, "Repository by URL"' do
before do
first('.js-import-git-toggle-button').click
end
diff --git a/spec/features/projects/pages/user_adds_domain_spec.rb b/spec/features/projects/pages/user_adds_domain_spec.rb
index 71bf1c24655..afa3f29ce0d 100644
--- a/spec/features/projects/pages/user_adds_domain_spec.rb
+++ b/spec/features/projects/pages/user_adds_domain_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe 'User adds pages domain', :js do
include LetsEncryptHelpers
+ include Spec::Support::Helpers::ModalHelpers
let_it_be(:project) { create(:project, pages_https_only: false) }
@@ -14,8 +15,6 @@ RSpec.describe 'User adds pages domain', :js do
project.add_maintainer(user)
sign_in(user)
-
- stub_feature_flags(bootstrap_confirmation_modals: false)
end
context 'when pages are exposed on external HTTP address', :http_pages_enabled do
@@ -95,7 +94,7 @@ RSpec.describe 'User adds pages domain', :js do
fill_in 'Domain', with: 'my.test.domain.com'
- find('.js-auto-ssl-toggle-container .js-project-feature-toggle').click
+ find('.js-auto-ssl-toggle-container .js-project-feature-toggle button').click
fill_in 'Certificate (PEM)', with: certificate_pem
fill_in 'Key (PEM)', with: certificate_key
@@ -168,7 +167,7 @@ RSpec.describe 'User adds pages domain', :js do
within('#content-body') { click_link 'Edit' }
- accept_confirm { click_link 'Remove' }
+ accept_gl_confirm(button_text: 'Remove certificate') { click_link 'Remove' }
expect(page).to have_field('Certificate (PEM)', with: '')
expect(page).to have_field('Key (PEM)', with: '')
diff --git a/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
index bdf280f4fe4..4c633bea64e 100644
--- a/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
include LetsEncryptHelpers
+ include Spec::Support::Helpers::ModalHelpers
let(:project) { create(:project, pages_https_only: false) }
let(:user) { create(:user) }
@@ -14,7 +15,6 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
stub_lets_encrypt_settings
- stub_feature_flags(bootstrap_confirmation_modals: false)
project.add_role(user, role)
sign_in(user)
@@ -50,7 +50,7 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
expect(page).to have_selector '.card-header', text: 'Certificate'
expect(page).to have_text domain.subject
- find('.js-auto-ssl-toggle-container .js-project-feature-toggle').click
+ find('.js-auto-ssl-toggle-container .js-project-feature-toggle button').click
expect(find("#pages_domain_auto_ssl_enabled", visible: false).value).to eq 'true'
expect(page).not_to have_selector '.card-header', text: 'Certificate'
@@ -74,7 +74,7 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
expect(page).not_to have_field 'Certificate (PEM)', type: 'textarea'
expect(page).not_to have_field 'Key (PEM)', type: 'textarea'
- find('.js-auto-ssl-toggle-container .js-project-feature-toggle').click
+ find('.js-auto-ssl-toggle-container .js-project-feature-toggle button').click
expect(find("#pages_domain_auto_ssl_enabled", visible: false).value).to eq 'false'
expect(page).to have_field 'Certificate (PEM)', type: 'textarea'
@@ -139,7 +139,8 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
expect(page).to have_selector '.card-header', text: 'Certificate'
expect(page).to have_text domain.subject
- within('.card') { accept_confirm { click_on 'Remove' } }
+ within('.card') { click_on 'Remove' }
+ accept_gl_confirm(button_text: 'Remove certificate')
expect(page).to have_field 'Certificate (PEM)', with: ''
expect(page).to have_field 'Key (PEM)', with: ''
end
diff --git a/spec/features/projects/pages/user_edits_settings_spec.rb b/spec/features/projects/pages/user_edits_settings_spec.rb
index 1226e1dc2ed..bd163f4a109 100644
--- a/spec/features/projects/pages/user_edits_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_settings_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
RSpec.describe 'Pages edits pages settings', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:project) { create(:project, pages_https_only: false) }
let(:user) { create(:user) }
@@ -176,7 +178,6 @@ RSpec.describe 'Pages edits pages settings', :js do
describe 'Remove page' do
context 'when pages are deployed' do
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
project.mark_pages_as_deployed
end
@@ -185,7 +186,7 @@ RSpec.describe 'Pages edits pages settings', :js do
expect(page).to have_link('Remove pages')
- accept_confirm { click_link 'Remove pages' }
+ accept_gl_confirm(button_text: 'Remove pages') { click_link 'Remove pages' }
expect(page).to have_content('Pages were scheduled for removal')
expect(project.reload.pages_deployed?).to be_falsey
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index 7cb14feabd2..8cf6d5bd29b 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -3,15 +3,16 @@
require 'spec_helper'
RSpec.describe 'Pipeline Schedules', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let!(:project) { create(:project, :repository) }
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }
let!(:pipeline) { create(:ci_pipeline, pipeline_schedule: pipeline_schedule) }
let(:scope) { nil }
let!(:user) { create(:user) }
- context 'logged in as the pipeline scheduler owner' do
+ context 'logged in as the pipeline schedule owner' do
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
project.add_developer(user)
pipeline_schedule.update!(owner: user)
gitlab_sign_in(user)
@@ -81,7 +82,6 @@ RSpec.describe 'Pipeline Schedules', :js do
context 'logged in as a project maintainer' do
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
project.add_maintainer(user)
gitlab_sign_in(user)
end
@@ -117,7 +117,9 @@ RSpec.describe 'Pipeline Schedules', :js do
end
it 'deletes the pipeline' do
- accept_confirm { click_link 'Delete' }
+ click_link 'Delete'
+
+ accept_gl_confirm(button_text: 'Delete pipeline schedule')
expect(page).not_to have_css(".pipeline-schedule-table-row")
end
diff --git a/spec/features/projects/pipelines/legacy_pipeline_spec.rb b/spec/features/projects/pipelines/legacy_pipeline_spec.rb
index a29cef393a8..db6feecba03 100644
--- a/spec/features/projects/pipelines/legacy_pipeline_spec.rb
+++ b/spec/features/projects/pipelines/legacy_pipeline_spec.rb
@@ -1070,4 +1070,202 @@ RSpec.describe 'Pipeline', :js do
end
end
end
+
+ describe 'GET /:project/-/pipelines/:id/builds' do
+ include_context 'pipeline builds'
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
+
+ before do
+ visit builds_project_pipeline_path(project, pipeline)
+ end
+
+ it 'shows a list of jobs' do
+ expect(page).to have_content('Test')
+ expect(page).to have_content(build_passed.id)
+ expect(page).to have_content('Deploy')
+ expect(page).to have_content(build_failed.id)
+ expect(page).to have_content(build_running.id)
+ expect(page).to have_content(build_external.id)
+ expect(page).to have_content('Retry')
+ expect(page).to have_content('Cancel running')
+ expect(page).to have_button('Play')
+ end
+
+ context 'page tabs' do
+ it 'shows Pipeline, Jobs and Needs tabs with links' do
+ expect(page).to have_link('Pipeline')
+ expect(page).to have_link('Jobs')
+ expect(page).to have_link('Needs')
+ end
+
+ it 'shows counter in Jobs tab' do
+ expect(page.find('.js-builds-counter').text).to eq(pipeline.total_size.to_s)
+ end
+ end
+
+ context 'retrying jobs' do
+ it { expect(page).not_to have_content('retried') }
+
+ context 'when retrying' do
+ before do
+ find('[data-testid="retry"]', match: :first).click
+ end
+
+ it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
+ expect(page).not_to have_content('Retry')
+ end
+ end
+ end
+
+ context 'canceling jobs' do
+ it { expect(page).not_to have_selector('.ci-canceled') }
+
+ context 'when canceling' do
+ before do
+ click_on 'Cancel running'
+ end
+
+ it 'does not show a "Cancel running" button', :sidekiq_might_not_need_inline do
+ expect(page).not_to have_content('Cancel running')
+ end
+ end
+ end
+
+ context 'playing manual job' do
+ before do
+ within '[data-testid="jobs-tab-table"]' do
+ click_button('Play')
+
+ wait_for_requests
+ end
+ end
+
+ it { expect(build_manual.reload).to be_pending }
+ end
+
+ context 'when user unschedules a delayed job' do
+ before do
+ within '[data-testid="jobs-tab-table"]' do
+ click_button('Unschedule')
+ end
+ end
+
+ it 'unschedules the delayed job and shows play button as a manual job' do
+ expect(page).to have_button('Play')
+ expect(page).not_to have_button('Unschedule')
+ end
+ end
+ end
+
+ describe 'GET /:project/-/pipelines/:id/failures' do
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: '1234') }
+ let(:pipeline_failures_page) { failures_project_pipeline_path(project, pipeline) }
+ let!(:failed_build) { create(:ci_build, :failed, pipeline: pipeline) }
+
+ subject { visit pipeline_failures_page }
+
+ context 'with failed build' do
+ before do
+ failed_build.trace.set('4 examples, 1 failure')
+ end
+
+ it 'lists failed builds' do
+ subject
+
+ expect(page).to have_content(failed_build.name)
+ expect(page).to have_content(failed_build.stage)
+ end
+
+ it 'shows build failure logs' do
+ subject
+
+ expect(page).to have_content('4 examples, 1 failure')
+ end
+
+ it 'shows the failure reason' do
+ subject
+
+ expect(page).to have_content('There is an unknown failure, please try again')
+ end
+
+ context 'when user does not have permission to retry build' do
+ it 'does not show retry button for failed build' do
+ subject
+
+ page.within(find('#js-tab-failures', match: :first)) do
+ expect(page).not_to have_button('Retry')
+ end
+ end
+ end
+
+ context 'when user does have permission to retry build' do
+ before do
+ create(:protected_branch, :developers_can_merge,
+ name: pipeline.ref, project: project)
+ end
+
+ it 'shows retry button for failed build' do
+ subject
+
+ page.within(find('#js-tab-failures', match: :first)) do
+ expect(page).to have_button('Retry')
+ end
+ end
+ end
+ end
+
+ context 'when missing build logs' do
+ it 'lists failed builds' do
+ subject
+
+ expect(page).to have_content(failed_build.name)
+ expect(page).to have_content(failed_build.stage)
+ end
+
+ it 'does not show log' do
+ subject
+
+ expect(page).to have_content('No job log')
+ end
+ end
+
+ context 'without permission to access builds' do
+ let(:role) { :guest }
+
+ before do
+ project.update!(public_builds: false)
+ end
+
+ context 'when accessing failed jobs page' do
+ it 'renders a 404 page' do
+ requests = inspect_requests { subject }
+
+ expect(page).to have_title('Not Found')
+ expect(requests.first.status_code).to eq(404)
+ end
+ end
+ end
+
+ context 'without failures' do
+ before do
+ failed_build.update!(status: :success)
+ end
+
+ it 'does not show the failure tab' do
+ subject
+
+ expect(page).not_to have_content('Failed Jobs')
+ end
+
+ it 'displays the pipeline graph' do
+ subject
+
+ expect(page).to have_current_path(pipeline_path(pipeline), ignore_query: true)
+ expect(page).to have_selector('.js-pipeline-graph')
+ end
+ end
+ end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 9eda05f695d..a83d4191f38 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -414,16 +414,6 @@ RSpec.describe 'Pipeline', :js do
expect(page).to have_selector('button[aria-label="Retry downstream pipeline"]')
end
- context 'and the FF downstream_retry_action is disabled' do
- before do
- stub_feature_flags(downstream_retry_action: false)
- end
-
- it 'does not show the retry action' do
- expect(page).not_to have_selector('button[aria-label="Retry downstream pipeline"]')
- end
- end
-
context 'when retrying' do
before do
find('button[aria-label="Retry downstream pipeline"]').click
@@ -508,8 +498,7 @@ RSpec.describe 'Pipeline', :js do
end
it 'shows counter in Jobs tab' do
- skip('Enable in jobs `pipeline_tabs_vue` MR')
- expect(page.find('.js-builds-counter').text).to eq(pipeline.total_size.to_s)
+ expect(page.find('[data-testid="builds-counter"]').text).to eq(pipeline.total_size.to_s)
end
context 'without permission to access builds' do
@@ -889,7 +878,6 @@ RSpec.describe 'Pipeline', :js do
describe 'GET /:project/-/pipelines/:id/builds' do
before do
- stub_feature_flags(pipeline_tabs_vue: false)
visit builds_project_pipeline_path(project, pipeline)
end
@@ -1042,7 +1030,6 @@ RSpec.describe 'Pipeline', :js do
let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
before do
- stub_feature_flags(pipeline_tabs_vue: false)
visit builds_project_pipeline_path(project, pipeline)
end
@@ -1066,8 +1053,7 @@ RSpec.describe 'Pipeline', :js do
end
it 'shows counter in Jobs tab' do
- skip('unskip when jobs tab is implemented with ff `pipeline_tabs_vue`')
- expect(page.find('.js-builds-counter').text).to eq(pipeline.total_size.to_s)
+ expect(page.find('[data-testid="builds-counter"]').text).to eq(pipeline.total_size.to_s)
end
end
@@ -1130,10 +1116,6 @@ RSpec.describe 'Pipeline', :js do
let(:pipeline_failures_page) { failures_project_pipeline_path(project, pipeline) }
let!(:failed_build) { create(:ci_build, :failed, pipeline: pipeline) }
- before do
- stub_feature_flags(pipeline_tabs_vue: false)
- end
-
subject { visit pipeline_failures_page }
context 'with failed build' do
@@ -1160,42 +1142,11 @@ RSpec.describe 'Pipeline', :js do
expect(page).to have_content('There is an unknown failure, please try again')
end
- context 'when failed_jobs_tab_vue feature flag is disabled' do
- before do
- stub_feature_flags(failed_jobs_tab_vue: false)
- end
-
- context 'when user does not have permission to retry build' do
- it 'shows retry button for failed build' do
- subject
-
- page.within(find('.build-failures', match: :first)) do
- expect(page).not_to have_link('Retry')
- end
- end
- end
-
- context 'when user does have permission to retry build' do
- before do
- create(:protected_branch, :developers_can_merge,
- name: pipeline.ref, project: project)
- end
-
- it 'shows retry button for failed build' do
- subject
-
- page.within(find('.build-failures', match: :first)) do
- expect(page).to have_link('Retry')
- end
- end
- end
- end
-
context 'when user does not have permission to retry build' do
it 'does not show retry button for failed build' do
subject
- page.within(find('#js-tab-failures', match: :first)) do
+ page.within(find('[data-testid="tab-failures"]', match: :first)) do
expect(page).not_to have_button('Retry')
end
end
@@ -1210,7 +1161,7 @@ RSpec.describe 'Pipeline', :js do
it 'shows retry button for failed build' do
subject
- page.within(find('#js-tab-failures', match: :first)) do
+ page.within(find('[data-testid="tab-failures"]', match: :first)) do
expect(page).to have_button('Retry')
end
end
@@ -1255,7 +1206,6 @@ RSpec.describe 'Pipeline', :js do
end
it 'does not show the failure tab' do
- skip('unskip when the failure tab has been implemented in ff `pipeline_tabs_vue`')
subject
expect(page).not_to have_content('Failed Jobs')
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index a18bf7c5caf..785edc69623 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -311,7 +311,6 @@ RSpec.describe 'Pipelines', :js do
end
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
visit_project_pipelines
end
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
index f08f5529472..6551b254643 100644
--- a/spec/features/projects/releases/user_views_edit_release_spec.rb
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -30,10 +30,12 @@ RSpec.describe 'User edits Release', :js do
it 'renders the breadcrumbs' do
within('.breadcrumbs') do
- expect(page).to have_content("#{project.creator.name} #{project.name} Edit Release")
+ expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name} Edit Release")
expect(page).to have_link(project.creator.name, href: user_path(project.creator))
expect(page).to have_link(project.name, href: project_path(project))
+ expect(page).to have_link(_('Releases'), href: project_releases_path(project))
+ expect(page).to have_link(release.name, href: project_release_path(project, release))
expect(page).to have_link('Edit Release', href: edit_project_release_path(project, release))
end
end
diff --git a/spec/features/projects/settings/access_tokens_spec.rb b/spec/features/projects/settings/access_tokens_spec.rb
index 122bf267021..88f9a50b093 100644
--- a/spec/features/projects/settings/access_tokens_spec.rb
+++ b/spec/features/projects/settings/access_tokens_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Project > Settings > Access Tokens', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:bot_user) { create(:user, :project_bot) }
let_it_be(:group) { create(:group) }
@@ -14,7 +16,6 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
end
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
@@ -24,6 +25,11 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
create(:personal_access_token, user: bot_user)
end
+ def role_dropdown_options
+ role_dropdown = page.find_by_id('resource_access_token_access_level')
+ role_dropdown.all('option').map(&:text)
+ end
+
context 'when user is not a project maintainer' do
before do
project.add_developer(user)
@@ -33,37 +39,68 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
end
describe 'token creation' do
- it_behaves_like 'resource access tokens creation', 'project'
+ context 'when user is a project owner' do
+ before do
+ project.add_owner(user)
+ end
- context 'when token creation is not allowed' do
- it_behaves_like 'resource access tokens creation disallowed', 'Project access token creation is disabled in this group. You can still use and manage existing tokens.'
+ it_behaves_like 'resource access tokens creation', 'project'
- context 'with a project in a personal namespace' do
- let(:personal_project) { create(:project) }
+ it 'shows Owner option' do
+ visit resource_settings_access_tokens_path
- before do
- personal_project.add_maintainer(user)
- end
+ expect(role_dropdown_options).to include('Owner')
+ end
+ end
- it 'shows access token creation form and text' do
- visit project_settings_access_tokens_path(personal_project)
+ context 'when user is a project maintainer' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'resource access tokens creation', 'project'
+
+ it 'does not show Owner option for a maintainer' do
+ visit resource_settings_access_tokens_path
- expect(page).to have_selector('#new_resource_access_token')
- expect(page).to have_text('Generate project access tokens scoped to this project for your applications that need access to the GitLab API.')
- end
+ expect(role_dropdown_options).not_to include('Owner')
end
end
end
- describe 'active tokens' do
- let!(:resource_access_token) { create_resource_access_token }
+ context 'when token creation is not allowed' do
+ it_behaves_like 'resource access tokens creation disallowed', 'Project access token creation is disabled in this group. You can still use and manage existing tokens.'
- it_behaves_like 'active resource access tokens'
+ context 'with a project in a personal namespace' do
+ let(:personal_project) { create(:project) }
+
+ before do
+ personal_project.add_maintainer(user)
+ end
+
+ it 'shows access token creation form and text' do
+ visit project_settings_access_tokens_path(personal_project)
+
+ expect(page).to have_selector('#js-new-access-token-form')
+ end
+ end
end
- describe 'inactive tokens' do
- let!(:resource_access_token) { create_resource_access_token }
+ describe 'viewing tokens' do
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ describe 'active tokens' do
+ let!(:resource_access_token) { create_resource_access_token }
+
+ it_behaves_like 'active resource access tokens'
+ end
- it_behaves_like 'inactive resource access tokens', 'This project has no active access tokens.'
+ describe 'inactive tokens' do
+ let!(:resource_access_token) { create_resource_access_token }
+
+ it_behaves_like 'inactive resource access tokens', 'This project has no active access tokens.'
+ end
end
end
diff --git a/spec/features/projects/settings/branch_rules_settings_spec.rb b/spec/features/projects/settings/branch_rules_settings_spec.rb
new file mode 100644
index 00000000000..5cc35f108b5
--- /dev/null
+++ b/spec/features/projects/settings/branch_rules_settings_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Settings > Repository > Branch rules settings' do
+ let(:project) { create(:project_empty_repo) }
+ let(:user) { create(:user) }
+ let(:role) { :developer }
+
+ subject(:request) { visit project_settings_repository_branch_rules_path(project) }
+
+ before do
+ project.add_role(user, role)
+ sign_in(user)
+ end
+
+ context 'for developer' do
+ let(:role) { :developer }
+
+ it 'is not allowed to view' do
+ request
+
+ expect(page).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'for maintainer' do
+ let(:role) { :maintainer }
+
+ context 'Branch rules', :js do
+ it 'renders branch rules page' do
+ request
+
+ expect(page).to have_content('Branch rules')
+ end
+ end
+
+ context 'branch_rules feature flag disabled' do
+ it 'does not render branch rules content' do
+ stub_feature_flags(branch_rules: false)
+ request
+
+ expect(page).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
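The new spec above gates access purely on the role injected through `let(:role)` and `project.add_role`. A minimal sketch of the same gate, illustrative only and not part of this commit, using the RSpec::Parameterized table syntax that other specs touched by this commit rely on; the `:ok` expectation for maintainers is an assumption, since the example above asserts page content rather than a status code.

# Illustrative sketch, not part of this commit: the role gate from
# branch_rules_settings_spec.rb expressed as a parameterized table.
RSpec.describe 'Projects > Settings > Repository > Branch rules access', :js do
  using RSpec::Parameterized::TableSyntax

  let(:project) { create(:project_empty_repo) }
  let(:user) { create(:user) }

  # :ok for maintainers is an assumption; the branch_rules feature flag is left
  # at its spec default (enabled), matching the non-disabled contexts above.
  where(:role, :status) do
    :developer  | :not_found
    :maintainer | :ok
  end

  with_them do
    it 'enforces the expected access level' do
      project.add_role(user, role)
      sign_in(user)

      visit project_settings_repository_branch_rules_path(project)

      expect(page).to have_gitlab_http_status(status)
    end
  end
end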
diff --git a/spec/features/projects/settings/packages_settings_spec.rb b/spec/features/projects/settings/packages_settings_spec.rb
index 057e6b635fe..1c2b0faa215 100644
--- a/spec/features/projects/settings/packages_settings_spec.rb
+++ b/spec/features/projects/settings/packages_settings_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'Projects > Settings > Packages', :js do
sign_in(user)
stub_config(packages: { enabled: packages_enabled })
+ stub_feature_flags(package_registry_access_level: package_registry_access_level)
visit edit_project_path(project)
end
@@ -18,14 +19,31 @@ RSpec.describe 'Projects > Settings > Packages', :js do
context 'Packages enabled in config' do
let(:packages_enabled) { true }
- it 'displays the packages toggle button' do
- expect(page).to have_selector('[data-testid="toggle-label"]', text: 'Packages')
- expect(page).to have_selector('input[name="project[packages_enabled]"] + button', visible: true)
+ context 'with feature flag disabled' do
+ let(:package_registry_access_level) { false }
+
+ it 'displays the packages toggle button' do
+ expect(page).to have_selector('[data-testid="toggle-label"]', text: 'Packages')
+ expect(page).to have_selector('input[name="project[packages_enabled]"] + button', visible: true)
+ end
+ end
+
+ context 'with feature flag enabled' do
+ let(:package_registry_access_level) { true }
+
+ it 'displays the packages access level setting' do
+ expect(page).to have_selector('[data-testid="package-registry-access-level"] > label', text: 'Package registry')
+ expect(page).to have_selector(
+ 'input[name="project[project_feature_attributes][package_registry_access_level]"]',
+ visible: false
+ )
+ end
end
end
context 'Packages disabled in config' do
let(:packages_enabled) { false }
+ let(:package_registry_access_level) { false }
it 'does not show up in UI' do
expect(page).not_to have_selector('[data-testid="toggle-label"]', text: 'Packages')
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 4e1b55d3d70..cfdd3d9224d 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -39,6 +39,22 @@ RSpec.describe 'Projects > Settings > Repository settings' do
end
end
+ context 'Branch rules', :js do
+ it 'renders branch rules settings' do
+ visit project_settings_repository_path(project)
+ expect(page).to have_content('Branch rules')
+ end
+
+ context 'branch_rules feature flag disabled', :js do
+ it 'does not render branch rules settings' do
+ stub_feature_flags(branch_rules: false)
+ visit project_settings_repository_path(project)
+
+ expect(page).not_to have_content('Branch rules')
+ end
+ end
+ end
+
context 'Deploy Keys', :js do
let_it_be(:private_deploy_key) { create(:deploy_key, title: 'private_deploy_key', public: false) }
let_it_be(:public_deploy_key) { create(:another_deploy_key, title: 'public_deploy_key', public: true) }
diff --git a/spec/features/projects/settings/user_searches_in_settings_spec.rb b/spec/features/projects/settings/user_searches_in_settings_spec.rb
index 44b5464a1b0..7ed96d01189 100644
--- a/spec/features/projects/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/projects/settings/user_searches_in_settings_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe 'User searches project settings', :js do
let_it_be(:project) { create(:project, :repository, namespace: user.namespace, pages_https_only: false) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
end
diff --git a/spec/features/promotion_spec.rb b/spec/features/promotion_spec.rb
index 8692930376f..903d6244a4c 100644
--- a/spec/features/promotion_spec.rb
+++ b/spec/features/promotion_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Promotions', :js do
visit edit_project_path(project)
within('#promote_service_desk') do
- find('.close').click
+ find('.js-close').click
end
wait_for_requests
diff --git a/spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb b/spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb
deleted file mode 100644
index 5561cf15a66..00000000000
--- a/spec/features/refactor_blob_viewer_disabled/projects/files/user_replaces_files_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > User replaces files', :js do
- include DropzoneHelper
-
- let(:fork_message) do
- "You're not allowed to make changes to this project directly. "\
- "A fork of this project has been created that you can make changes in, so you can submit a merge request."
- end
-
- let(:project) { create(:project, :repository, name: 'Shop') }
- let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
- let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
- let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
- let(:user) { create(:user) }
-
- before do
- stub_feature_flags(refactor_blob_viewer: false)
- sign_in(user)
- end
-
- context 'when an user has write access' do
- before do
- project.add_maintainer(user)
- visit(project_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'replaces an existed file with a new one' do
- click_link('.gitignore')
-
- expect(page).to have_content('.gitignore')
-
- click_on('Replace')
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
-
- page.within('#modal-upload-blob') do
- fill_in(:commit_message, with: 'Replacement file commit message')
- end
-
- click_button('Replace file')
-
- expect(page).to have_content('Lorem ipsum dolor sit amet')
- expect(page).to have_content('Sed ut perspiciatis unde omnis')
- expect(page).to have_content('Replacement file commit message')
- end
- end
-
- context 'when an user does not have write access' do
- before do
- project2.add_reporter(user)
- visit(project2_tree_path_root_ref)
- wait_for_requests
- end
-
- it 'replaces an existed file with a new one in a forked project', :sidekiq_might_not_need_inline do
- click_link('.gitignore')
-
- expect(page).to have_content('.gitignore')
-
- click_on('Replace')
-
- expect(page).to have_link('Fork')
- expect(page).to have_button('Cancel')
-
- click_link('Fork')
-
- expect(page).to have_content(fork_message)
-
- click_on('Replace')
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
-
- page.within('#modal-upload-blob') do
- fill_in(:commit_message, with: 'Replacement file commit message')
- end
-
- click_button('Replace file')
-
- expect(page).to have_content('Replacement file commit message')
-
- fork = user.fork_of(project2.reload)
-
- expect(page).to have_current_path(project_new_merge_request_path(fork), ignore_query: true)
-
- click_link('Changes')
-
- expect(page).to have_content('Lorem ipsum dolor sit amet')
- expect(page).to have_content('Sed ut perspiciatis unde omnis')
- end
- end
-end
diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb
index 6bd31d7314c..8d55a7a64f4 100644
--- a/spec/features/snippets/notes_on_personal_snippets_spec.rb
+++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Comments on personal snippets', :js do
include NoteInteractionHelpers
+ include Spec::Support::Helpers::ModalHelpers
let_it_be(:snippet) { create(:personal_snippet, :public) }
let_it_be(:other_note) { create(:note_on_personal_snippet) }
@@ -18,7 +19,6 @@ RSpec.describe 'Comments on personal snippets', :js do
end
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in user
visit snippet_path(snippet)
@@ -124,7 +124,7 @@ RSpec.describe 'Comments on personal snippets', :js do
page.within('.current-note-edit-form') do
fill_in 'note[note]', with: 'new content'
- find('.btn-success').click
+ find('.btn-confirm').click
end
page.within("#notes-list li#note_#{snippet_notes[0].id}") do
@@ -142,9 +142,11 @@ RSpec.describe 'Comments on personal snippets', :js do
open_more_actions_dropdown(snippet_notes[0])
page.within("#notes-list li#note_#{snippet_notes[0].id}") do
- accept_confirm { click_on 'Delete comment' }
+ click_on 'Delete comment'
end
+ accept_gl_confirm(button_text: 'Delete comment')
+
wait_for_requests
expect(page).not_to have_selector("#notes-list li#note_#{snippet_notes[0].id}")
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index c682ad06977..628468a2abe 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe 'User creates snippet', :js do
let(:snippet_title_field) { 'snippet-title' }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
sign_in(user)
visit new_snippet_path
diff --git a/spec/features/tags/developer_deletes_tag_spec.rb b/spec/features/tags/developer_deletes_tag_spec.rb
index 6b669695f7b..efd4b42c136 100644
--- a/spec/features/tags/developer_deletes_tag_spec.rb
+++ b/spec/features/tags/developer_deletes_tag_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'Developer deletes tag', :js do
before do
project.add_developer(user)
sign_in(user)
+ create(:protected_tag, project: project, name: 'v1.1.1')
visit project_tags_path(project)
end
@@ -22,6 +23,16 @@ RSpec.describe 'Developer deletes tag', :js do
expect(page).not_to have_content 'v1.1.0'
end
+
+ context 'protected tags' do
+      it 'cannot delete protected tags' do
+ expect(page).to have_content 'v1.1.1'
+
+ container = page.find('.content .flex-row', text: 'v1.1.1')
+ expect(container).to have_button('Only a project maintainer or owner can delete a protected tag',
+ disabled: true)
+ end
+ end
end
context 'from a specific tag page' do
@@ -33,7 +44,7 @@ RSpec.describe 'Developer deletes tag', :js do
container = page.find('.nav-controls')
delete_tag container
- expect(page).to have_current_path("#{project_tags_path(project)}/", ignore_query: true)
+ expect(page).to have_current_path(project_tags_path(project), ignore_query: true)
expect(page).not_to have_content 'v1.0.0'
end
end
@@ -55,9 +66,9 @@ RSpec.describe 'Developer deletes tag', :js do
end
def delete_tag(container)
- container.find('.js-remove-tag').click
+ container.find('.js-delete-tag-button').click
- page.within('.modal') { click_button('Delete tag') }
+ page.within('.modal') { click_button('Yes, delete tag') }
wait_for_requests
end
end
diff --git a/spec/features/tags/maintainer_deletes_protected_tag_spec.rb b/spec/features/tags/maintainer_deletes_protected_tag_spec.rb
new file mode 100644
index 00000000000..0bf9645c2fb
--- /dev/null
+++ b/spec/features/tags/maintainer_deletes_protected_tag_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Maintainer deletes protected tag', :js do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :repository, namespace: group) }
+ let(:tag_name) { 'v1.1.1' }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ create(:protected_tag, project: project, name: tag_name)
+ visit project_tags_path(project)
+ end
+
+ context 'from the tags list page' do
+ it 'deletes the tag' do
+ expect(page).to have_content "#{tag_name} protected"
+
+ page.find('.content .flex-row', text: tag_name).find('.js-delete-tag-button').click
+ assert_modal_content(tag_name)
+ confirm_delete_tag(tag_name)
+
+ expect(page).not_to have_content tag_name
+ end
+ end
+
+ context 'from a specific tag page' do
+ before do
+ click_on tag_name
+ end
+
+ it 'deletes the tag' do
+ expect(page).to have_current_path(project_tag_path(project, tag_name), ignore_query: true)
+
+ page.find('.js-delete-tag-button').click
+ assert_modal_content(tag_name)
+ confirm_delete_tag(tag_name)
+
+ expect(page).to have_current_path(project_tags_path(project), ignore_query: true)
+ expect(page).not_to have_content tag_name
+ end
+ end
+
+ def assert_modal_content(tag_name)
+ within '.modal' do
+ expect(page).to have_content("Please type the following to confirm: #{tag_name}")
+ expect(page).to have_field('delete_tag_input')
+ expect(page).to have_button('Yes, delete protected tag', disabled: true)
+ end
+ end
+
+ def confirm_delete_tag(tag_name)
+ within '.modal' do
+ fill_in('delete_tag_input', with: tag_name)
+ click_button('Yes, delete protected tag')
+ wait_for_requests
+ end
+ end
+end
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 7f5cf2359a3..eb497715df7 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Triggers', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
let(:trigger_title) { 'trigger desc' }
let(:user) { create(:user) }
let(:user2) { create(:user) }
@@ -74,7 +76,6 @@ RSpec.describe 'Triggers', :js do
describe 'trigger "Revoke" workflow' do
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
visit project_settings_ci_cd_path(@project)
end
@@ -86,7 +87,7 @@ RSpec.describe 'Triggers', :js do
it 'revoke trigger' do
# See if "Revoke" on trigger works post trigger creation
- page.accept_confirm do
+ accept_gl_confirm(button_text: 'Revoke') do
find('[data-testid="trigger_revoke_button"]').send_keys(:return)
end
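Several hunks in this section make the same move: drop `stub_feature_flags(bootstrap_confirmation_modals: false)` and replace native browser confirms with the GlModal-based helper from Spec::Support::Helpers::ModalHelpers. A condensed sketch of the two call shapes used above, illustrative only and not part of this commit; the selectors and button labels are copied from the hunks.

# Illustrative only; both forms appear verbatim in the hunks above.
include Spec::Support::Helpers::ModalHelpers

# Block form: trigger the action inside the block, then the helper clicks the
# confirmation button in the GlModal.
accept_gl_confirm(button_text: 'Revoke') do
  find('[data-testid="trigger_revoke_button"]').send_keys(:return)
end

# Blockless form: perform the action first, then confirm the modal that is already open.
click_on 'Delete comment'
accept_gl_confirm(button_text: 'Delete comment')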
diff --git a/spec/features/user_sorts_things_spec.rb b/spec/features/user_sorts_things_spec.rb
index bcf3defe9c6..c6a1cfdc146 100644
--- a/spec/features/user_sorts_things_spec.rb
+++ b/spec/features/user_sorts_things_spec.rb
@@ -16,8 +16,6 @@ RSpec.describe "User sorts things", :js do
let_it_be(:merge_request) { create(:merge_request, target_project: project, source_project: project, author: current_user) }
before do
- stub_feature_flags(vue_issues_list: true)
-
project.add_developer(current_user)
sign_in(current_user)
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 3eae4955167..30441dac7b6 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -341,6 +341,7 @@ RSpec.describe 'Signup' do
end
it 'redirects to step 2 of the signup process, sets the role and redirects back' do
+ stub_feature_flags(about_your_company_registration_flow: false)
visit new_user_registration_path
fill_in_signup_form
diff --git a/spec/features/users/zuora_csp_spec.rb b/spec/features/users/zuora_csp_spec.rb
new file mode 100644
index 00000000000..f3fd27d6495
--- /dev/null
+++ b/spec/features/users/zuora_csp_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Zuora content security policy' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it 'has proper Content Security Policy headers' do
+ visit pipeline_path(pipeline)
+
+ expect(response_headers['Content-Security-Policy']).to include('https://*.zuora.com')
+ end
+end
diff --git a/spec/finders/crm/contacts_finder_spec.rb b/spec/finders/crm/contacts_finder_spec.rb
index 151af1ad825..dd5274a0574 100644
--- a/spec/finders/crm/contacts_finder_spec.rb
+++ b/spec/finders/crm/contacts_finder_spec.rb
@@ -26,16 +26,6 @@ RSpec.describe Crm::ContactsFinder do
root_group.add_developer(user)
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'returns an empty array' do
- expect(subject).to be_empty
- end
- end
-
context 'when feature flag is enabled' do
it 'returns all group contacts' do
expect(subject).to match_array([contact_1, contact_2])
@@ -66,5 +56,91 @@ RSpec.describe Crm::ContactsFinder do
expect(subject).to be_empty
end
end
+
+    context 'with search information' do
+ let_it_be(:search_test_group) { create(:group, :crm_enabled) }
+
+ let_it_be(:search_test_a) do
+ create(
+ :contact,
+ group: search_test_group,
+ first_name: "ABC",
+ last_name: "DEF",
+ email: "ghi@test.com",
+ description: "LMNO",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:search_test_b) do
+ create(
+ :contact,
+ group: search_test_group,
+ first_name: "PQR",
+ last_name: "STU",
+ email: "vwx@test.com",
+ description: "YZ",
+ state: "active"
+ )
+ end
+
+ before do
+ search_test_group.add_developer(user)
+ end
+
+ context 'when search term is empty' do
+ it 'returns all group contacts alphabetically ordered' do
+ finder = described_class.new(user, group: search_test_group, search: "")
+ expect(finder.execute).to eq([search_test_a, search_test_b])
+ end
+ end
+
+ context 'when search term is not empty' do
+ it 'searches for first name ignoring casing' do
+ finder = described_class.new(user, group: search_test_group, search: "aBc")
+ expect(finder.execute).to match_array([search_test_a])
+ end
+
+ it 'searches for last name ignoring casing' do
+ finder = described_class.new(user, group: search_test_group, search: "StU")
+ expect(finder.execute).to match_array([search_test_b])
+ end
+
+ it 'searches for email' do
+ finder = described_class.new(user, group: search_test_group, search: "ghi")
+ expect(finder.execute).to match_array([search_test_a])
+ end
+
+ it 'searches for description ignoring casing' do
+ finder = described_class.new(user, group: search_test_group, search: "Yz")
+ expect(finder.execute).to match_array([search_test_b])
+ end
+
+ it 'fuzzy searches for email and last name' do
+ finder = described_class.new(user, group: search_test_group, search: "s")
+ expect(finder.execute).to match_array([search_test_a, search_test_b])
+ end
+ end
+
+      context 'when filtering by contact state' do
+ it 'returns only inactive contacts' do
+ finder = described_class.new(user, group: search_test_group, state: :inactive)
+ expect(finder.execute).to match_array([search_test_a])
+ end
+
+ it 'returns only active contacts' do
+ finder = described_class.new(user, group: search_test_group, state: :active)
+ expect(finder.execute).to match_array([search_test_b])
+ end
+ end
+
+      context 'when filtering by contact ids' do
+ it 'returns the expected contacts' do
+ finder = described_class.new(user, group: search_test_group, ids: [search_test_b.id])
+
+ expect(finder.execute).to match_array([search_test_b])
+ end
+ end
+ end
end
end
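The new examples above exercise each optional filter of Crm::ContactsFinder one at a time; the organizations finder spec added next covers the analogous surface for Crm::OrganizationsFinder. A usage sketch, illustrative only and not part of this commit, where `user`, `group`, and `contact` stand in for the records created in the surrounding spec context.

# Usage sketch; each call mirrors one group of examples above.
Crm::ContactsFinder.new(user, group: group, search: 'abc').execute    # case-insensitive match on name, email, description
Crm::ContactsFinder.new(user, group: group, state: :active).execute   # restrict by contact state
Crm::ContactsFinder.new(user, group: group, ids: [contact.id]).execute # restrict to specific ids (placeholder)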
diff --git a/spec/finders/crm/organizations_finder_spec.rb b/spec/finders/crm/organizations_finder_spec.rb
new file mode 100644
index 00000000000..f227fcd3748
--- /dev/null
+++ b/spec/finders/crm/organizations_finder_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Crm::OrganizationsFinder do
+ let_it_be(:user) { create(:user) }
+
+ describe '#execute' do
+ subject { described_class.new(user, group: group).execute }
+
+ context 'when customer relations feature is enabled for the group' do
+ let_it_be(:root_group) { create(:group, :crm_enabled) }
+ let_it_be(:group) { create(:group, parent: root_group) }
+
+ let_it_be(:organization_1) { create(:organization, group: root_group) }
+ let_it_be(:organization_2) { create(:organization, group: root_group) }
+
+ context 'when user does not have permissions to see organizations in the group' do
+ it 'returns an empty array' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'when user is member of the root group' do
+ before do
+ root_group.add_developer(user)
+ end
+
+ context 'when feature flag is enabled' do
+ it 'returns all group organizations' do
+ expect(subject).to match_array([organization_1, organization_2])
+ end
+ end
+ end
+
+ context 'when user is member of the sub group' do
+ before do
+ group.add_developer(user)
+ end
+
+ it 'returns an empty array' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ context 'when customer relations feature is disabled for the group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:organization) { create(:organization, group: group) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ it 'returns an empty array' do
+ expect(subject).to be_empty
+ end
+ end
+
+    context 'with search information' do
+ let_it_be(:search_test_group) { create(:group, :crm_enabled) }
+
+ let_it_be(:search_test_a) do
+ create(
+ :organization,
+ group: search_test_group,
+ name: "DEF",
+ description: "ghi_st",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:search_test_b) do
+ create(
+ :organization,
+ group: search_test_group,
+ name: "ABC_st",
+ description: "JKL",
+ state: "active"
+ )
+ end
+
+ before do
+ search_test_group.add_developer(user)
+ end
+
+ context 'when search term is empty' do
+ it 'returns all group organizations alphabetically ordered' do
+ finder = described_class.new(user, group: search_test_group, search: "")
+ expect(finder.execute).to eq([search_test_b, search_test_a])
+ end
+ end
+
+ context 'when search term is not empty' do
+ it 'searches for name' do
+ finder = described_class.new(user, group: search_test_group, search: "aBc")
+ expect(finder.execute).to match_array([search_test_b])
+ end
+
+ it 'searches for description' do
+ finder = described_class.new(user, group: search_test_group, search: "ghI")
+ expect(finder.execute).to match_array([search_test_a])
+ end
+
+ it 'searches for name and description' do
+ finder = described_class.new(user, group: search_test_group, search: "_st")
+ expect(finder.execute).to eq([search_test_b, search_test_a])
+ end
+ end
+
+      context 'when filtering by organization state' do
+ it 'returns only inactive organizations' do
+ finder = described_class.new(user, group: search_test_group, state: :inactive)
+ expect(finder.execute).to match_array([search_test_a])
+ end
+
+ it 'returns only active organizations' do
+ finder = described_class.new(user, group: search_test_group, state: :active)
+ expect(finder.execute).to match_array([search_test_b])
+ end
+ end
+
+      context 'when filtering by organization ids' do
+ it 'returns the expected organizations' do
+ finder = described_class.new(user, group: search_test_group, ids: [search_test_a.id])
+
+ expect(finder.execute).to match_array([search_test_a])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 3f5a55410d2..704171a737b 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -3,1453 +3,7 @@
require 'spec_helper'
RSpec.describe IssuesFinder do
- using RSpec::Parameterized::TableSyntax
include_context 'IssuesFinder context'
- describe '#execute' do
- include_context 'IssuesFinder#execute context'
-
- context 'scope: all' do
- let(:scope) { 'all' }
-
- it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
- end
-
- context 'user does not have read permissions' do
- let(:search_user) { user2 }
-
- context 'when filtering by project id' do
- let(:params) { { project_id: project1.id } }
-
- it 'returns no issues' do
- expect(issues).to be_empty
- end
- end
-
- context 'when filtering by group id' do
- let(:params) { { group_id: group.id } }
-
- it 'returns no issues' do
- expect(issues).to be_empty
- end
- end
- end
-
- context 'assignee filtering' do
- let(:issuables) { issues }
-
- it_behaves_like 'assignee ID filter' do
- let(:params) { { assignee_id: user.id } }
- let(:expected_issuables) { [issue1, issue2, issue5] }
- end
-
- it_behaves_like 'assignee NOT ID filter' do
- let(:params) { { not: { assignee_id: user.id } } }
- let(:expected_issuables) { [issue3, issue4] }
- end
-
- it_behaves_like 'assignee OR filter' do
- let(:params) { { or: { assignee_id: [user.id, user2.id] } } }
- let(:expected_issuables) { [issue1, issue2, issue3, issue5] }
- end
-
- context 'when assignee_id does not exist' do
- it_behaves_like 'assignee NOT ID filter' do
- let(:params) { { not: { assignee_id: -100 } } }
- let(:expected_issuables) { [issue1, issue2, issue3, issue4, issue5] }
- end
- end
-
- context 'filter by username' do
- let_it_be(:user3) { create(:user) }
-
- before do
- project2.add_developer(user3)
- issue2.assignees = [user2]
- issue3.assignees = [user3]
- end
-
- it_behaves_like 'assignee username filter' do
- let(:params) { { assignee_username: [user2.username] } }
- let(:expected_issuables) { [issue2] }
- end
-
- it_behaves_like 'assignee NOT username filter' do
- before do
- issue2.assignees = [user2]
- end
-
- let(:params) { { not: { assignee_username: [user.username, user2.username] } } }
- let(:expected_issuables) { [issue3, issue4] }
- end
-
- it_behaves_like 'assignee OR filter' do
- let(:params) { { or: { assignee_username: [user2.username, user3.username] } } }
- let(:expected_issuables) { [issue2, issue3] }
- end
-
- context 'when assignee_username does not exist' do
- it_behaves_like 'assignee NOT username filter' do
- before do
- issue2.assignees = [user2]
- end
-
- let(:params) { { not: { assignee_username: 'non_existent_username' } } }
- let(:expected_issuables) { [issue1, issue2, issue3, issue4, issue5] }
- end
- end
- end
-
- it_behaves_like 'no assignee filter' do
- let_it_be(:user3) { create(:user) }
- let(:expected_issuables) { [issue4] }
- end
-
- it_behaves_like 'any assignee filter' do
- let(:expected_issuables) { [issue1, issue2, issue3, issue5] }
- end
- end
-
- context 'filtering by release' do
- context 'when the release tag is none' do
- let(:params) { { release_tag: 'none' } }
-
- it 'returns issues without releases' do
- expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
- end
- end
-
- context 'when the release tag exists' do
- let(:params) { { project_id: project1.id, release_tag: release.tag } }
-
- it 'returns the issues associated with that release' do
- expect(issues).to contain_exactly(issue1)
- end
- end
- end
-
- context 'filtering by projects' do
- context 'when projects are passed in a list of ids' do
- let(:params) { { projects: [project1.id] } }
-
- it 'returns the issue belonging to the projects' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
- end
-
- context 'when projects are passed in a subquery' do
- let(:params) { { projects: Project.id_in(project1.id) } }
-
- it 'returns the issue belonging to the projects' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
- end
- end
-
- context 'filtering by group_id' do
- let(:params) { { group_id: group.id } }
-
- context 'when include_subgroup param not set' do
- it 'returns all group issues' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
-
- context 'when projects outside the group are passed' do
- let(:params) { { group_id: group.id, projects: [project2.id] } }
-
- it 'returns no issues' do
- expect(issues).to be_empty
- end
- end
-
- context 'when projects of the group are passed' do
- let(:params) { { group_id: group.id, projects: [project1.id] } }
-
- it 'returns the issue within the group and projects' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
- end
-
- context 'when projects of the group are passed as a subquery' do
- let(:params) { { group_id: group.id, projects: Project.id_in(project1.id) } }
-
- it 'returns the issue within the group and projects' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
- end
-
- context 'when release_tag is passed as a parameter' do
- let(:params) { { group_id: group.id, release_tag: 'dne-release-tag' } }
-
- it 'ignores the release_tag parameter' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
- end
- end
-
- context 'when include_subgroup param is true' do
- before do
- params[:include_subgroups] = true
- end
-
- it 'returns all group and subgroup issues' do
- expect(issues).to contain_exactly(issue1, issue4, issue5)
- end
-
- context 'when mixed projects are passed' do
- let(:params) { { group_id: group.id, projects: [project2.id, project3.id] } }
-
- it 'returns the issue within the group and projects' do
- expect(issues).to contain_exactly(issue4)
- end
- end
- end
- end
-
- context 'filtering by author' do
- context 'by author ID' do
- let(:params) { { author_id: user2.id } }
-
- it 'returns issues created by that user' do
- expect(issues).to contain_exactly(issue3)
- end
- end
-
- context 'using OR' do
- let(:issue6) { create(:issue, project: project2) }
- let(:params) { { or: { author_username: [issue3.author.username, issue6.author.username] } } }
-
- it 'returns issues created by any of the given users' do
- expect(issues).to contain_exactly(issue3, issue6)
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(or_issuable_queries: false)
- end
-
- it 'does not add any filter' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5, issue6)
- end
- end
- end
-
- context 'filtering by NOT author ID' do
- let(:params) { { not: { author_id: user2.id } } }
-
- it 'returns issues not created by that user' do
- expect(issues).to contain_exactly(issue1, issue2, issue4, issue5)
- end
- end
-
- context 'filtering by nonexistent author ID and issue term using CTE for search' do
- let(:params) do
- {
- author_id: 'does-not-exist',
- search: 'git',
- attempt_group_search_optimizations: true
- }
- end
-
- it 'returns no results' do
- expect(issues).to be_empty
- end
- end
- end
-
- context 'filtering by milestone' do
- let(:params) { { milestone_title: milestone.title } }
-
- it 'returns issues assigned to that milestone' do
- expect(issues).to contain_exactly(issue1)
- end
- end
-
- context 'filtering by not milestone' do
- let(:params) { { not: { milestone_title: milestone.title } } }
-
- it 'returns issues not assigned to that milestone' do
- expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
- end
- end
-
- context 'filtering by group milestone' do
- let!(:group) { create(:group, :public) }
- let(:group_milestone) { create(:milestone, group: group) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
- let(:params) { { milestone_title: group_milestone.title } }
-
- before do
- project2.update!(namespace: group)
- issue2.update!(milestone: group_milestone)
- issue3.update!(milestone: group_milestone)
- end
-
- it 'returns issues assigned to that group milestone' do
- expect(issues).to contain_exactly(issue2, issue3)
- end
-
- context 'using NOT' do
- let(:params) { { not: { milestone_title: group_milestone.title } } }
-
- it 'returns issues not assigned to that group milestone' do
- expect(issues).to contain_exactly(issue1, issue4, issue5)
- end
- end
- end
-
- context 'filtering by no milestone' do
- let(:params) { { milestone_title: 'None' } }
-
- it 'returns issues with no milestone' do
- expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
- end
-
- it 'returns issues with no milestone (deprecated)' do
- params[:milestone_title] = Milestone::None.title
-
- expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
- end
- end
-
- context 'filtering by any milestone' do
- let(:params) { { milestone_title: 'Any' } }
-
- it 'returns issues with any assigned milestone' do
- expect(issues).to contain_exactly(issue1)
- end
-
- it 'returns issues with any assigned milestone (deprecated)' do
- params[:milestone_title] = Milestone::Any.title
-
- expect(issues).to contain_exactly(issue1)
- end
- end
-
- context 'filtering by upcoming milestone' do
- let(:params) { { milestone_title: Milestone::Upcoming.name } }
-
- let!(:group) { create(:group, :public) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
-
- let(:project_no_upcoming_milestones) { create(:project, :public) }
- let(:project_next_1_1) { create(:project, :public) }
- let(:project_next_8_8) { create(:project, :public) }
- let(:project_in_group) { create(:project, :public, namespace: group) }
-
- let(:yesterday) { Date.current - 1.day }
- let(:tomorrow) { Date.current + 1.day }
- let(:two_days_from_now) { Date.current + 2.days }
- let(:ten_days_from_now) { Date.current + 10.days }
-
- let(:milestones) do
- [
- create(:milestone, :closed, project: project_no_upcoming_milestones),
- create(:milestone, project: project_next_1_1, title: '1.1', due_date: two_days_from_now),
- create(:milestone, project: project_next_1_1, title: '8.9', due_date: ten_days_from_now),
- create(:milestone, project: project_next_8_8, title: '1.2', due_date: yesterday),
- create(:milestone, project: project_next_8_8, title: '8.8', due_date: tomorrow),
- create(:milestone, group: group, title: '9.9', due_date: tomorrow)
- ]
- end
-
- before do
- @created_issues = milestones.map do |milestone|
- create(:issue, project: milestone.project || project_in_group, milestone: milestone, author: user, assignees: [user])
- end
- end
-
- it 'returns issues in the upcoming milestone for each project or group' do
- expect(issues.map { |issue| issue.milestone.title }).to contain_exactly('1.1', '8.8', '9.9')
- expect(issues.map { |issue| issue.milestone.due_date }).to contain_exactly(tomorrow, two_days_from_now, tomorrow)
- end
-
- context 'using NOT' do
- let(:params) { { not: { milestone_title: Milestone::Upcoming.name } } }
-
- it 'returns issues not in upcoming milestones for each project or group, but must have a due date' do
- target_issues = @created_issues.select do |issue|
- issue.milestone&.due_date && issue.milestone.due_date <= Date.current
- end
-
- expect(issues).to contain_exactly(*target_issues)
- end
- end
- end
-
- context 'filtering by started milestone' do
- let(:params) { { milestone_title: Milestone::Started.name } }
-
- let(:project_no_started_milestones) { create(:project, :public) }
- let(:project_started_1_and_2) { create(:project, :public) }
- let(:project_started_8) { create(:project, :public) }
-
- let(:yesterday) { Date.current - 1.day }
- let(:tomorrow) { Date.current + 1.day }
- let(:two_days_ago) { Date.current - 2.days }
- let(:three_days_ago) { Date.current - 3.days }
-
- let(:milestones) do
- [
- create(:milestone, project: project_no_started_milestones, start_date: tomorrow),
- create(:milestone, project: project_started_1_and_2, title: '1.0', start_date: two_days_ago),
- create(:milestone, project: project_started_1_and_2, title: '2.0', start_date: yesterday),
- create(:milestone, project: project_started_1_and_2, title: '3.0', start_date: tomorrow),
- create(:milestone, :closed, project: project_started_1_and_2, title: '4.0', start_date: three_days_ago),
- create(:milestone, :closed, project: project_started_8, title: '6.0', start_date: three_days_ago),
- create(:milestone, project: project_started_8, title: '7.0'),
- create(:milestone, project: project_started_8, title: '8.0', start_date: yesterday),
- create(:milestone, project: project_started_8, title: '9.0', start_date: tomorrow)
- ]
- end
-
- before do
- milestones.each do |milestone|
- create(:issue, project: milestone.project, milestone: milestone, author: user, assignees: [user])
- end
- end
-
- it 'returns issues in the started milestones for each project' do
- expect(issues.map { |issue| issue.milestone.title }).to contain_exactly('1.0', '2.0', '8.0')
- expect(issues.map { |issue| issue.milestone.start_date }).to contain_exactly(two_days_ago, yesterday, yesterday)
- end
-
- context 'using NOT' do
- let(:params) { { not: { milestone_title: Milestone::Started.name } } }
-
- it 'returns issues not in the started milestones for each project' do
- target_issues = Issue.where(milestone: Milestone.not_started)
-
- expect(issues).to contain_exactly(*target_issues)
- end
- end
- end
-
- context 'filtering by label' do
- let(:params) { { label_name: label.title } }
-
- it 'returns issues with that label' do
- expect(issues).to contain_exactly(issue2)
- end
-
- context 'using NOT' do
- let(:params) { { not: { label_name: label.title } } }
-
- it 'returns issues that do not have that label' do
- expect(issues).to contain_exactly(issue1, issue3, issue4, issue5)
- end
-
- # IssuableFinder first filters using the outer params (the ones not inside the `not` key.)
- # Afterwards, it applies the `not` params to that resultset. This means that things inside the `not` param
- # do not take precedence over the outer params with the same name.
- context 'shadowing the same outside param' do
- let(:params) { { label_name: label2.title, not: { label_name: label.title } } }
-
- it 'does not take precedence over labels outside NOT' do
- expect(issues).to contain_exactly(issue3)
- end
- end
-
- context 'further filtering outside params' do
- let(:params) { { label_name: label2.title, not: { assignee_username: user2.username } } }
-
- it 'further filters on the returned resultset' do
- expect(issues).to be_empty
- end
- end
- end
- end
-
- context 'filtering by multiple labels' do
- let(:params) { { label_name: [label.title, label2.title].join(',') } }
- let(:label2) { create(:label, project: project2) }
-
- before do
- create(:label_link, label: label2, target: issue2)
- end
-
- it 'returns the unique issues with all those labels' do
- expect(issues).to contain_exactly(issue2)
- end
-
- context 'using NOT' do
- let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
-
- it 'returns issues that do not have any of the labels provided' do
- expect(issues).to contain_exactly(issue1, issue4, issue5)
- end
- end
- end
-
- context 'filtering by a label that includes any or none in the title' do
- let(:params) { { label_name: [label.title, label2.title].join(',') } }
- let(:label) { create(:label, title: 'any foo', project: project2) }
- let(:label2) { create(:label, title: 'bar none', project: project2) }
-
- before do
- create(:label_link, label: label2, target: issue2)
- end
-
- it 'returns the unique issues with all those labels' do
- expect(issues).to contain_exactly(issue2)
- end
-
- context 'using NOT' do
- let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
-
- it 'returns issues that do not have ANY ONE of the labels provided' do
- expect(issues).to contain_exactly(issue1, issue4, issue5)
- end
- end
- end
-
- context 'filtering by no label' do
- let(:params) { { label_name: described_class::Params::FILTER_NONE } }
-
- it 'returns issues with no labels' do
- expect(issues).to contain_exactly(issue1, issue4, issue5)
- end
- end
-
- context 'filtering by any label' do
- let(:params) { { label_name: described_class::Params::FILTER_ANY } }
-
- it 'returns issues that have one or more label' do
- create_list(:label_link, 2, label: create(:label, project: project2), target: issue3)
-
- expect(issues).to contain_exactly(issue2, issue3)
- end
- end
-
- context 'when the same label exists on project and group levels' do
- let(:issue1) { create(:issue, project: project1) }
- let(:issue2) { create(:issue, project: project1) }
-
- # Skipping validation to reproduce a "real-word" scenario.
- # We still have legacy labels on PRD that have the same title on the group and project levels, example: `bug`
- let(:project_label) { build(:label, title: 'somelabel', project: project1).tap { |r| r.save!(validate: false) } }
- let(:group_label) { create(:group_label, title: 'somelabel', group: project1.group) }
-
- let(:params) { { label_name: 'somelabel' } }
-
- before do
- create(:label_link, label: group_label, target: issue1)
- create(:label_link, label: project_label, target: issue2)
- end
-
- it 'finds both issue records' do
- expect(issues).to contain_exactly(issue1, issue2)
- end
- end
-
- context 'filtering by issue term' do
- let(:params) { { search: search_term } }
-
- let_it_be(:english) { create(:issue, project: project1, title: 'title', description: 'something english') }
- let_it_be(:japanese) { create(:issue, project: project1, title: '日本語 title', description: 'another english description') }
-
- context 'with latin search term' do
- let(:search_term) { 'title english' }
-
- it 'returns matching issues' do
- expect(issues).to contain_exactly(english, japanese)
- end
- end
-
- context 'with non-latin search term' do
- let(:search_term) { '日本語' }
-
- it 'returns matching issues' do
- expect(issues).to contain_exactly(japanese)
- end
- end
-
- context 'when full-text search is disabled' do
- let(:search_term) { 'somet' }
-
- before do
- stub_feature_flags(issues_full_text_search: false)
- end
-
- it 'allows partial word matches' do
- expect(issues).to contain_exactly(english)
- end
- end
-
- context 'with anonymous user' do
- let_it_be(:public_project) { create(:project, :public, group: subgroup) }
- let_it_be(:issue6) { create(:issue, project: public_project, title: 'tanuki') }
- let_it_be(:issue7) { create(:issue, project: public_project, title: 'ikunat') }
-
- let(:search_user) { nil }
- let(:params) { { search: 'tanuki' } }
-
- context 'with disable_anonymous_search feature flag enabled' do
- before do
- stub_feature_flags(disable_anonymous_search: true)
- end
-
- it 'does not perform search' do
- expect(issues).to contain_exactly(issue6, issue7)
- end
- end
-
- context 'with disable_anonymous_search feature flag disabled' do
- before do
- stub_feature_flags(disable_anonymous_search: false)
- end
-
- it 'finds one public issue' do
- expect(issues).to contain_exactly(issue6)
- end
- end
- end
- end
-
- context 'filtering by issue term in title' do
- let(:params) { { search: 'git', in: 'title' } }
-
- it 'returns issues with title match for search term' do
- expect(issues).to contain_exactly(issue1)
- end
- end
-
- context 'filtering by issues iids' do
- let(:params) { { iids: [issue3.iid] } }
-
- it 'returns issues where iids match' do
- expect(issues).to contain_exactly(issue3, issue5)
- end
-
- context 'using NOT' do
- let(:params) { { not: { iids: [issue3.iid] } } }
-
- it 'returns issues with no iids match' do
- expect(issues).to contain_exactly(issue1, issue2, issue4)
- end
- end
- end
-
- context 'filtering by state' do
- context 'with opened' do
- let(:params) { { state: 'opened' } }
-
- it 'returns only opened issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
- end
- end
-
- context 'with closed' do
- let(:params) { { state: 'closed' } }
-
- it 'returns only closed issues' do
- expect(issues).to contain_exactly(closed_issue)
- end
- end
-
- context 'with all' do
- let(:params) { { state: 'all' } }
-
- it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4, issue5)
- end
- end
-
- context 'with invalid state' do
- let(:params) { { state: 'invalid_state' } }
-
- it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, closed_issue, issue4, issue5)
- end
- end
- end
-
- context 'filtering by created_at' do
- context 'through created_after' do
- let(:params) { { created_after: issue3.created_at } }
-
- it 'returns issues created on or after the given date' do
- expect(issues).to contain_exactly(issue3)
- end
- end
-
- context 'through created_before' do
- let(:params) { { created_before: issue1.created_at } }
-
- it 'returns issues created on or before the given date' do
- expect(issues).to contain_exactly(issue1)
- end
- end
-
- context 'through created_after and created_before' do
- let(:params) { { created_after: issue2.created_at, created_before: issue3.created_at } }
-
- it 'returns issues created between the given dates' do
- expect(issues).to contain_exactly(issue2, issue3)
- end
- end
- end
-
- context 'filtering by updated_at' do
- context 'through updated_after' do
- let(:params) { { updated_after: issue3.updated_at } }
-
- it 'returns issues updated on or after the given date' do
- expect(issues).to contain_exactly(issue3)
- end
- end
-
- context 'through updated_before' do
- let(:params) { { updated_before: issue1.updated_at } }
-
- it 'returns issues updated on or before the given date' do
- expect(issues).to contain_exactly(issue1)
- end
- end
-
- context 'through updated_after and updated_before' do
- let(:params) { { updated_after: issue2.updated_at, updated_before: issue3.updated_at } }
-
- it 'returns issues updated between the given dates' do
- expect(issues).to contain_exactly(issue2, issue3)
- end
- end
- end
-
- context 'filtering by closed_at' do
- let!(:closed_issue1) { create(:issue, project: project1, state: :closed, closed_at: 1.week.ago) }
- let!(:closed_issue2) { create(:issue, project: project2, state: :closed, closed_at: 1.week.from_now) }
- let!(:closed_issue3) { create(:issue, project: project2, state: :closed, closed_at: 2.weeks.from_now) }
-
- context 'through closed_after' do
- let(:params) { { state: :closed, closed_after: closed_issue3.closed_at } }
-
- it 'returns issues closed on or after the given date' do
- expect(issues).to contain_exactly(closed_issue3)
- end
- end
-
- context 'through closed_before' do
- let(:params) { { state: :closed, closed_before: closed_issue1.closed_at } }
-
- it 'returns issues closed on or before the given date' do
- expect(issues).to contain_exactly(closed_issue1)
- end
- end
-
- context 'through closed_after and closed_before' do
- let(:params) { { state: :closed, closed_after: closed_issue2.closed_at, closed_before: closed_issue3.closed_at } }
-
- it 'returns issues closed between the given dates' do
- expect(issues).to contain_exactly(closed_issue2, closed_issue3)
- end
- end
- end
-
- context 'filtering by reaction name' do
- context 'user searches by no reaction' do
- let(:params) { { my_reaction_emoji: 'None' } }
-
- it 'returns issues that the user did not react to' do
- expect(issues).to contain_exactly(issue2, issue4, issue5)
- end
- end
-
- context 'user searches by any reaction' do
- let(:params) { { my_reaction_emoji: 'Any' } }
-
- it 'returns issues that the user reacted to' do
- expect(issues).to contain_exactly(issue1, issue3)
- end
- end
-
- context 'user searches by "thumbsup" reaction' do
- let(:params) { { my_reaction_emoji: 'thumbsup' } }
-
- it 'returns issues that the user thumbsup to' do
- expect(issues).to contain_exactly(issue1)
- end
-
- context 'using NOT' do
- let(:params) { { not: { my_reaction_emoji: 'thumbsup' } } }
-
- it 'returns issues that the user did not thumbsup to' do
- expect(issues).to contain_exactly(issue2, issue3, issue4, issue5)
- end
- end
- end
-
- context 'user2 searches by "thumbsup" reaction' do
- let(:search_user) { user2 }
-
- let(:params) { { my_reaction_emoji: 'thumbsup' } }
-
- it 'returns issues that the user2 thumbsup to' do
- expect(issues).to contain_exactly(issue2)
- end
-
- context 'using NOT' do
- let(:params) { { not: { my_reaction_emoji: 'thumbsup' } } }
-
- it 'returns issues that the user2 thumbsup to' do
- expect(issues).to contain_exactly(issue3)
- end
- end
- end
-
- context 'user searches by "thumbsdown" reaction' do
- let(:params) { { my_reaction_emoji: 'thumbsdown' } }
-
- it 'returns issues that the user thumbsdown to' do
- expect(issues).to contain_exactly(issue3)
- end
-
- context 'using NOT' do
- let(:params) { { not: { my_reaction_emoji: 'thumbsdown' } } }
-
- it 'returns issues that the user thumbsdown to' do
- expect(issues).to contain_exactly(issue1, issue2, issue4, issue5)
- end
- end
- end
- end
-
- context 'filtering by confidential' do
- let_it_be(:confidential_issue) { create(:issue, project: project1, confidential: true) }
-
- context 'no filtering' do
- it 'returns all issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5, confidential_issue)
- end
- end
-
- context 'user filters confidential issues' do
- let(:params) { { confidential: true } }
-
- it 'returns only confidential issues' do
- expect(issues).to contain_exactly(confidential_issue)
- end
- end
-
- context 'user filters only public issues' do
- let(:params) { { confidential: false } }
-
- it 'returns only public issues' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
- end
- end
- end
-
- context 'filtering by issue type' do
- let_it_be(:incident_issue) { create(:incident, project: project1) }
-
- context 'no type given' do
- let(:params) { { issue_types: [] } }
-
- it 'returns all issues' do
- expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4, issue5)
- end
- end
-
- context 'incident type' do
- let(:params) { { issue_types: ['incident'] } }
-
- it 'returns incident issues' do
- expect(issues).to contain_exactly(incident_issue)
- end
- end
-
- context 'issue type' do
- let(:params) { { issue_types: ['issue'] } }
-
- it 'returns all issues with type issue' do
- expect(issues).to contain_exactly(issue1, issue2, issue3, issue4, issue5)
- end
- end
-
- context 'multiple params' do
- let(:params) { { issue_types: %w(issue incident) } }
-
- it 'returns all issues' do
- expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4, issue5)
- end
- end
-
- context 'without array' do
- let(:params) { { issue_types: 'incident' } }
-
- it 'returns incident issues' do
- expect(issues).to contain_exactly(incident_issue)
- end
- end
-
- context 'invalid params' do
- let(:params) { { issue_types: ['nonsense'] } }
-
- it 'returns no issues' do
- expect(issues).to eq(Issue.none)
- end
- end
- end
-
- context 'filtering by crm contact' do
- let_it_be(:contact1) { create(:contact, group: group) }
- let_it_be(:contact2) { create(:contact, group: group) }
-
- let_it_be(:contact1_issue1) { create(:issue, project: project1) }
- let_it_be(:contact1_issue2) { create(:issue, project: project1) }
- let_it_be(:contact2_issue1) { create(:issue, project: project1) }
-
- let(:params) { { crm_contact_id: contact1.id } }
-
- it 'returns for that contact' do
- create(:issue_customer_relations_contact, issue: contact1_issue1, contact: contact1)
- create(:issue_customer_relations_contact, issue: contact1_issue2, contact: contact1)
- create(:issue_customer_relations_contact, issue: contact2_issue1, contact: contact2)
-
- expect(issues).to contain_exactly(contact1_issue1, contact1_issue2)
- end
- end
-
- context 'filtering by crm organization' do
- let_it_be(:organization) { create(:organization, group: group) }
- let_it_be(:contact1) { create(:contact, group: group, organization: organization) }
- let_it_be(:contact2) { create(:contact, group: group, organization: organization) }
-
- let_it_be(:contact1_issue1) { create(:issue, project: project1) }
- let_it_be(:contact1_issue2) { create(:issue, project: project1) }
- let_it_be(:contact2_issue1) { create(:issue, project: project1) }
-
- let(:params) { { crm_organization_id: organization.id } }
-
- it 'returns for that contact' do
- create(:issue_customer_relations_contact, issue: contact1_issue1, contact: contact1)
- create(:issue_customer_relations_contact, issue: contact1_issue2, contact: contact1)
- create(:issue_customer_relations_contact, issue: contact2_issue1, contact: contact2)
-
- expect(issues).to contain_exactly(contact1_issue1, contact1_issue2, contact2_issue1)
- end
- end
-
- context 'when the user is unauthorized' do
- let(:search_user) { nil }
-
- it 'returns no results' do
- expect(issues).to be_empty
- end
- end
-
- context 'when the user can see some, but not all, issues' do
- let(:search_user) { user2 }
-
- it 'returns only issues they can see' do
- expect(issues).to contain_exactly(issue2, issue3)
- end
- end
-
- it 'finds issues user can access due to group' do
- group = create(:group)
- project = create(:project, group: group)
- issue = create(:issue, project: project)
- group.add_user(user, :owner)
-
- expect(issues).to include(issue)
- end
- end
-
- context 'personal scope' do
- let(:scope) { 'assigned_to_me' }
-
- it 'returns issue assigned to the user' do
- expect(issues).to contain_exactly(issue1, issue2, issue5)
- end
-
- context 'filtering by project' do
- let(:params) { { project_id: project1.id } }
-
- it 'returns issues assigned to the user in that project' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
- end
- end
-
- context 'when project restricts issues' do
- let(:scope) { nil }
-
- it "doesn't return team-only issues to non team members" do
- project = create(:project, :public, :issues_private)
- issue = create(:issue, project: project)
-
- expect(issues).not_to include(issue)
- end
-
- it "doesn't return issues if feature disabled" do
- [project1, project2, project3].each do |project|
- project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
- end
-
- expect(issues.count).to eq 0
- end
- end
-
- context 'external authorization' do
- it_behaves_like 'a finder with external authorization service' do
- let!(:subject) { create(:issue, project: project) }
- let(:project_params) { { project_id: project.id } }
- end
- end
-
- context 'filtering by due date' do
- let_it_be(:issue_due_today) { create(:issue, project: project1, due_date: Date.current) }
- let_it_be(:issue_due_tomorrow) { create(:issue, project: project1, due_date: 1.day.from_now) }
- let_it_be(:issue_overdue) { create(:issue, project: project1, due_date: 2.days.ago) }
- let_it_be(:issue_due_soon) { create(:issue, project: project1, due_date: 2.days.from_now) }
-
- let(:scope) { 'all' }
- let(:base_params) { { project_id: project1.id } }
-
- context 'with param set to no due date' do
- let(:params) { base_params.merge(due_date: Issue::NoDueDate.name) }
-
- it 'returns issues with no due date' do
- expect(issues).to contain_exactly(issue1, issue5)
- end
- end
-
- context 'with param set to any due date' do
- let(:params) { base_params.merge(due_date: Issue::AnyDueDate.name) }
-
- it 'returns issues with any due date' do
- expect(issues).to contain_exactly(issue_due_today, issue_due_tomorrow, issue_overdue, issue_due_soon)
- end
- end
-
- context 'with param set to due today' do
- let(:params) { base_params.merge(due_date: Issue::DueToday.name) }
-
- it 'returns issues due today' do
- expect(issues).to contain_exactly(issue_due_today)
- end
- end
-
- context 'with param set to due tomorrow' do
- let(:params) { base_params.merge(due_date: Issue::DueTomorrow.name) }
-
- it 'returns issues due today' do
- expect(issues).to contain_exactly(issue_due_tomorrow)
- end
- end
-
- context 'with param set to overdue' do
- let(:params) { base_params.merge(due_date: Issue::Overdue.name) }
-
- it 'returns overdue issues' do
- expect(issues).to contain_exactly(issue_overdue)
- end
- end
-
- context 'with param set to next month and previous two weeks' do
- let(:params) { base_params.merge(due_date: Issue::DueNextMonthAndPreviousTwoWeeks.name) }
-
- it 'returns issues due in the previous two weeks and next month' do
- expect(issues).to contain_exactly(issue_due_today, issue_due_tomorrow, issue_overdue, issue_due_soon)
- end
- end
-
- context 'with invalid param' do
- let(:params) { base_params.merge(due_date: 'foo') }
-
- it 'returns no issues' do
- expect(issues).to be_empty
- end
- end
- end
- end
-
- describe '#row_count', :request_store do
- let_it_be(:admin) { create(:admin) }
-
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'returns the number of rows for the default state' do
- finder = described_class.new(admin)
-
- expect(finder.row_count).to eq(5)
- end
-
- it 'returns the number of rows for a given state' do
- finder = described_class.new(admin, state: 'closed')
-
- expect(finder.row_count).to be_zero
- end
- end
-
- context 'when admin mode is disabled' do
- it 'returns no rows' do
- finder = described_class.new(admin)
-
- expect(finder.row_count).to be_zero
- end
- end
-
- it 'returns -1 if the query times out' do
- finder = described_class.new(admin)
-
- expect_next_instance_of(described_class) do |subfinder|
- expect(subfinder).to receive(:execute).and_raise(ActiveRecord::QueryCanceled)
- end
-
- expect(finder.row_count).to eq(-1)
- end
- end
-
- describe '#with_confidentiality_access_check' do
- let(:guest) { create(:user) }
-
- let_it_be(:authorized_user) { create(:user) }
- let_it_be(:banned_user) { create(:user, :banned) }
- let_it_be(:project) { create(:project, namespace: authorized_user.namespace) }
- let_it_be(:public_issue) { create(:issue, project: project) }
- let_it_be(:confidential_issue) { create(:issue, project: project, confidential: true) }
- let_it_be(:hidden_issue) { create(:issue, project: project, author: banned_user) }
-
- shared_examples 'returns public, does not return hidden or confidential' do
- it 'returns only public issues' do
- expect(subject).to include(public_issue)
- expect(subject).not_to include(confidential_issue, hidden_issue)
- end
- end
-
- shared_examples 'returns public and confidential, does not return hidden' do
- it 'returns only public and confidential issues' do
- expect(subject).to include(public_issue, confidential_issue)
- expect(subject).not_to include(hidden_issue)
- end
- end
-
- shared_examples 'returns public and hidden, does not return confidential' do
- it 'returns only public and hidden issues' do
- expect(subject).to include(public_issue, hidden_issue)
- expect(subject).not_to include(confidential_issue)
- end
- end
-
- shared_examples 'returns public, confidential, and hidden' do
- it 'returns all issues' do
- expect(subject).to include(public_issue, confidential_issue, hidden_issue)
- end
- end
-
- context 'when no project filter is given' do
- let(:params) { {} }
-
- context 'for an anonymous user' do
- subject { described_class.new(nil, params).with_confidentiality_access_check }
-
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
- end
-
- context 'for a user without project membership' do
- subject { described_class.new(user, params).with_confidentiality_access_check }
-
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
- end
-
- context 'for a guest user' do
- subject { described_class.new(guest, params).with_confidentiality_access_check }
-
- before do
- project.add_guest(guest)
- end
-
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
- end
-
- context 'for a project member with access to view confidential issues' do
- subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
-
- it_behaves_like 'returns public and confidential, does not return hidden'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public, confidential, and hidden'
- end
- end
-
- context 'for an admin' do
- let(:admin_user) { create(:user, :admin) }
-
- subject { described_class.new(admin_user, params).with_confidentiality_access_check }
-
- context 'when admin mode is enabled', :enable_admin_mode do
- it_behaves_like 'returns public, confidential, and hidden'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public, confidential, and hidden'
- end
- end
-
- context 'when admin mode is disabled' do
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
- end
- end
- end
-
- context 'when searching within a specific project' do
- let(:params) { { project_id: project.id } }
-
- context 'for an anonymous user' do
- subject { described_class.new(nil, params).with_confidentiality_access_check }
-
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
-
- it 'does not filter by confidentiality' do
- expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
- subject
- end
- end
-
- context 'for a user without project membership' do
- subject { described_class.new(user, params).with_confidentiality_access_check }
-
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
-
- it 'filters by confidentiality' do
- expect(subject.to_sql).to match("issues.confidential")
- end
- end
-
- context 'for a guest user' do
- subject { described_class.new(guest, params).with_confidentiality_access_check }
-
- before do
- project.add_guest(guest)
- end
-
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
-
- it 'filters by confidentiality' do
- expect(subject.to_sql).to match("issues.confidential")
- end
- end
-
- context 'for a project member with access to view confidential issues' do
- subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
-
- it_behaves_like 'returns public and confidential, does not return hidden'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public, confidential, and hidden'
- end
-
- it 'does not filter by confidentiality' do
- expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
-
- subject
- end
- end
-
- context 'for an admin' do
- let(:admin_user) { create(:user, :admin) }
-
- subject { described_class.new(admin_user, params).with_confidentiality_access_check }
-
- context 'when admin mode is enabled', :enable_admin_mode do
- it_behaves_like 'returns public, confidential, and hidden'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public, confidential, and hidden'
- end
-
- it 'does not filter by confidentiality' do
- expect(Issue).not_to receive(:where).with(a_string_matching('confidential'), anything)
-
- subject
- end
- end
-
- context 'when admin mode is disabled' do
- it_behaves_like 'returns public, does not return hidden or confidential'
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ban_user_feature_flag: false)
- end
-
- it_behaves_like 'returns public and hidden, does not return confidential'
- end
-
- it 'filters by confidentiality' do
- expect(subject.to_sql).to match("issues.confidential")
- end
- end
- end
- end
- end
-
- describe '#use_cte_for_search?' do
- let(:finder) { described_class.new(nil, params) }
-
- context 'when there is no search param' do
- let(:params) { { attempt_group_search_optimizations: true } }
-
- it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
- end
- end
-
- context 'when the force_cte param is falsey' do
- let(:params) { { search: '日本語' } }
-
- it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
- end
- end
-
- context 'when a non-simple sort is given' do
- let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: 'popularity' } }
-
- it 'returns false' do
- expect(finder.use_cte_for_search?).to be_falsey
- end
- end
-
- context 'when all conditions are met' do
- context "uses group search optimization" do
- let(:params) { { search: '日本語', attempt_group_search_optimizations: true } }
-
- it 'returns true' do
- expect(finder.use_cte_for_search?).to be_truthy
- expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
- end
- end
-
- context "uses project search optimization" do
- let(:params) { { search: '日本語', attempt_project_search_optimizations: true } }
-
- it 'returns true' do
- expect(finder.use_cte_for_search?).to be_truthy
- expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
- end
- end
-
- context 'with simple sort' do
- let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: 'updated_desc' } }
-
- it 'returns true' do
- expect(finder.use_cte_for_search?).to be_truthy
- expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
- end
- end
-
- context 'with simple sort as a symbol' do
- let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: :updated_desc } }
-
- it 'returns true' do
- expect(finder.use_cte_for_search?).to be_truthy
- expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
- end
- end
- end
- end
-
- describe '#parent_param=' do
- let(:finder) { described_class.new(nil) }
-
- subject { finder.parent_param = obj }
-
- where(:klass, :param) do
- :Project | :project_id
- :Group | :group_id
- end
-
- with_them do
- let(:obj) { Object.const_get(klass, false).new }
-
- it 'sets the params' do
- subject
-
- expect(finder.params[param]).to eq(obj)
- end
- end
-
- context 'unexpected parent' do
- let(:obj) { MergeRequest.new }
-
- it 'raises an error' do
- expect { subject }.to raise_error('Unexpected parent: MergeRequest')
- end
- end
- end
+ it_behaves_like 'issues or work items finder', :issue, 'IssuesFinder#execute context'
end
diff --git a/spec/finders/packages/pypi/packages_finder_spec.rb b/spec/finders/packages/pypi/packages_finder_spec.rb
index 1a44fb99009..3957eb188da 100644
--- a/spec/finders/packages/pypi/packages_finder_spec.rb
+++ b/spec/finders/packages/pypi/packages_finder_spec.rb
@@ -12,59 +12,91 @@ RSpec.describe Packages::Pypi::PackagesFinder do
let_it_be(:package3) { create(:pypi_package, name: package2.name, project: project) }
let_it_be(:package4) { create(:pypi_package, name: package2.name, project: project2) }
- let(:package_name) { package2.name }
+ shared_examples 'when no package is found' do
+ context 'non-existing package' do
+ let(:package_name) { 'none' }
- describe 'execute' do
- subject { described_class.new(user, scope, package_name: package_name).execute }
+ it { expect(subject).to be_empty }
+ end
+ end
- shared_examples 'when no package is found' do
- context 'non-existing package' do
- let(:package_name) { 'none' }
+ shared_examples 'when package_name param is a non-normalized name' do
+ context 'with a non-normalized package name' do
+ let(:package_name) { package2.name.upcase.tr('-', '.') }
- it { expect(subject).to be_empty }
- end
+ it { expect(subject).to be_empty }
end
+ end
- shared_examples 'when package_name param is a non-normalized name' do
- context 'non-existing package' do
- let(:package_name) { package2.name.upcase.tr('-', '.') }
+ describe '#execute' do
+ subject { described_class.new(user, scope, package_name: package_name).execute }
- it { expect(subject).to be_empty }
+ context 'with package_name param' do
+ let(:package_name) { package2.name }
+
+ context 'within a project' do
+ let(:scope) { project }
+
+ it { is_expected.to contain_exactly(package2, package3) }
+
+ it_behaves_like 'when no package is found'
+ it_behaves_like 'when package_name param is a non-normalized name'
end
- end
- context 'within a project' do
- let(:scope) { project }
+ context 'within a group' do
+ let(:scope) { group }
- it { is_expected.to contain_exactly(package2, package3) }
+ it { expect(subject).to be_empty }
- it_behaves_like 'when no package is found'
- it_behaves_like 'when package_name param is a non-normalized name'
- end
+ context 'user with access to only one project' do
+ before do
+ project2.add_developer(user)
+ end
- context 'within a group' do
- let(:scope) { group }
+ it { is_expected.to contain_exactly(package4) }
- it { expect(subject).to be_empty }
+ it_behaves_like 'when no package is found'
+ it_behaves_like 'when package_name param is a non-normalized name'
- context 'user with access to only one project' do
- before do
- project2.add_developer(user)
+ context 'user with access to multiple projects' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to contain_exactly(package2, package3, package4) }
+ end
end
+ end
+ end
- it { is_expected.to contain_exactly(package4) }
+ context 'without package_name param' do
+ let(:package_name) { nil }
- it_behaves_like 'when no package is found'
- it_behaves_like 'when package_name param is a non-normalized name'
+ context 'within a group' do
+ let(:scope) { group }
- context ' user with access to multiple projects' do
+ context 'user with access to only one project' do
before do
- project.add_developer(user)
+ project2.add_developer(user)
end
- it { is_expected.to contain_exactly(package2, package3, package4) }
+ it { is_expected.to contain_exactly(package4) }
+
+ context 'user with access to multiple projects' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to contain_exactly(package1, package2, package3, package4) }
+ end
end
end
+
+ context 'within a project' do
+ let(:scope) { project }
+
+ it { is_expected.to contain_exactly(package1, package2, package3) }
+ end
end
end
end
diff --git a/spec/finders/work_items/work_items_finder_spec.rb b/spec/finders/work_items/work_items_finder_spec.rb
new file mode 100644
index 00000000000..fe400688a23
--- /dev/null
+++ b/spec/finders/work_items/work_items_finder_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::WorkItemsFinder do
+ using RSpec::Parameterized::TableSyntax
+ include_context 'WorkItemsFinder context'
+
+ it_behaves_like 'issues or work items finder', :work_item, 'WorkItemsFinder#execute context'
+end
diff --git a/spec/fixtures/api/schemas/entities/issue.json b/spec/fixtures/api/schemas/entities/issue.json
index 31743b58d98..b4a076780d9 100644
--- a/spec/fixtures/api/schemas/entities/issue.json
+++ b/spec/fixtures/api/schemas/entities/issue.json
@@ -3,6 +3,7 @@
"properties" : {
"id": { "type": "integer" },
"iid": { "type": "integer" },
+ "type": { "type": "string" },
"author_id": { "type": "integer" },
"description": { "type": ["string", "null"] },
"lock_version": { "type": ["integer", "null"] },
diff --git a/spec/fixtures/api/schemas/entities/issue_board.json b/spec/fixtures/api/schemas/entities/issue_board.json
index 58d3832440c..aa29ca08163 100644
--- a/spec/fixtures/api/schemas/entities/issue_board.json
+++ b/spec/fixtures/api/schemas/entities/issue_board.json
@@ -3,6 +3,7 @@
"properties" : {
"id": { "type": "integer" },
"iid": { "type": "integer" },
+ "type": { "type": "string" },
"title": { "type": "string" },
"confidential": { "type": "boolean" },
"closed": { "type": "boolean" },
diff --git a/spec/fixtures/api/schemas/external_validation.json b/spec/fixtures/api/schemas/external_validation.json
index e95909a2922..ddcabd4c61e 100644
--- a/spec/fixtures/api/schemas/external_validation.json
+++ b/spec/fixtures/api/schemas/external_validation.json
@@ -12,12 +12,16 @@
"required": [
"id",
"path",
- "created_at"
+ "created_at",
+ "shared_runners_enabled",
+ "group_runners_enabled"
],
"properties": {
"id": { "type": "integer" },
"path": { "type": "string" },
- "created_at": { "type": ["string", "null"], "format": "date-time" }
+ "created_at": { "type": ["string", "null"], "format": "date-time" },
+ "shared_runners_enabled": { "type": "boolean" },
+ "group_runners_enabled": { "type": "boolean" }
}
},
"user": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/system_hook.json b/spec/fixtures/api/schemas/public_api/v4/system_hook.json
index f992bc8b809..3fe3e0d658e 100644
--- a/spec/fixtures/api/schemas/public_api/v4/system_hook.json
+++ b/spec/fixtures/api/schemas/public_api/v4/system_hook.json
@@ -8,7 +8,9 @@
"tag_push_events",
"merge_requests_events",
"repository_update_events",
- "enable_ssl_verification"
+ "enable_ssl_verification",
+ "alert_status",
+ "disabled_until"
],
"properties": {
"id": { "type": "integer" },
@@ -18,7 +20,9 @@
"tag_push_events": { "type": "boolean" },
"merge_requests_events": { "type": "boolean" },
"repository_update_events": { "type": "boolean" },
- "enable_ssl_verification": { "type": "boolean" }
+ "enable_ssl_verification": { "type": "boolean" },
+ "alert_status": { "type": "string", "enum": ["executable", "disabled", "temporarily_disabled"] },
+ "disabled_until": { "type": ["string", "null"] }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/emails/service_desk_reference_headers.eml b/spec/fixtures/emails/service_desk_reference_headers.eml
new file mode 100644
index 00000000000..ff10fcdd2f6
--- /dev/null
+++ b/spec/fixtures/emails/service_desk_reference_headers.eml
@@ -0,0 +1,31 @@
+Return-Path: <jake@example.com>
+Received: from myserver.example.com ([unix socket]) by myserver (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Received: from blabla.google.com (blabla.google.com. [1.1.1.1])
+ by bla.google.com with SMTPS id something.1.1.1.1.1.1.1
+ for <abc@appmail.example.com>
+ (Google Transport Security);
+ Mon, 21 Feb 2022 14:41:58 -0800 (PST)
+References: <topic/35@some.other.server.example.com>
+ <abc123@discourse-app.mail>
+Received: from mail.example.com (mail.example.com [IPv6:2607:f8b0:4001:c03::234]) by myserver.example.com (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@example.com>; Thu, 13 Jun 2013 17:03:50 -0400
+From: "jake@example.com" <jake@example.com>
+To: "some_unrelated_email@example.com" <some_unrelated_email@example.com>
+Subject: Re: Insert hilarious subject line here
+Date: Tue, 26 Nov 2019 14:22:41 +0000
+Message-ID: <7e2296f83dbf4de388cbf5f56f52c11f@EXDAG29-1.EXCHANGE.INT>
+Content-Type: multipart/alternative;
+ boundary="_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_"
+MIME-Version: 1.0
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/plain; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+
+
+--_000_7e2296f83dbf4de388cbf5f56f52c11fEXDAG291EXCHANGEINT_
+Content-Type: text/html; charset="iso-8859-1"
+Content-Transfer-Encoding: quoted-printable
+
+This message has unhelpful References headers with no key, and no key in the To headers either.
diff --git a/spec/fixtures/emails/service_desk_reply_illegal_utf8.eml b/spec/fixtures/emails/service_desk_reply_illegal_utf8.eml
new file mode 100644
index 00000000000..8350348ae5f
--- /dev/null
+++ b/spec/fixtures/emails/service_desk_reply_illegal_utf8.eml
@@ -0,0 +1,26 @@
+Return-Path: <alan@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <incoming+gitlabhq/gitlabhq@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <incoming+email-test-project_id-issue-@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+From: Jake the Dog <alan@adventuretime.ooo>
+To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+In-Reply-To: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+Subject: The message subject! @all
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=Shift_JIS
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
+
+Service desk reply!
+ƒs[ƒ^[EƒWƒ‡[ƒ“ƒY
+
+/label ~label2
+ƒs[ƒ^[EƒWƒ‡[ƒ“ƒY
+(encoded with `Shift_JIS` not UTF-8 for the purposes of testing) \ No newline at end of file
diff --git a/spec/fixtures/glfm/example_snapshots/examples_index.yml b/spec/fixtures/glfm/example_snapshots/examples_index.yml
index 98463a30cb6..08f6f88af9e 100644
--- a/spec/fixtures/glfm/example_snapshots/examples_index.yml
+++ b/spec/fixtures/glfm/example_snapshots/examples_index.yml
@@ -1,1370 +1,1370 @@
---
-02_01__preliminaries__tabs__01:
+02_01__preliminaries__tabs__001:
spec_txt_example_position: 1
source_specification: commonmark
-02_01__preliminaries__tabs__02:
+02_01__preliminaries__tabs__002:
spec_txt_example_position: 2
source_specification: commonmark
-02_01__preliminaries__tabs__03:
+02_01__preliminaries__tabs__003:
spec_txt_example_position: 3
source_specification: commonmark
-02_01__preliminaries__tabs__04:
+02_01__preliminaries__tabs__004:
spec_txt_example_position: 4
source_specification: commonmark
-02_01__preliminaries__tabs__05:
+02_01__preliminaries__tabs__005:
spec_txt_example_position: 5
source_specification: commonmark
-02_01__preliminaries__tabs__06:
+02_01__preliminaries__tabs__006:
spec_txt_example_position: 6
source_specification: commonmark
-02_01__preliminaries__tabs__07:
+02_01__preliminaries__tabs__007:
spec_txt_example_position: 7
source_specification: commonmark
-02_01__preliminaries__tabs__08:
+02_01__preliminaries__tabs__008:
spec_txt_example_position: 8
source_specification: commonmark
-02_01__preliminaries__tabs__09:
+02_01__preliminaries__tabs__009:
spec_txt_example_position: 9
source_specification: commonmark
-02_01__preliminaries__tabs__10:
+02_01__preliminaries__tabs__010:
spec_txt_example_position: 10
source_specification: commonmark
-02_01__preliminaries__tabs__11:
+02_01__preliminaries__tabs__011:
spec_txt_example_position: 11
source_specification: commonmark
-03_01__blocks_and_inlines__precedence__01:
+03_01__blocks_and_inlines__precedence__001:
spec_txt_example_position: 12
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__01:
+04_01__leaf_blocks__thematic_breaks__001:
spec_txt_example_position: 13
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__02:
+04_01__leaf_blocks__thematic_breaks__002:
spec_txt_example_position: 14
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__03:
+04_01__leaf_blocks__thematic_breaks__003:
spec_txt_example_position: 15
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__04:
+04_01__leaf_blocks__thematic_breaks__004:
spec_txt_example_position: 16
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__05:
+04_01__leaf_blocks__thematic_breaks__005:
spec_txt_example_position: 17
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__06:
+04_01__leaf_blocks__thematic_breaks__006:
spec_txt_example_position: 18
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__07:
+04_01__leaf_blocks__thematic_breaks__007:
spec_txt_example_position: 19
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__08:
+04_01__leaf_blocks__thematic_breaks__008:
spec_txt_example_position: 20
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__09:
+04_01__leaf_blocks__thematic_breaks__009:
spec_txt_example_position: 21
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__10:
+04_01__leaf_blocks__thematic_breaks__010:
spec_txt_example_position: 22
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__11:
+04_01__leaf_blocks__thematic_breaks__011:
spec_txt_example_position: 23
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__12:
+04_01__leaf_blocks__thematic_breaks__012:
spec_txt_example_position: 24
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__13:
+04_01__leaf_blocks__thematic_breaks__013:
spec_txt_example_position: 25
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__14:
+04_01__leaf_blocks__thematic_breaks__014:
spec_txt_example_position: 26
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__15:
+04_01__leaf_blocks__thematic_breaks__015:
spec_txt_example_position: 27
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__16:
+04_01__leaf_blocks__thematic_breaks__016:
spec_txt_example_position: 28
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__17:
+04_01__leaf_blocks__thematic_breaks__017:
spec_txt_example_position: 29
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__18:
+04_01__leaf_blocks__thematic_breaks__018:
spec_txt_example_position: 30
source_specification: commonmark
-04_01__leaf_blocks__thematic_breaks__19:
+04_01__leaf_blocks__thematic_breaks__019:
spec_txt_example_position: 31
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__01:
+04_02__leaf_blocks__atx_headings__001:
spec_txt_example_position: 32
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__02:
+04_02__leaf_blocks__atx_headings__002:
spec_txt_example_position: 33
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__03:
+04_02__leaf_blocks__atx_headings__003:
spec_txt_example_position: 34
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__04:
+04_02__leaf_blocks__atx_headings__004:
spec_txt_example_position: 35
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__05:
+04_02__leaf_blocks__atx_headings__005:
spec_txt_example_position: 36
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__06:
+04_02__leaf_blocks__atx_headings__006:
spec_txt_example_position: 37
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__07:
+04_02__leaf_blocks__atx_headings__007:
spec_txt_example_position: 38
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__08:
+04_02__leaf_blocks__atx_headings__008:
spec_txt_example_position: 39
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__09:
+04_02__leaf_blocks__atx_headings__009:
spec_txt_example_position: 40
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__10:
+04_02__leaf_blocks__atx_headings__010:
spec_txt_example_position: 41
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__11:
+04_02__leaf_blocks__atx_headings__011:
spec_txt_example_position: 42
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__12:
+04_02__leaf_blocks__atx_headings__012:
spec_txt_example_position: 43
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__13:
+04_02__leaf_blocks__atx_headings__013:
spec_txt_example_position: 44
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__14:
+04_02__leaf_blocks__atx_headings__014:
spec_txt_example_position: 45
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__15:
+04_02__leaf_blocks__atx_headings__015:
spec_txt_example_position: 46
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__16:
+04_02__leaf_blocks__atx_headings__016:
spec_txt_example_position: 47
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__17:
+04_02__leaf_blocks__atx_headings__017:
spec_txt_example_position: 48
source_specification: commonmark
-04_02__leaf_blocks__atx_headings__18:
+04_02__leaf_blocks__atx_headings__018:
spec_txt_example_position: 49
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__01:
+04_03__leaf_blocks__setext_headings__001:
spec_txt_example_position: 50
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__02:
+04_03__leaf_blocks__setext_headings__002:
spec_txt_example_position: 51
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__03:
+04_03__leaf_blocks__setext_headings__003:
spec_txt_example_position: 52
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__04:
+04_03__leaf_blocks__setext_headings__004:
spec_txt_example_position: 53
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__05:
+04_03__leaf_blocks__setext_headings__005:
spec_txt_example_position: 54
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__06:
+04_03__leaf_blocks__setext_headings__006:
spec_txt_example_position: 55
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__07:
+04_03__leaf_blocks__setext_headings__007:
spec_txt_example_position: 56
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__08:
+04_03__leaf_blocks__setext_headings__008:
spec_txt_example_position: 57
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__09:
+04_03__leaf_blocks__setext_headings__009:
spec_txt_example_position: 58
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__10:
+04_03__leaf_blocks__setext_headings__010:
spec_txt_example_position: 59
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__11:
+04_03__leaf_blocks__setext_headings__011:
spec_txt_example_position: 60
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__12:
+04_03__leaf_blocks__setext_headings__012:
spec_txt_example_position: 61
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__13:
+04_03__leaf_blocks__setext_headings__013:
spec_txt_example_position: 62
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__14:
+04_03__leaf_blocks__setext_headings__014:
spec_txt_example_position: 63
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__15:
+04_03__leaf_blocks__setext_headings__015:
spec_txt_example_position: 64
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__16:
+04_03__leaf_blocks__setext_headings__016:
spec_txt_example_position: 65
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__17:
+04_03__leaf_blocks__setext_headings__017:
spec_txt_example_position: 66
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__18:
+04_03__leaf_blocks__setext_headings__018:
spec_txt_example_position: 67
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__19:
+04_03__leaf_blocks__setext_headings__019:
spec_txt_example_position: 68
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__20:
+04_03__leaf_blocks__setext_headings__020:
spec_txt_example_position: 69
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__21:
+04_03__leaf_blocks__setext_headings__021:
spec_txt_example_position: 70
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__22:
+04_03__leaf_blocks__setext_headings__022:
spec_txt_example_position: 71
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__23:
+04_03__leaf_blocks__setext_headings__023:
spec_txt_example_position: 72
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__24:
+04_03__leaf_blocks__setext_headings__024:
spec_txt_example_position: 73
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__25:
+04_03__leaf_blocks__setext_headings__025:
spec_txt_example_position: 74
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__26:
+04_03__leaf_blocks__setext_headings__026:
spec_txt_example_position: 75
source_specification: commonmark
-04_03__leaf_blocks__setext_headings__27:
+04_03__leaf_blocks__setext_headings__027:
spec_txt_example_position: 76
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__01:
+04_04__leaf_blocks__indented_code_blocks__001:
spec_txt_example_position: 77
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__02:
+04_04__leaf_blocks__indented_code_blocks__002:
spec_txt_example_position: 78
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__03:
+04_04__leaf_blocks__indented_code_blocks__003:
spec_txt_example_position: 79
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__04:
+04_04__leaf_blocks__indented_code_blocks__004:
spec_txt_example_position: 80
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__05:
+04_04__leaf_blocks__indented_code_blocks__005:
spec_txt_example_position: 81
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__06:
+04_04__leaf_blocks__indented_code_blocks__006:
spec_txt_example_position: 82
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__07:
+04_04__leaf_blocks__indented_code_blocks__007:
spec_txt_example_position: 83
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__08:
+04_04__leaf_blocks__indented_code_blocks__008:
spec_txt_example_position: 84
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__09:
+04_04__leaf_blocks__indented_code_blocks__009:
spec_txt_example_position: 85
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__10:
+04_04__leaf_blocks__indented_code_blocks__010:
spec_txt_example_position: 86
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__11:
+04_04__leaf_blocks__indented_code_blocks__011:
spec_txt_example_position: 87
source_specification: commonmark
-04_04__leaf_blocks__indented_code_blocks__12:
+04_04__leaf_blocks__indented_code_blocks__012:
spec_txt_example_position: 88
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__01:
+04_05__leaf_blocks__fenced_code_blocks__001:
spec_txt_example_position: 89
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__02:
+04_05__leaf_blocks__fenced_code_blocks__002:
spec_txt_example_position: 90
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__03:
+04_05__leaf_blocks__fenced_code_blocks__003:
spec_txt_example_position: 91
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__04:
+04_05__leaf_blocks__fenced_code_blocks__004:
spec_txt_example_position: 92
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__05:
+04_05__leaf_blocks__fenced_code_blocks__005:
spec_txt_example_position: 93
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__06:
+04_05__leaf_blocks__fenced_code_blocks__006:
spec_txt_example_position: 94
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__07:
+04_05__leaf_blocks__fenced_code_blocks__007:
spec_txt_example_position: 95
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__08:
+04_05__leaf_blocks__fenced_code_blocks__008:
spec_txt_example_position: 96
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__09:
+04_05__leaf_blocks__fenced_code_blocks__009:
spec_txt_example_position: 97
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__10:
+04_05__leaf_blocks__fenced_code_blocks__010:
spec_txt_example_position: 98
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__11:
+04_05__leaf_blocks__fenced_code_blocks__011:
spec_txt_example_position: 99
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__12:
+04_05__leaf_blocks__fenced_code_blocks__012:
spec_txt_example_position: 100
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__13:
+04_05__leaf_blocks__fenced_code_blocks__013:
spec_txt_example_position: 101
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__14:
+04_05__leaf_blocks__fenced_code_blocks__014:
spec_txt_example_position: 102
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__15:
+04_05__leaf_blocks__fenced_code_blocks__015:
spec_txt_example_position: 103
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__16:
+04_05__leaf_blocks__fenced_code_blocks__016:
spec_txt_example_position: 104
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__17:
+04_05__leaf_blocks__fenced_code_blocks__017:
spec_txt_example_position: 105
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__18:
+04_05__leaf_blocks__fenced_code_blocks__018:
spec_txt_example_position: 106
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__19:
+04_05__leaf_blocks__fenced_code_blocks__019:
spec_txt_example_position: 107
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__20:
+04_05__leaf_blocks__fenced_code_blocks__020:
spec_txt_example_position: 108
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__21:
+04_05__leaf_blocks__fenced_code_blocks__021:
spec_txt_example_position: 109
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__22:
+04_05__leaf_blocks__fenced_code_blocks__022:
spec_txt_example_position: 110
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__23:
+04_05__leaf_blocks__fenced_code_blocks__023:
spec_txt_example_position: 111
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__24:
+04_05__leaf_blocks__fenced_code_blocks__024:
spec_txt_example_position: 112
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__25:
+04_05__leaf_blocks__fenced_code_blocks__025:
spec_txt_example_position: 113
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__26:
+04_05__leaf_blocks__fenced_code_blocks__026:
spec_txt_example_position: 114
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__27:
+04_05__leaf_blocks__fenced_code_blocks__027:
spec_txt_example_position: 115
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__28:
+04_05__leaf_blocks__fenced_code_blocks__028:
spec_txt_example_position: 116
source_specification: commonmark
-04_05__leaf_blocks__fenced_code_blocks__29:
+04_05__leaf_blocks__fenced_code_blocks__029:
spec_txt_example_position: 117
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__01:
+04_06__leaf_blocks__html_blocks__001:
spec_txt_example_position: 118
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__02:
+04_06__leaf_blocks__html_blocks__002:
spec_txt_example_position: 119
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__03:
+04_06__leaf_blocks__html_blocks__003:
spec_txt_example_position: 120
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__04:
+04_06__leaf_blocks__html_blocks__004:
spec_txt_example_position: 121
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__05:
+04_06__leaf_blocks__html_blocks__005:
spec_txt_example_position: 122
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__06:
+04_06__leaf_blocks__html_blocks__006:
spec_txt_example_position: 123
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__07:
+04_06__leaf_blocks__html_blocks__007:
spec_txt_example_position: 124
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__08:
+04_06__leaf_blocks__html_blocks__008:
spec_txt_example_position: 125
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__09:
+04_06__leaf_blocks__html_blocks__009:
spec_txt_example_position: 126
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__10:
+04_06__leaf_blocks__html_blocks__010:
spec_txt_example_position: 127
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__11:
+04_06__leaf_blocks__html_blocks__011:
spec_txt_example_position: 128
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__12:
+04_06__leaf_blocks__html_blocks__012:
spec_txt_example_position: 129
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__13:
+04_06__leaf_blocks__html_blocks__013:
spec_txt_example_position: 130
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__14:
+04_06__leaf_blocks__html_blocks__014:
spec_txt_example_position: 131
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__15:
+04_06__leaf_blocks__html_blocks__015:
spec_txt_example_position: 132
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__16:
+04_06__leaf_blocks__html_blocks__016:
spec_txt_example_position: 133
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__17:
+04_06__leaf_blocks__html_blocks__017:
spec_txt_example_position: 134
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__18:
+04_06__leaf_blocks__html_blocks__018:
spec_txt_example_position: 135
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__19:
+04_06__leaf_blocks__html_blocks__019:
spec_txt_example_position: 136
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__20:
+04_06__leaf_blocks__html_blocks__020:
spec_txt_example_position: 137
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__21:
+04_06__leaf_blocks__html_blocks__021:
spec_txt_example_position: 138
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__22:
+04_06__leaf_blocks__html_blocks__022:
spec_txt_example_position: 139
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__23:
+04_06__leaf_blocks__html_blocks__023:
spec_txt_example_position: 140
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__24:
+04_06__leaf_blocks__html_blocks__024:
spec_txt_example_position: 141
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__25:
+04_06__leaf_blocks__html_blocks__025:
spec_txt_example_position: 142
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__26:
+04_06__leaf_blocks__html_blocks__026:
spec_txt_example_position: 143
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__27:
+04_06__leaf_blocks__html_blocks__027:
spec_txt_example_position: 144
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__28:
+04_06__leaf_blocks__html_blocks__028:
spec_txt_example_position: 145
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__29:
+04_06__leaf_blocks__html_blocks__029:
spec_txt_example_position: 146
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__30:
+04_06__leaf_blocks__html_blocks__030:
spec_txt_example_position: 147
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__31:
+04_06__leaf_blocks__html_blocks__031:
spec_txt_example_position: 148
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__32:
+04_06__leaf_blocks__html_blocks__032:
spec_txt_example_position: 149
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__33:
+04_06__leaf_blocks__html_blocks__033:
spec_txt_example_position: 150
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__34:
+04_06__leaf_blocks__html_blocks__034:
spec_txt_example_position: 151
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__35:
+04_06__leaf_blocks__html_blocks__035:
spec_txt_example_position: 152
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__36:
+04_06__leaf_blocks__html_blocks__036:
spec_txt_example_position: 153
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__37:
+04_06__leaf_blocks__html_blocks__037:
spec_txt_example_position: 154
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__38:
+04_06__leaf_blocks__html_blocks__038:
spec_txt_example_position: 155
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__39:
+04_06__leaf_blocks__html_blocks__039:
spec_txt_example_position: 156
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__40:
+04_06__leaf_blocks__html_blocks__040:
spec_txt_example_position: 157
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__41:
+04_06__leaf_blocks__html_blocks__041:
spec_txt_example_position: 158
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__42:
+04_06__leaf_blocks__html_blocks__042:
spec_txt_example_position: 159
source_specification: commonmark
-04_06__leaf_blocks__html_blocks__43:
+04_06__leaf_blocks__html_blocks__043:
spec_txt_example_position: 160
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__01:
+04_07__leaf_blocks__link_reference_definitions__001:
spec_txt_example_position: 161
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__02:
+04_07__leaf_blocks__link_reference_definitions__002:
spec_txt_example_position: 162
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__03:
+04_07__leaf_blocks__link_reference_definitions__003:
spec_txt_example_position: 163
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__04:
+04_07__leaf_blocks__link_reference_definitions__004:
spec_txt_example_position: 164
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__05:
+04_07__leaf_blocks__link_reference_definitions__005:
spec_txt_example_position: 165
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__06:
+04_07__leaf_blocks__link_reference_definitions__006:
spec_txt_example_position: 166
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__07:
+04_07__leaf_blocks__link_reference_definitions__007:
spec_txt_example_position: 167
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__08:
+04_07__leaf_blocks__link_reference_definitions__008:
spec_txt_example_position: 168
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__09:
+04_07__leaf_blocks__link_reference_definitions__009:
spec_txt_example_position: 169
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__10:
+04_07__leaf_blocks__link_reference_definitions__010:
spec_txt_example_position: 170
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__11:
+04_07__leaf_blocks__link_reference_definitions__011:
spec_txt_example_position: 171
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__12:
+04_07__leaf_blocks__link_reference_definitions__012:
spec_txt_example_position: 172
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__13:
+04_07__leaf_blocks__link_reference_definitions__013:
spec_txt_example_position: 173
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__14:
+04_07__leaf_blocks__link_reference_definitions__014:
spec_txt_example_position: 174
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__15:
+04_07__leaf_blocks__link_reference_definitions__015:
spec_txt_example_position: 175
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__16:
+04_07__leaf_blocks__link_reference_definitions__016:
spec_txt_example_position: 176
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__17:
+04_07__leaf_blocks__link_reference_definitions__017:
spec_txt_example_position: 177
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__18:
+04_07__leaf_blocks__link_reference_definitions__018:
spec_txt_example_position: 178
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__19:
+04_07__leaf_blocks__link_reference_definitions__019:
spec_txt_example_position: 179
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__20:
+04_07__leaf_blocks__link_reference_definitions__020:
spec_txt_example_position: 180
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__21:
+04_07__leaf_blocks__link_reference_definitions__021:
spec_txt_example_position: 181
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__22:
+04_07__leaf_blocks__link_reference_definitions__022:
spec_txt_example_position: 182
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__23:
+04_07__leaf_blocks__link_reference_definitions__023:
spec_txt_example_position: 183
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__24:
+04_07__leaf_blocks__link_reference_definitions__024:
spec_txt_example_position: 184
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__25:
+04_07__leaf_blocks__link_reference_definitions__025:
spec_txt_example_position: 185
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__26:
+04_07__leaf_blocks__link_reference_definitions__026:
spec_txt_example_position: 186
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__27:
+04_07__leaf_blocks__link_reference_definitions__027:
spec_txt_example_position: 187
source_specification: commonmark
-04_07__leaf_blocks__link_reference_definitions__28:
+04_07__leaf_blocks__link_reference_definitions__028:
spec_txt_example_position: 188
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__01:
+04_08__leaf_blocks__paragraphs__001:
spec_txt_example_position: 189
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__02:
+04_08__leaf_blocks__paragraphs__002:
spec_txt_example_position: 190
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__03:
+04_08__leaf_blocks__paragraphs__003:
spec_txt_example_position: 191
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__04:
+04_08__leaf_blocks__paragraphs__004:
spec_txt_example_position: 192
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__05:
+04_08__leaf_blocks__paragraphs__005:
spec_txt_example_position: 193
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__06:
+04_08__leaf_blocks__paragraphs__006:
spec_txt_example_position: 194
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__07:
+04_08__leaf_blocks__paragraphs__007:
spec_txt_example_position: 195
source_specification: commonmark
-04_08__leaf_blocks__paragraphs__08:
+04_08__leaf_blocks__paragraphs__008:
spec_txt_example_position: 196
source_specification: commonmark
-04_09__leaf_blocks__blank_lines__01:
+04_09__leaf_blocks__blank_lines__001:
spec_txt_example_position: 197
source_specification: commonmark
-04_10__leaf_blocks__tables_extension__01:
+04_10__leaf_blocks__tables_extension__001:
spec_txt_example_position: 198
source_specification: github
-04_10__leaf_blocks__tables_extension__02:
+04_10__leaf_blocks__tables_extension__002:
spec_txt_example_position: 199
source_specification: github
-04_10__leaf_blocks__tables_extension__03:
+04_10__leaf_blocks__tables_extension__003:
spec_txt_example_position: 200
source_specification: github
-04_10__leaf_blocks__tables_extension__04:
+04_10__leaf_blocks__tables_extension__004:
spec_txt_example_position: 201
source_specification: github
-04_10__leaf_blocks__tables_extension__05:
+04_10__leaf_blocks__tables_extension__005:
spec_txt_example_position: 202
source_specification: github
-04_10__leaf_blocks__tables_extension__06:
+04_10__leaf_blocks__tables_extension__006:
spec_txt_example_position: 203
source_specification: github
-04_10__leaf_blocks__tables_extension__07:
+04_10__leaf_blocks__tables_extension__007:
spec_txt_example_position: 204
source_specification: github
-04_10__leaf_blocks__tables_extension__08:
+04_10__leaf_blocks__tables_extension__008:
spec_txt_example_position: 205
source_specification: github
-05_01__container_blocks__block_quotes__01:
+05_01__container_blocks__block_quotes__001:
spec_txt_example_position: 206
source_specification: commonmark
-05_01__container_blocks__block_quotes__02:
+05_01__container_blocks__block_quotes__002:
spec_txt_example_position: 207
source_specification: commonmark
-05_01__container_blocks__block_quotes__03:
+05_01__container_blocks__block_quotes__003:
spec_txt_example_position: 208
source_specification: commonmark
-05_01__container_blocks__block_quotes__04:
+05_01__container_blocks__block_quotes__004:
spec_txt_example_position: 209
source_specification: commonmark
-05_01__container_blocks__block_quotes__05:
+05_01__container_blocks__block_quotes__005:
spec_txt_example_position: 210
source_specification: commonmark
-05_01__container_blocks__block_quotes__06:
+05_01__container_blocks__block_quotes__006:
spec_txt_example_position: 211
source_specification: commonmark
-05_01__container_blocks__block_quotes__07:
+05_01__container_blocks__block_quotes__007:
spec_txt_example_position: 212
source_specification: commonmark
-05_01__container_blocks__block_quotes__08:
+05_01__container_blocks__block_quotes__008:
spec_txt_example_position: 213
source_specification: commonmark
-05_01__container_blocks__block_quotes__09:
+05_01__container_blocks__block_quotes__009:
spec_txt_example_position: 214
source_specification: commonmark
-05_01__container_blocks__block_quotes__10:
+05_01__container_blocks__block_quotes__010:
spec_txt_example_position: 215
source_specification: commonmark
-05_01__container_blocks__block_quotes__11:
+05_01__container_blocks__block_quotes__011:
spec_txt_example_position: 216
source_specification: commonmark
-05_01__container_blocks__block_quotes__12:
+05_01__container_blocks__block_quotes__012:
spec_txt_example_position: 217
source_specification: commonmark
-05_01__container_blocks__block_quotes__13:
+05_01__container_blocks__block_quotes__013:
spec_txt_example_position: 218
source_specification: commonmark
-05_01__container_blocks__block_quotes__14:
+05_01__container_blocks__block_quotes__014:
spec_txt_example_position: 219
source_specification: commonmark
-05_01__container_blocks__block_quotes__15:
+05_01__container_blocks__block_quotes__015:
spec_txt_example_position: 220
source_specification: commonmark
-05_01__container_blocks__block_quotes__16:
+05_01__container_blocks__block_quotes__016:
spec_txt_example_position: 221
source_specification: commonmark
-05_01__container_blocks__block_quotes__17:
+05_01__container_blocks__block_quotes__017:
spec_txt_example_position: 222
source_specification: commonmark
-05_01__container_blocks__block_quotes__18:
+05_01__container_blocks__block_quotes__018:
spec_txt_example_position: 223
source_specification: commonmark
-05_01__container_blocks__block_quotes__19:
+05_01__container_blocks__block_quotes__019:
spec_txt_example_position: 224
source_specification: commonmark
-05_01__container_blocks__block_quotes__20:
+05_01__container_blocks__block_quotes__020:
spec_txt_example_position: 225
source_specification: commonmark
-05_01__container_blocks__block_quotes__21:
+05_01__container_blocks__block_quotes__021:
spec_txt_example_position: 226
source_specification: commonmark
-05_01__container_blocks__block_quotes__22:
+05_01__container_blocks__block_quotes__022:
spec_txt_example_position: 227
source_specification: commonmark
-05_01__container_blocks__block_quotes__23:
+05_01__container_blocks__block_quotes__023:
spec_txt_example_position: 228
source_specification: commonmark
-05_01__container_blocks__block_quotes__24:
+05_01__container_blocks__block_quotes__024:
spec_txt_example_position: 229
source_specification: commonmark
-05_01__container_blocks__block_quotes__25:
+05_01__container_blocks__block_quotes__025:
spec_txt_example_position: 230
source_specification: commonmark
-05_02__container_blocks__list_items__01:
+05_02__container_blocks__list_items__001:
spec_txt_example_position: 231
source_specification: commonmark
-05_02__container_blocks__list_items__02:
+05_02__container_blocks__list_items__002:
spec_txt_example_position: 232
source_specification: commonmark
-05_02__container_blocks__list_items__03:
+05_02__container_blocks__list_items__003:
spec_txt_example_position: 233
source_specification: commonmark
-05_02__container_blocks__list_items__04:
+05_02__container_blocks__list_items__004:
spec_txt_example_position: 234
source_specification: commonmark
-05_02__container_blocks__list_items__05:
+05_02__container_blocks__list_items__005:
spec_txt_example_position: 235
source_specification: commonmark
-05_02__container_blocks__list_items__06:
+05_02__container_blocks__list_items__006:
spec_txt_example_position: 236
source_specification: commonmark
-05_02__container_blocks__list_items__07:
+05_02__container_blocks__list_items__007:
spec_txt_example_position: 237
source_specification: commonmark
-05_02__container_blocks__list_items__08:
+05_02__container_blocks__list_items__008:
spec_txt_example_position: 238
source_specification: commonmark
-05_02__container_blocks__list_items__09:
+05_02__container_blocks__list_items__009:
spec_txt_example_position: 239
source_specification: commonmark
-05_02__container_blocks__list_items__10:
+05_02__container_blocks__list_items__010:
spec_txt_example_position: 240
source_specification: commonmark
-05_02__container_blocks__list_items__11:
+05_02__container_blocks__list_items__011:
spec_txt_example_position: 241
source_specification: commonmark
-05_02__container_blocks__list_items__12:
+05_02__container_blocks__list_items__012:
spec_txt_example_position: 242
source_specification: commonmark
-05_02__container_blocks__list_items__13:
+05_02__container_blocks__list_items__013:
spec_txt_example_position: 243
source_specification: commonmark
-05_02__container_blocks__list_items__14:
+05_02__container_blocks__list_items__014:
spec_txt_example_position: 244
source_specification: commonmark
-05_02__container_blocks__list_items__15:
+05_02__container_blocks__list_items__015:
spec_txt_example_position: 245
source_specification: commonmark
-05_02__container_blocks__list_items__16:
+05_02__container_blocks__list_items__016:
spec_txt_example_position: 246
source_specification: commonmark
-05_02__container_blocks__list_items__17:
+05_02__container_blocks__list_items__017:
spec_txt_example_position: 247
source_specification: commonmark
-05_02__container_blocks__list_items__18:
+05_02__container_blocks__list_items__018:
spec_txt_example_position: 248
source_specification: commonmark
-05_02__container_blocks__list_items__19:
+05_02__container_blocks__list_items__019:
spec_txt_example_position: 249
source_specification: commonmark
-05_02__container_blocks__list_items__20:
+05_02__container_blocks__list_items__020:
spec_txt_example_position: 250
source_specification: commonmark
-05_02__container_blocks__list_items__21:
+05_02__container_blocks__list_items__021:
spec_txt_example_position: 251
source_specification: commonmark
-05_02__container_blocks__list_items__22:
+05_02__container_blocks__list_items__022:
spec_txt_example_position: 252
source_specification: commonmark
-05_02__container_blocks__list_items__23:
+05_02__container_blocks__list_items__023:
spec_txt_example_position: 253
source_specification: commonmark
-05_02__container_blocks__list_items__24:
+05_02__container_blocks__list_items__024:
spec_txt_example_position: 254
source_specification: commonmark
-05_02__container_blocks__list_items__25:
+05_02__container_blocks__list_items__025:
spec_txt_example_position: 255
source_specification: commonmark
-05_02__container_blocks__list_items__26:
+05_02__container_blocks__list_items__026:
spec_txt_example_position: 256
source_specification: commonmark
-05_02__container_blocks__list_items__27:
+05_02__container_blocks__list_items__027:
spec_txt_example_position: 257
source_specification: commonmark
-05_02__container_blocks__list_items__28:
+05_02__container_blocks__list_items__028:
spec_txt_example_position: 258
source_specification: commonmark
-05_02__container_blocks__list_items__29:
+05_02__container_blocks__list_items__029:
spec_txt_example_position: 259
source_specification: commonmark
-05_02__container_blocks__list_items__30:
+05_02__container_blocks__list_items__030:
spec_txt_example_position: 260
source_specification: commonmark
-05_02__container_blocks__list_items__31:
+05_02__container_blocks__list_items__031:
spec_txt_example_position: 261
source_specification: commonmark
-05_02__container_blocks__list_items__32:
+05_02__container_blocks__list_items__032:
spec_txt_example_position: 262
source_specification: commonmark
-05_02__container_blocks__list_items__33:
+05_02__container_blocks__list_items__033:
spec_txt_example_position: 263
source_specification: commonmark
-05_02__container_blocks__list_items__34:
+05_02__container_blocks__list_items__034:
spec_txt_example_position: 264
source_specification: commonmark
-05_02__container_blocks__list_items__35:
+05_02__container_blocks__list_items__035:
spec_txt_example_position: 265
source_specification: commonmark
-05_02__container_blocks__list_items__36:
+05_02__container_blocks__list_items__036:
spec_txt_example_position: 266
source_specification: commonmark
-05_02__container_blocks__list_items__37:
+05_02__container_blocks__list_items__037:
spec_txt_example_position: 267
source_specification: commonmark
-05_02__container_blocks__list_items__38:
+05_02__container_blocks__list_items__038:
spec_txt_example_position: 268
source_specification: commonmark
-05_02__container_blocks__list_items__39:
+05_02__container_blocks__list_items__039:
spec_txt_example_position: 269
source_specification: commonmark
-05_02__container_blocks__list_items__40:
+05_02__container_blocks__list_items__040:
spec_txt_example_position: 270
source_specification: commonmark
-05_02__container_blocks__list_items__41:
+05_02__container_blocks__list_items__041:
spec_txt_example_position: 271
source_specification: commonmark
-05_02__container_blocks__list_items__42:
+05_02__container_blocks__list_items__042:
spec_txt_example_position: 272
source_specification: commonmark
-05_02__container_blocks__list_items__43:
+05_02__container_blocks__list_items__043:
spec_txt_example_position: 273
source_specification: commonmark
-05_02__container_blocks__list_items__44:
+05_02__container_blocks__list_items__044:
spec_txt_example_position: 274
source_specification: commonmark
-05_02__container_blocks__list_items__45:
+05_02__container_blocks__list_items__045:
spec_txt_example_position: 275
source_specification: commonmark
-05_02__container_blocks__list_items__46:
+05_02__container_blocks__list_items__046:
spec_txt_example_position: 276
source_specification: commonmark
-05_02__container_blocks__list_items__47:
+05_02__container_blocks__list_items__047:
spec_txt_example_position: 277
source_specification: commonmark
-05_02__container_blocks__list_items__48:
+05_02__container_blocks__list_items__048:
spec_txt_example_position: 278
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__49:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049:
spec_txt_example_position: 281
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__50:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050:
spec_txt_example_position: 282
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__51:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051:
spec_txt_example_position: 283
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__52:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052:
spec_txt_example_position: 284
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__53:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053:
spec_txt_example_position: 285
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__54:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054:
spec_txt_example_position: 286
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__55:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055:
spec_txt_example_position: 287
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__56:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056:
spec_txt_example_position: 288
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__57:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057:
spec_txt_example_position: 289
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__58:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058:
spec_txt_example_position: 290
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__59:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059:
spec_txt_example_position: 291
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__60:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060:
spec_txt_example_position: 292
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__61:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061:
spec_txt_example_position: 293
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__62:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062:
spec_txt_example_position: 294
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__63:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063:
spec_txt_example_position: 295
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__64:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064:
spec_txt_example_position: 296
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__65:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065:
spec_txt_example_position: 297
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__66:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066:
spec_txt_example_position: 298
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__67:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067:
spec_txt_example_position: 299
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__68:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068:
spec_txt_example_position: 300
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__69:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069:
spec_txt_example_position: 301
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__70:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070:
spec_txt_example_position: 302
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__71:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071:
spec_txt_example_position: 303
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__72:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072:
spec_txt_example_position: 304
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__73:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073:
spec_txt_example_position: 305
source_specification: commonmark
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__74:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074:
spec_txt_example_position: 306
source_specification: commonmark
-06_01__inlines__01:
+06_01__inlines__001:
spec_txt_example_position: 307
source_specification: commonmark
-06_02__inlines__backslash_escapes__01:
+06_02__inlines__backslash_escapes__001:
spec_txt_example_position: 308
source_specification: commonmark
-06_02__inlines__backslash_escapes__02:
+06_02__inlines__backslash_escapes__002:
spec_txt_example_position: 309
source_specification: commonmark
-06_02__inlines__backslash_escapes__03:
+06_02__inlines__backslash_escapes__003:
spec_txt_example_position: 310
source_specification: commonmark
-06_02__inlines__backslash_escapes__04:
+06_02__inlines__backslash_escapes__004:
spec_txt_example_position: 311
source_specification: commonmark
-06_02__inlines__backslash_escapes__05:
+06_02__inlines__backslash_escapes__005:
spec_txt_example_position: 312
source_specification: commonmark
-06_02__inlines__backslash_escapes__06:
+06_02__inlines__backslash_escapes__006:
spec_txt_example_position: 313
source_specification: commonmark
-06_02__inlines__backslash_escapes__07:
+06_02__inlines__backslash_escapes__007:
spec_txt_example_position: 314
source_specification: commonmark
-06_02__inlines__backslash_escapes__08:
+06_02__inlines__backslash_escapes__008:
spec_txt_example_position: 315
source_specification: commonmark
-06_02__inlines__backslash_escapes__09:
+06_02__inlines__backslash_escapes__009:
spec_txt_example_position: 316
source_specification: commonmark
-06_02__inlines__backslash_escapes__10:
+06_02__inlines__backslash_escapes__010:
spec_txt_example_position: 317
source_specification: commonmark
-06_02__inlines__backslash_escapes__11:
+06_02__inlines__backslash_escapes__011:
spec_txt_example_position: 318
source_specification: commonmark
-06_02__inlines__backslash_escapes__12:
+06_02__inlines__backslash_escapes__012:
spec_txt_example_position: 319
source_specification: commonmark
-06_02__inlines__backslash_escapes__13:
+06_02__inlines__backslash_escapes__013:
spec_txt_example_position: 320
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__01:
+06_03__inlines__entity_and_numeric_character_references__001:
spec_txt_example_position: 321
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__02:
+06_03__inlines__entity_and_numeric_character_references__002:
spec_txt_example_position: 322
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__03:
+06_03__inlines__entity_and_numeric_character_references__003:
spec_txt_example_position: 323
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__04:
+06_03__inlines__entity_and_numeric_character_references__004:
spec_txt_example_position: 324
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__05:
+06_03__inlines__entity_and_numeric_character_references__005:
spec_txt_example_position: 325
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__06:
+06_03__inlines__entity_and_numeric_character_references__006:
spec_txt_example_position: 326
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__07:
+06_03__inlines__entity_and_numeric_character_references__007:
spec_txt_example_position: 327
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__08:
+06_03__inlines__entity_and_numeric_character_references__008:
spec_txt_example_position: 328
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__09:
+06_03__inlines__entity_and_numeric_character_references__009:
spec_txt_example_position: 329
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__10:
+06_03__inlines__entity_and_numeric_character_references__010:
spec_txt_example_position: 330
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__11:
+06_03__inlines__entity_and_numeric_character_references__011:
spec_txt_example_position: 331
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__12:
+06_03__inlines__entity_and_numeric_character_references__012:
spec_txt_example_position: 332
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__13:
+06_03__inlines__entity_and_numeric_character_references__013:
spec_txt_example_position: 333
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__14:
+06_03__inlines__entity_and_numeric_character_references__014:
spec_txt_example_position: 334
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__15:
+06_03__inlines__entity_and_numeric_character_references__015:
spec_txt_example_position: 335
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__16:
+06_03__inlines__entity_and_numeric_character_references__016:
spec_txt_example_position: 336
source_specification: commonmark
-06_03__inlines__entity_and_numeric_character_references__17:
+06_03__inlines__entity_and_numeric_character_references__017:
spec_txt_example_position: 337
source_specification: commonmark
-06_04__inlines__code_spans__01:
+06_04__inlines__code_spans__001:
spec_txt_example_position: 338
source_specification: commonmark
-06_04__inlines__code_spans__02:
+06_04__inlines__code_spans__002:
spec_txt_example_position: 339
source_specification: commonmark
-06_04__inlines__code_spans__03:
+06_04__inlines__code_spans__003:
spec_txt_example_position: 340
source_specification: commonmark
-06_04__inlines__code_spans__04:
+06_04__inlines__code_spans__004:
spec_txt_example_position: 341
source_specification: commonmark
-06_04__inlines__code_spans__05:
+06_04__inlines__code_spans__005:
spec_txt_example_position: 342
source_specification: commonmark
-06_04__inlines__code_spans__06:
+06_04__inlines__code_spans__006:
spec_txt_example_position: 343
source_specification: commonmark
-06_04__inlines__code_spans__07:
+06_04__inlines__code_spans__007:
spec_txt_example_position: 344
source_specification: commonmark
-06_04__inlines__code_spans__08:
+06_04__inlines__code_spans__008:
spec_txt_example_position: 345
source_specification: commonmark
-06_04__inlines__code_spans__09:
+06_04__inlines__code_spans__009:
spec_txt_example_position: 346
source_specification: commonmark
-06_04__inlines__code_spans__10:
+06_04__inlines__code_spans__010:
spec_txt_example_position: 347
source_specification: commonmark
-06_04__inlines__code_spans__11:
+06_04__inlines__code_spans__011:
spec_txt_example_position: 348
source_specification: commonmark
-06_04__inlines__code_spans__12:
+06_04__inlines__code_spans__012:
spec_txt_example_position: 349
source_specification: commonmark
-06_04__inlines__code_spans__13:
+06_04__inlines__code_spans__013:
spec_txt_example_position: 350
source_specification: commonmark
-06_04__inlines__code_spans__14:
+06_04__inlines__code_spans__014:
spec_txt_example_position: 351
source_specification: commonmark
-06_04__inlines__code_spans__15:
+06_04__inlines__code_spans__015:
spec_txt_example_position: 352
source_specification: commonmark
-06_04__inlines__code_spans__16:
+06_04__inlines__code_spans__016:
spec_txt_example_position: 353
source_specification: commonmark
-06_04__inlines__code_spans__17:
+06_04__inlines__code_spans__017:
spec_txt_example_position: 354
source_specification: commonmark
-06_04__inlines__code_spans__18:
+06_04__inlines__code_spans__018:
spec_txt_example_position: 355
source_specification: commonmark
-06_04__inlines__code_spans__19:
+06_04__inlines__code_spans__019:
spec_txt_example_position: 356
source_specification: commonmark
-06_04__inlines__code_spans__20:
+06_04__inlines__code_spans__020:
spec_txt_example_position: 357
source_specification: commonmark
-06_04__inlines__code_spans__21:
+06_04__inlines__code_spans__021:
spec_txt_example_position: 358
source_specification: commonmark
-06_04__inlines__code_spans__22:
+06_04__inlines__code_spans__022:
spec_txt_example_position: 359
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__01:
+06_05__inlines__emphasis_and_strong_emphasis__001:
spec_txt_example_position: 360
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__02:
+06_05__inlines__emphasis_and_strong_emphasis__002:
spec_txt_example_position: 361
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__03:
+06_05__inlines__emphasis_and_strong_emphasis__003:
spec_txt_example_position: 362
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__04:
+06_05__inlines__emphasis_and_strong_emphasis__004:
spec_txt_example_position: 363
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__05:
+06_05__inlines__emphasis_and_strong_emphasis__005:
spec_txt_example_position: 364
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__06:
+06_05__inlines__emphasis_and_strong_emphasis__006:
spec_txt_example_position: 365
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__07:
+06_05__inlines__emphasis_and_strong_emphasis__007:
spec_txt_example_position: 366
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__08:
+06_05__inlines__emphasis_and_strong_emphasis__008:
spec_txt_example_position: 367
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__09:
+06_05__inlines__emphasis_and_strong_emphasis__009:
spec_txt_example_position: 368
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__10:
+06_05__inlines__emphasis_and_strong_emphasis__010:
spec_txt_example_position: 369
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__11:
+06_05__inlines__emphasis_and_strong_emphasis__011:
spec_txt_example_position: 370
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__12:
+06_05__inlines__emphasis_and_strong_emphasis__012:
spec_txt_example_position: 371
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__13:
+06_05__inlines__emphasis_and_strong_emphasis__013:
spec_txt_example_position: 372
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__14:
+06_05__inlines__emphasis_and_strong_emphasis__014:
spec_txt_example_position: 373
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__15:
+06_05__inlines__emphasis_and_strong_emphasis__015:
spec_txt_example_position: 374
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__16:
+06_05__inlines__emphasis_and_strong_emphasis__016:
spec_txt_example_position: 375
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__17:
+06_05__inlines__emphasis_and_strong_emphasis__017:
spec_txt_example_position: 376
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__18:
+06_05__inlines__emphasis_and_strong_emphasis__018:
spec_txt_example_position: 377
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__19:
+06_05__inlines__emphasis_and_strong_emphasis__019:
spec_txt_example_position: 378
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__20:
+06_05__inlines__emphasis_and_strong_emphasis__020:
spec_txt_example_position: 379
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__21:
+06_05__inlines__emphasis_and_strong_emphasis__021:
spec_txt_example_position: 380
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__22:
+06_05__inlines__emphasis_and_strong_emphasis__022:
spec_txt_example_position: 381
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__23:
+06_05__inlines__emphasis_and_strong_emphasis__023:
spec_txt_example_position: 382
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__24:
+06_05__inlines__emphasis_and_strong_emphasis__024:
spec_txt_example_position: 383
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__25:
+06_05__inlines__emphasis_and_strong_emphasis__025:
spec_txt_example_position: 384
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__26:
+06_05__inlines__emphasis_and_strong_emphasis__026:
spec_txt_example_position: 385
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__27:
+06_05__inlines__emphasis_and_strong_emphasis__027:
spec_txt_example_position: 386
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__28:
+06_05__inlines__emphasis_and_strong_emphasis__028:
spec_txt_example_position: 387
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__29:
+06_05__inlines__emphasis_and_strong_emphasis__029:
spec_txt_example_position: 388
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__30:
+06_05__inlines__emphasis_and_strong_emphasis__030:
spec_txt_example_position: 389
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__31:
+06_05__inlines__emphasis_and_strong_emphasis__031:
spec_txt_example_position: 390
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__32:
+06_05__inlines__emphasis_and_strong_emphasis__032:
spec_txt_example_position: 391
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__33:
+06_05__inlines__emphasis_and_strong_emphasis__033:
spec_txt_example_position: 392
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__34:
+06_05__inlines__emphasis_and_strong_emphasis__034:
spec_txt_example_position: 393
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__35:
+06_05__inlines__emphasis_and_strong_emphasis__035:
spec_txt_example_position: 394
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__36:
+06_05__inlines__emphasis_and_strong_emphasis__036:
spec_txt_example_position: 395
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__37:
+06_05__inlines__emphasis_and_strong_emphasis__037:
spec_txt_example_position: 396
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__38:
+06_05__inlines__emphasis_and_strong_emphasis__038:
spec_txt_example_position: 397
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__39:
+06_05__inlines__emphasis_and_strong_emphasis__039:
spec_txt_example_position: 398
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__40:
+06_05__inlines__emphasis_and_strong_emphasis__040:
spec_txt_example_position: 399
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__41:
+06_05__inlines__emphasis_and_strong_emphasis__041:
spec_txt_example_position: 400
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__42:
+06_05__inlines__emphasis_and_strong_emphasis__042:
spec_txt_example_position: 401
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__43:
+06_05__inlines__emphasis_and_strong_emphasis__043:
spec_txt_example_position: 402
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__44:
+06_05__inlines__emphasis_and_strong_emphasis__044:
spec_txt_example_position: 403
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__45:
+06_05__inlines__emphasis_and_strong_emphasis__045:
spec_txt_example_position: 404
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__46:
+06_05__inlines__emphasis_and_strong_emphasis__046:
spec_txt_example_position: 405
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__47:
+06_05__inlines__emphasis_and_strong_emphasis__047:
spec_txt_example_position: 406
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__48:
+06_05__inlines__emphasis_and_strong_emphasis__048:
spec_txt_example_position: 407
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__49:
+06_05__inlines__emphasis_and_strong_emphasis__049:
spec_txt_example_position: 408
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__50:
+06_05__inlines__emphasis_and_strong_emphasis__050:
spec_txt_example_position: 409
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__51:
+06_05__inlines__emphasis_and_strong_emphasis__051:
spec_txt_example_position: 410
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__52:
+06_05__inlines__emphasis_and_strong_emphasis__052:
spec_txt_example_position: 411
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__53:
+06_05__inlines__emphasis_and_strong_emphasis__053:
spec_txt_example_position: 412
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__54:
+06_05__inlines__emphasis_and_strong_emphasis__054:
spec_txt_example_position: 413
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__55:
+06_05__inlines__emphasis_and_strong_emphasis__055:
spec_txt_example_position: 414
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__56:
+06_05__inlines__emphasis_and_strong_emphasis__056:
spec_txt_example_position: 415
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__57:
+06_05__inlines__emphasis_and_strong_emphasis__057:
spec_txt_example_position: 416
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__58:
+06_05__inlines__emphasis_and_strong_emphasis__058:
spec_txt_example_position: 417
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__59:
+06_05__inlines__emphasis_and_strong_emphasis__059:
spec_txt_example_position: 418
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__60:
+06_05__inlines__emphasis_and_strong_emphasis__060:
spec_txt_example_position: 419
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__61:
+06_05__inlines__emphasis_and_strong_emphasis__061:
spec_txt_example_position: 420
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__62:
+06_05__inlines__emphasis_and_strong_emphasis__062:
spec_txt_example_position: 421
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__63:
+06_05__inlines__emphasis_and_strong_emphasis__063:
spec_txt_example_position: 422
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__64:
+06_05__inlines__emphasis_and_strong_emphasis__064:
spec_txt_example_position: 423
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__65:
+06_05__inlines__emphasis_and_strong_emphasis__065:
spec_txt_example_position: 424
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__66:
+06_05__inlines__emphasis_and_strong_emphasis__066:
spec_txt_example_position: 425
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__67:
+06_05__inlines__emphasis_and_strong_emphasis__067:
spec_txt_example_position: 426
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__68:
+06_05__inlines__emphasis_and_strong_emphasis__068:
spec_txt_example_position: 427
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__69:
+06_05__inlines__emphasis_and_strong_emphasis__069:
spec_txt_example_position: 428
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__70:
+06_05__inlines__emphasis_and_strong_emphasis__070:
spec_txt_example_position: 429
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__71:
+06_05__inlines__emphasis_and_strong_emphasis__071:
spec_txt_example_position: 430
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__72:
+06_05__inlines__emphasis_and_strong_emphasis__072:
spec_txt_example_position: 431
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__73:
+06_05__inlines__emphasis_and_strong_emphasis__073:
spec_txt_example_position: 432
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__74:
+06_05__inlines__emphasis_and_strong_emphasis__074:
spec_txt_example_position: 433
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__75:
+06_05__inlines__emphasis_and_strong_emphasis__075:
spec_txt_example_position: 434
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__76:
+06_05__inlines__emphasis_and_strong_emphasis__076:
spec_txt_example_position: 435
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__77:
+06_05__inlines__emphasis_and_strong_emphasis__077:
spec_txt_example_position: 436
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__78:
+06_05__inlines__emphasis_and_strong_emphasis__078:
spec_txt_example_position: 437
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__79:
+06_05__inlines__emphasis_and_strong_emphasis__079:
spec_txt_example_position: 438
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__80:
+06_05__inlines__emphasis_and_strong_emphasis__080:
spec_txt_example_position: 439
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__81:
+06_05__inlines__emphasis_and_strong_emphasis__081:
spec_txt_example_position: 440
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__82:
+06_05__inlines__emphasis_and_strong_emphasis__082:
spec_txt_example_position: 441
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__83:
+06_05__inlines__emphasis_and_strong_emphasis__083:
spec_txt_example_position: 442
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__84:
+06_05__inlines__emphasis_and_strong_emphasis__084:
spec_txt_example_position: 443
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__85:
+06_05__inlines__emphasis_and_strong_emphasis__085:
spec_txt_example_position: 444
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__86:
+06_05__inlines__emphasis_and_strong_emphasis__086:
spec_txt_example_position: 445
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__87:
+06_05__inlines__emphasis_and_strong_emphasis__087:
spec_txt_example_position: 446
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__88:
+06_05__inlines__emphasis_and_strong_emphasis__088:
spec_txt_example_position: 447
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__89:
+06_05__inlines__emphasis_and_strong_emphasis__089:
spec_txt_example_position: 448
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__90:
+06_05__inlines__emphasis_and_strong_emphasis__090:
spec_txt_example_position: 449
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__91:
+06_05__inlines__emphasis_and_strong_emphasis__091:
spec_txt_example_position: 450
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__92:
+06_05__inlines__emphasis_and_strong_emphasis__092:
spec_txt_example_position: 451
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__93:
+06_05__inlines__emphasis_and_strong_emphasis__093:
spec_txt_example_position: 452
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__94:
+06_05__inlines__emphasis_and_strong_emphasis__094:
spec_txt_example_position: 453
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__95:
+06_05__inlines__emphasis_and_strong_emphasis__095:
spec_txt_example_position: 454
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__96:
+06_05__inlines__emphasis_and_strong_emphasis__096:
spec_txt_example_position: 455
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__97:
+06_05__inlines__emphasis_and_strong_emphasis__097:
spec_txt_example_position: 456
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__98:
+06_05__inlines__emphasis_and_strong_emphasis__098:
spec_txt_example_position: 457
source_specification: commonmark
-06_05__inlines__emphasis_and_strong_emphasis__99:
+06_05__inlines__emphasis_and_strong_emphasis__099:
spec_txt_example_position: 458
source_specification: commonmark
06_05__inlines__emphasis_and_strong_emphasis__100:
@@ -1463,558 +1463,555 @@
06_05__inlines__emphasis_and_strong_emphasis__131:
spec_txt_example_position: 490
source_specification: commonmark
-06_06__inlines__strikethrough_extension__01:
+06_06__inlines__strikethrough_extension__001:
spec_txt_example_position: 491
source_specification: github
-06_06__inlines__strikethrough_extension__02:
+06_06__inlines__strikethrough_extension__002:
spec_txt_example_position: 492
source_specification: github
-06_07__inlines__links__01:
+06_07__inlines__links__001:
spec_txt_example_position: 493
source_specification: commonmark
-06_07__inlines__links__02:
+06_07__inlines__links__002:
spec_txt_example_position: 494
source_specification: commonmark
-06_07__inlines__links__03:
+06_07__inlines__links__003:
spec_txt_example_position: 495
source_specification: commonmark
-06_07__inlines__links__04:
+06_07__inlines__links__004:
spec_txt_example_position: 496
source_specification: commonmark
-06_07__inlines__links__05:
+06_07__inlines__links__005:
spec_txt_example_position: 497
source_specification: commonmark
-06_07__inlines__links__06:
+06_07__inlines__links__006:
spec_txt_example_position: 498
source_specification: commonmark
-06_07__inlines__links__07:
+06_07__inlines__links__007:
spec_txt_example_position: 499
source_specification: commonmark
-06_07__inlines__links__08:
+06_07__inlines__links__008:
spec_txt_example_position: 500
source_specification: commonmark
-06_07__inlines__links__09:
+06_07__inlines__links__009:
spec_txt_example_position: 501
source_specification: commonmark
-06_07__inlines__links__10:
+06_07__inlines__links__010:
spec_txt_example_position: 502
source_specification: commonmark
-06_07__inlines__links__11:
+06_07__inlines__links__011:
spec_txt_example_position: 503
source_specification: commonmark
-06_07__inlines__links__12:
+06_07__inlines__links__012:
spec_txt_example_position: 504
source_specification: commonmark
-06_07__inlines__links__13:
+06_07__inlines__links__013:
spec_txt_example_position: 505
source_specification: commonmark
-06_07__inlines__links__14:
+06_07__inlines__links__014:
spec_txt_example_position: 506
source_specification: commonmark
-06_07__inlines__links__15:
+06_07__inlines__links__015:
spec_txt_example_position: 507
source_specification: commonmark
-06_07__inlines__links__16:
+06_07__inlines__links__016:
spec_txt_example_position: 508
source_specification: commonmark
-06_07__inlines__links__17:
+06_07__inlines__links__017:
spec_txt_example_position: 509
source_specification: commonmark
-06_07__inlines__links__18:
+06_07__inlines__links__018:
spec_txt_example_position: 510
source_specification: commonmark
-06_07__inlines__links__19:
+06_07__inlines__links__019:
spec_txt_example_position: 511
source_specification: commonmark
-06_07__inlines__links__20:
+06_07__inlines__links__020:
spec_txt_example_position: 512
source_specification: commonmark
-06_07__inlines__links__21:
+06_07__inlines__links__021:
spec_txt_example_position: 513
source_specification: commonmark
-06_07__inlines__links__22:
+06_07__inlines__links__022:
spec_txt_example_position: 514
source_specification: commonmark
-06_07__inlines__links__23:
+06_07__inlines__links__023:
spec_txt_example_position: 515
source_specification: commonmark
-06_07__inlines__links__24:
+06_07__inlines__links__024:
spec_txt_example_position: 516
source_specification: commonmark
-06_07__inlines__links__25:
+06_07__inlines__links__025:
spec_txt_example_position: 517
source_specification: commonmark
-06_07__inlines__links__26:
+06_07__inlines__links__026:
spec_txt_example_position: 518
source_specification: commonmark
-06_07__inlines__links__27:
+06_07__inlines__links__027:
spec_txt_example_position: 519
source_specification: commonmark
-06_07__inlines__links__28:
+06_07__inlines__links__028:
spec_txt_example_position: 520
source_specification: commonmark
-06_07__inlines__links__29:
+06_07__inlines__links__029:
spec_txt_example_position: 521
source_specification: commonmark
-06_07__inlines__links__30:
+06_07__inlines__links__030:
spec_txt_example_position: 522
source_specification: commonmark
-06_07__inlines__links__31:
+06_07__inlines__links__031:
spec_txt_example_position: 523
source_specification: commonmark
-06_07__inlines__links__32:
+06_07__inlines__links__032:
spec_txt_example_position: 524
source_specification: commonmark
-06_07__inlines__links__33:
+06_07__inlines__links__033:
spec_txt_example_position: 525
source_specification: commonmark
-06_07__inlines__links__34:
+06_07__inlines__links__034:
spec_txt_example_position: 526
source_specification: commonmark
-06_07__inlines__links__35:
+06_07__inlines__links__035:
spec_txt_example_position: 527
source_specification: commonmark
-06_07__inlines__links__36:
+06_07__inlines__links__036:
spec_txt_example_position: 528
source_specification: commonmark
-06_07__inlines__links__37:
+06_07__inlines__links__037:
spec_txt_example_position: 529
source_specification: commonmark
-06_07__inlines__links__38:
+06_07__inlines__links__038:
spec_txt_example_position: 530
source_specification: commonmark
-06_07__inlines__links__39:
+06_07__inlines__links__039:
spec_txt_example_position: 531
source_specification: commonmark
-06_07__inlines__links__40:
+06_07__inlines__links__040:
spec_txt_example_position: 532
source_specification: commonmark
-06_07__inlines__links__41:
+06_07__inlines__links__041:
spec_txt_example_position: 533
source_specification: commonmark
-06_07__inlines__links__42:
+06_07__inlines__links__042:
spec_txt_example_position: 534
source_specification: commonmark
-06_07__inlines__links__43:
+06_07__inlines__links__043:
spec_txt_example_position: 535
source_specification: commonmark
-06_07__inlines__links__44:
+06_07__inlines__links__044:
spec_txt_example_position: 536
source_specification: commonmark
-06_07__inlines__links__45:
+06_07__inlines__links__045:
spec_txt_example_position: 537
source_specification: commonmark
-06_07__inlines__links__46:
+06_07__inlines__links__046:
spec_txt_example_position: 538
source_specification: commonmark
-06_07__inlines__links__47:
+06_07__inlines__links__047:
spec_txt_example_position: 539
source_specification: commonmark
-06_07__inlines__links__48:
+06_07__inlines__links__048:
spec_txt_example_position: 540
source_specification: commonmark
-06_07__inlines__links__49:
+06_07__inlines__links__049:
spec_txt_example_position: 541
source_specification: commonmark
-06_07__inlines__links__50:
+06_07__inlines__links__050:
spec_txt_example_position: 542
source_specification: commonmark
-06_07__inlines__links__51:
+06_07__inlines__links__051:
spec_txt_example_position: 543
source_specification: commonmark
-06_07__inlines__links__52:
+06_07__inlines__links__052:
spec_txt_example_position: 544
source_specification: commonmark
-06_07__inlines__links__53:
+06_07__inlines__links__053:
spec_txt_example_position: 545
source_specification: commonmark
-06_07__inlines__links__54:
+06_07__inlines__links__054:
spec_txt_example_position: 546
source_specification: commonmark
-06_07__inlines__links__55:
+06_07__inlines__links__055:
spec_txt_example_position: 547
source_specification: commonmark
-06_07__inlines__links__56:
+06_07__inlines__links__056:
spec_txt_example_position: 548
source_specification: commonmark
-06_07__inlines__links__57:
+06_07__inlines__links__057:
spec_txt_example_position: 549
source_specification: commonmark
-06_07__inlines__links__58:
+06_07__inlines__links__058:
spec_txt_example_position: 550
source_specification: commonmark
-06_07__inlines__links__59:
+06_07__inlines__links__059:
spec_txt_example_position: 551
source_specification: commonmark
-06_07__inlines__links__60:
+06_07__inlines__links__060:
spec_txt_example_position: 552
source_specification: commonmark
-06_07__inlines__links__61:
+06_07__inlines__links__061:
spec_txt_example_position: 553
source_specification: commonmark
-06_07__inlines__links__62:
+06_07__inlines__links__062:
spec_txt_example_position: 554
source_specification: commonmark
-06_07__inlines__links__63:
+06_07__inlines__links__063:
spec_txt_example_position: 555
source_specification: commonmark
-06_07__inlines__links__64:
+06_07__inlines__links__064:
spec_txt_example_position: 556
source_specification: commonmark
-06_07__inlines__links__65:
+06_07__inlines__links__065:
spec_txt_example_position: 557
source_specification: commonmark
-06_07__inlines__links__66:
+06_07__inlines__links__066:
spec_txt_example_position: 558
source_specification: commonmark
-06_07__inlines__links__67:
+06_07__inlines__links__067:
spec_txt_example_position: 559
source_specification: commonmark
-06_07__inlines__links__68:
+06_07__inlines__links__068:
spec_txt_example_position: 560
source_specification: commonmark
-06_07__inlines__links__69:
+06_07__inlines__links__069:
spec_txt_example_position: 561
source_specification: commonmark
-06_07__inlines__links__70:
+06_07__inlines__links__070:
spec_txt_example_position: 562
source_specification: commonmark
-06_07__inlines__links__71:
+06_07__inlines__links__071:
spec_txt_example_position: 563
source_specification: commonmark
-06_07__inlines__links__72:
+06_07__inlines__links__072:
spec_txt_example_position: 564
source_specification: commonmark
-06_07__inlines__links__73:
+06_07__inlines__links__073:
spec_txt_example_position: 565
source_specification: commonmark
-06_07__inlines__links__74:
+06_07__inlines__links__074:
spec_txt_example_position: 566
source_specification: commonmark
-06_07__inlines__links__75:
+06_07__inlines__links__075:
spec_txt_example_position: 567
source_specification: commonmark
-06_07__inlines__links__76:
+06_07__inlines__links__076:
spec_txt_example_position: 568
source_specification: commonmark
-06_07__inlines__links__77:
+06_07__inlines__links__077:
spec_txt_example_position: 569
source_specification: commonmark
-06_07__inlines__links__78:
+06_07__inlines__links__078:
spec_txt_example_position: 570
source_specification: commonmark
-06_07__inlines__links__79:
+06_07__inlines__links__079:
spec_txt_example_position: 571
source_specification: commonmark
-06_07__inlines__links__80:
+06_07__inlines__links__080:
spec_txt_example_position: 572
source_specification: commonmark
-06_07__inlines__links__81:
+06_07__inlines__links__081:
spec_txt_example_position: 573
source_specification: commonmark
-06_07__inlines__links__82:
+06_07__inlines__links__082:
spec_txt_example_position: 574
source_specification: commonmark
-06_07__inlines__links__83:
+06_07__inlines__links__083:
spec_txt_example_position: 575
source_specification: commonmark
-06_07__inlines__links__84:
+06_07__inlines__links__084:
spec_txt_example_position: 576
source_specification: commonmark
-06_07__inlines__links__85:
+06_07__inlines__links__085:
spec_txt_example_position: 577
source_specification: commonmark
-06_07__inlines__links__86:
+06_07__inlines__links__086:
spec_txt_example_position: 578
source_specification: commonmark
-06_07__inlines__links__87:
+06_07__inlines__links__087:
spec_txt_example_position: 579
source_specification: commonmark
-06_08__inlines__images__01:
+06_08__inlines__images__001:
spec_txt_example_position: 580
source_specification: commonmark
-06_08__inlines__images__02:
+06_08__inlines__images__002:
spec_txt_example_position: 581
source_specification: commonmark
-06_08__inlines__images__03:
+06_08__inlines__images__003:
spec_txt_example_position: 582
source_specification: commonmark
-06_08__inlines__images__04:
+06_08__inlines__images__004:
spec_txt_example_position: 583
source_specification: commonmark
-06_08__inlines__images__05:
+06_08__inlines__images__005:
spec_txt_example_position: 584
source_specification: commonmark
-06_08__inlines__images__06:
+06_08__inlines__images__006:
spec_txt_example_position: 585
source_specification: commonmark
-06_08__inlines__images__07:
+06_08__inlines__images__007:
spec_txt_example_position: 586
source_specification: commonmark
-06_08__inlines__images__08:
+06_08__inlines__images__008:
spec_txt_example_position: 587
source_specification: commonmark
-06_08__inlines__images__09:
+06_08__inlines__images__009:
spec_txt_example_position: 588
source_specification: commonmark
-06_08__inlines__images__10:
+06_08__inlines__images__010:
spec_txt_example_position: 589
source_specification: commonmark
-06_08__inlines__images__11:
+06_08__inlines__images__011:
spec_txt_example_position: 590
source_specification: commonmark
-06_08__inlines__images__12:
+06_08__inlines__images__012:
spec_txt_example_position: 591
source_specification: commonmark
-06_08__inlines__images__13:
+06_08__inlines__images__013:
spec_txt_example_position: 592
source_specification: commonmark
-06_08__inlines__images__14:
+06_08__inlines__images__014:
spec_txt_example_position: 593
source_specification: commonmark
-06_08__inlines__images__15:
+06_08__inlines__images__015:
spec_txt_example_position: 594
source_specification: commonmark
-06_08__inlines__images__16:
+06_08__inlines__images__016:
spec_txt_example_position: 595
source_specification: commonmark
-06_08__inlines__images__17:
+06_08__inlines__images__017:
spec_txt_example_position: 596
source_specification: commonmark
-06_08__inlines__images__18:
+06_08__inlines__images__018:
spec_txt_example_position: 597
source_specification: commonmark
-06_08__inlines__images__19:
+06_08__inlines__images__019:
spec_txt_example_position: 598
source_specification: commonmark
-06_08__inlines__images__20:
+06_08__inlines__images__020:
spec_txt_example_position: 599
source_specification: commonmark
-06_08__inlines__images__21:
+06_08__inlines__images__021:
spec_txt_example_position: 600
source_specification: commonmark
-06_08__inlines__images__22:
+06_08__inlines__images__022:
spec_txt_example_position: 601
source_specification: commonmark
-06_09__inlines__autolinks__01:
+06_09__inlines__autolinks__001:
spec_txt_example_position: 602
source_specification: commonmark
-06_09__inlines__autolinks__02:
+06_09__inlines__autolinks__002:
spec_txt_example_position: 603
source_specification: commonmark
-06_09__inlines__autolinks__03:
+06_09__inlines__autolinks__003:
spec_txt_example_position: 604
source_specification: commonmark
-06_09__inlines__autolinks__04:
+06_09__inlines__autolinks__004:
spec_txt_example_position: 605
source_specification: commonmark
-06_09__inlines__autolinks__05:
+06_09__inlines__autolinks__005:
spec_txt_example_position: 606
source_specification: commonmark
-06_09__inlines__autolinks__06:
+06_09__inlines__autolinks__006:
spec_txt_example_position: 607
source_specification: commonmark
-06_09__inlines__autolinks__07:
+06_09__inlines__autolinks__007:
spec_txt_example_position: 608
source_specification: commonmark
-06_09__inlines__autolinks__08:
+06_09__inlines__autolinks__008:
spec_txt_example_position: 609
source_specification: commonmark
-06_09__inlines__autolinks__09:
+06_09__inlines__autolinks__009:
spec_txt_example_position: 610
source_specification: commonmark
-06_09__inlines__autolinks__10:
+06_09__inlines__autolinks__010:
spec_txt_example_position: 611
source_specification: commonmark
-06_09__inlines__autolinks__11:
+06_09__inlines__autolinks__011:
spec_txt_example_position: 612
source_specification: commonmark
-06_09__inlines__autolinks__12:
+06_09__inlines__autolinks__012:
spec_txt_example_position: 613
source_specification: commonmark
-06_09__inlines__autolinks__13:
+06_09__inlines__autolinks__013:
spec_txt_example_position: 614
source_specification: commonmark
-06_09__inlines__autolinks__14:
+06_09__inlines__autolinks__014:
spec_txt_example_position: 615
source_specification: commonmark
-06_09__inlines__autolinks__15:
+06_09__inlines__autolinks__015:
spec_txt_example_position: 616
source_specification: commonmark
-06_09__inlines__autolinks__16:
+06_09__inlines__autolinks__016:
spec_txt_example_position: 617
source_specification: commonmark
-06_09__inlines__autolinks__17:
+06_09__inlines__autolinks__017:
spec_txt_example_position: 618
source_specification: commonmark
-06_09__inlines__autolinks__18:
+06_09__inlines__autolinks__018:
spec_txt_example_position: 619
source_specification: commonmark
-06_09__inlines__autolinks__19:
+06_09__inlines__autolinks__019:
spec_txt_example_position: 620
source_specification: commonmark
-06_10__inlines__autolinks_extension__01:
+06_10__inlines__autolinks_extension__001:
spec_txt_example_position: 621
source_specification: github
-06_10__inlines__autolinks_extension__02:
+06_10__inlines__autolinks_extension__002:
spec_txt_example_position: 622
source_specification: github
-06_10__inlines__autolinks_extension__03:
+06_10__inlines__autolinks_extension__003:
spec_txt_example_position: 623
source_specification: github
-06_10__inlines__autolinks_extension__04:
+06_10__inlines__autolinks_extension__004:
spec_txt_example_position: 624
source_specification: github
-06_10__inlines__autolinks_extension__05:
+06_10__inlines__autolinks_extension__005:
spec_txt_example_position: 625
source_specification: github
-06_10__inlines__autolinks_extension__06:
+06_10__inlines__autolinks_extension__006:
spec_txt_example_position: 626
source_specification: github
-06_10__inlines__autolinks_extension__07:
+06_10__inlines__autolinks_extension__007:
spec_txt_example_position: 627
source_specification: github
-06_10__inlines__autolinks_extension__08:
+06_10__inlines__autolinks_extension__008:
spec_txt_example_position: 628
source_specification: github
-06_10__inlines__autolinks_extension__09:
+06_10__inlines__autolinks_extension__009:
spec_txt_example_position: 629
source_specification: github
-06_10__inlines__autolinks_extension__10:
+06_10__inlines__autolinks_extension__010:
spec_txt_example_position: 630
source_specification: github
-06_10__inlines__autolinks_extension__11:
+06_10__inlines__autolinks_extension__011:
spec_txt_example_position: 631
source_specification: github
-06_11__inlines__raw_html__01:
+06_11__inlines__raw_html__001:
spec_txt_example_position: 632
source_specification: commonmark
-06_11__inlines__raw_html__02:
+06_11__inlines__raw_html__002:
spec_txt_example_position: 633
source_specification: commonmark
-06_11__inlines__raw_html__03:
+06_11__inlines__raw_html__003:
spec_txt_example_position: 634
source_specification: commonmark
-06_11__inlines__raw_html__04:
+06_11__inlines__raw_html__004:
spec_txt_example_position: 635
source_specification: commonmark
-06_11__inlines__raw_html__05:
+06_11__inlines__raw_html__005:
spec_txt_example_position: 636
source_specification: commonmark
-06_11__inlines__raw_html__06:
+06_11__inlines__raw_html__006:
spec_txt_example_position: 637
source_specification: commonmark
-06_11__inlines__raw_html__07:
+06_11__inlines__raw_html__007:
spec_txt_example_position: 638
source_specification: commonmark
-06_11__inlines__raw_html__08:
+06_11__inlines__raw_html__008:
spec_txt_example_position: 639
source_specification: commonmark
-06_11__inlines__raw_html__09:
+06_11__inlines__raw_html__009:
spec_txt_example_position: 640
source_specification: commonmark
-06_11__inlines__raw_html__10:
+06_11__inlines__raw_html__010:
spec_txt_example_position: 641
source_specification: commonmark
-06_11__inlines__raw_html__11:
+06_11__inlines__raw_html__011:
spec_txt_example_position: 642
source_specification: commonmark
-06_11__inlines__raw_html__12:
+06_11__inlines__raw_html__012:
spec_txt_example_position: 643
source_specification: commonmark
-06_11__inlines__raw_html__13:
+06_11__inlines__raw_html__013:
spec_txt_example_position: 644
source_specification: commonmark
-06_11__inlines__raw_html__14:
+06_11__inlines__raw_html__014:
spec_txt_example_position: 645
source_specification: commonmark
-06_11__inlines__raw_html__15:
+06_11__inlines__raw_html__015:
spec_txt_example_position: 646
source_specification: commonmark
-06_11__inlines__raw_html__16:
+06_11__inlines__raw_html__016:
spec_txt_example_position: 647
source_specification: commonmark
-06_11__inlines__raw_html__17:
+06_11__inlines__raw_html__017:
spec_txt_example_position: 648
source_specification: commonmark
-06_11__inlines__raw_html__18:
+06_11__inlines__raw_html__018:
spec_txt_example_position: 649
source_specification: commonmark
-06_11__inlines__raw_html__19:
+06_11__inlines__raw_html__019:
spec_txt_example_position: 650
source_specification: commonmark
-06_11__inlines__raw_html__20:
+06_11__inlines__raw_html__020:
spec_txt_example_position: 651
source_specification: commonmark
-06_11__inlines__raw_html__21:
+06_11__inlines__raw_html__021:
spec_txt_example_position: 652
source_specification: commonmark
-06_12__inlines__disallowed_raw_html_extension__01:
+06_12__inlines__disallowed_raw_html_extension__001:
spec_txt_example_position: 653
source_specification: github
-06_13__inlines__hard_line_breaks__01:
+06_13__inlines__hard_line_breaks__001:
spec_txt_example_position: 654
source_specification: commonmark
-06_13__inlines__hard_line_breaks__02:
+06_13__inlines__hard_line_breaks__002:
spec_txt_example_position: 655
source_specification: commonmark
-06_13__inlines__hard_line_breaks__03:
+06_13__inlines__hard_line_breaks__003:
spec_txt_example_position: 656
source_specification: commonmark
-06_13__inlines__hard_line_breaks__04:
+06_13__inlines__hard_line_breaks__004:
spec_txt_example_position: 657
source_specification: commonmark
-06_13__inlines__hard_line_breaks__05:
+06_13__inlines__hard_line_breaks__005:
spec_txt_example_position: 658
source_specification: commonmark
-06_13__inlines__hard_line_breaks__06:
+06_13__inlines__hard_line_breaks__006:
spec_txt_example_position: 659
source_specification: commonmark
-06_13__inlines__hard_line_breaks__07:
+06_13__inlines__hard_line_breaks__007:
spec_txt_example_position: 660
source_specification: commonmark
-06_13__inlines__hard_line_breaks__08:
+06_13__inlines__hard_line_breaks__008:
spec_txt_example_position: 661
source_specification: commonmark
-06_13__inlines__hard_line_breaks__09:
+06_13__inlines__hard_line_breaks__009:
spec_txt_example_position: 662
source_specification: commonmark
-06_13__inlines__hard_line_breaks__10:
+06_13__inlines__hard_line_breaks__010:
spec_txt_example_position: 663
source_specification: commonmark
-06_13__inlines__hard_line_breaks__11:
+06_13__inlines__hard_line_breaks__011:
spec_txt_example_position: 664
source_specification: commonmark
-06_13__inlines__hard_line_breaks__12:
+06_13__inlines__hard_line_breaks__012:
spec_txt_example_position: 665
source_specification: commonmark
-06_13__inlines__hard_line_breaks__13:
+06_13__inlines__hard_line_breaks__013:
spec_txt_example_position: 666
source_specification: commonmark
-06_13__inlines__hard_line_breaks__14:
+06_13__inlines__hard_line_breaks__014:
spec_txt_example_position: 667
source_specification: commonmark
-06_13__inlines__hard_line_breaks__15:
+06_13__inlines__hard_line_breaks__015:
spec_txt_example_position: 668
source_specification: commonmark
-06_14__inlines__soft_line_breaks__01:
+06_14__inlines__soft_line_breaks__001:
spec_txt_example_position: 669
source_specification: commonmark
-06_14__inlines__soft_line_breaks__02:
+06_14__inlines__soft_line_breaks__002:
spec_txt_example_position: 670
source_specification: commonmark
-06_15__inlines__textual_content__01:
+06_15__inlines__textual_content__001:
spec_txt_example_position: 671
source_specification: commonmark
-06_15__inlines__textual_content__02:
+06_15__inlines__textual_content__002:
spec_txt_example_position: 672
source_specification: commonmark
-06_15__inlines__textual_content__03:
+06_15__inlines__textual_content__003:
spec_txt_example_position: 673
source_specification: commonmark
-07_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01:
+07_01__gitlab_specific_markdown__footnotes__001:
spec_txt_example_position: 674
- source_specification: commonmark
-08_01__second_gitlab_specific_section_with_examples__strong_but_with_html__01:
- spec_txt_example_position: 675
- source_specification: commonmark
+ source_specification: gitlab
diff --git a/spec/fixtures/glfm/example_snapshots/html.yml b/spec/fixtures/glfm/example_snapshots/html.yml
index a536b5a4834..b9deadcb4cb 100644
--- a/spec/fixtures/glfm/example_snapshots/html.yml
+++ b/spec/fixtures/glfm/example_snapshots/html.yml
@@ -1,24 +1,27 @@
---
-02_01__preliminaries__tabs__01:
+02_01__preliminaries__tabs__001:
canonical: "<pre><code>foo\tbaz\t\tbim\n</code></pre>\n"
- static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">&#x000A;<pre
+ static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
data-sourcepos=\"1:2-1:13\" class=\"code highlight js-syntax-highlight language-plaintext\"
- lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">foo\tbaz\t\tbim</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>"
+ lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
+ class=\"line\" lang=\"plaintext\">foo\tbaz\t\tbim</span></code></pre>\n<copy-code></copy-code>\n</div>"
wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>foo\tbaz\t\tbim</code></pre>"
-02_01__preliminaries__tabs__02:
+02_01__preliminaries__tabs__002:
canonical: "<pre><code>foo\tbaz\t\tbim\n</code></pre>\n"
- static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">&#x000A;<pre
+ static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
data-sourcepos=\"1:4-1:15\" class=\"code highlight js-syntax-highlight language-plaintext\"
- lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">foo\tbaz\t\tbim</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>"
+ lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
+ class=\"line\" lang=\"plaintext\">foo\tbaz\t\tbim</span></code></pre>\n<copy-code></copy-code>\n</div>"
wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>foo\tbaz\t\tbim</code></pre>"
-02_01__preliminaries__tabs__03:
+02_01__preliminaries__tabs__003:
canonical: "<pre><code>a\ta\ná½\ta\n</code></pre>\n"
- static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">&#x000A;<pre
+ static: "<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
data-sourcepos=\"1:5-2:9\" class=\"code highlight js-syntax-highlight language-plaintext\"
- lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">a\ta</span>&#x000A;<span
- id=\"LC2\" class=\"line\" lang=\"plaintext\">á½\ta</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>"
+ lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
+ class=\"line\" lang=\"plaintext\">a\ta</span>\n<span id=\"LC2\" class=\"line\"
+ lang=\"plaintext\">á½\ta</span></code></pre>\n<copy-code></copy-code>\n</div>"
wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>a\ta\ná½\ta</code></pre>"
-02_01__preliminaries__tabs__04:
+02_01__preliminaries__tabs__004:
canonical: |
<ul>
<li>
@@ -27,10 +30,15 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:3-3:4" dir="auto">&#x000A;<li data-sourcepos="1:3-3:4">&#x000A;<p data-sourcepos="1:5-1:7">foo</p>&#x000A;<p data-sourcepos="3:2-3:4">bar</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:3-3:4" dir="auto">
+ <li data-sourcepos="1:3-3:4">
+ <p data-sourcepos="1:5-1:7">foo</p>
+ <p data-sourcepos="3:2-3:4">bar</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-02_01__preliminaries__tabs__05:
+02_01__preliminaries__tabs__005:
canonical: |
<ul>
<li>
@@ -40,20 +48,33 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">&#x000A;<li data-sourcepos="1:1-3:5">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:2-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-3:5" dir="auto">
+ <li data-sourcepos="1:1-3:5">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:2-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code> bar</code></pre></li></ul>
-02_01__preliminaries__tabs__06:
+02_01__preliminaries__tabs__006:
canonical: |
<blockquote>
<pre><code> foo
</code></pre>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-1:6" dir="auto">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:3-1:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-1:6" dir="auto">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:3-1:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code> foo</code></pre></blockquote>
-02_01__preliminaries__tabs__07:
+02_01__preliminaries__tabs__007:
canonical: |
<ul>
<li>
@@ -62,20 +83,31 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-1:6" dir="auto">&#x000A;<li data-sourcepos="1:1-1:6">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:3-1:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-1:6" dir="auto">
+ <li data-sourcepos="1:1-1:6">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:3-1:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code> foo</code></pre></li></ul>
-02_01__preliminaries__tabs__08:
+02_01__preliminaries__tabs__008:
canonical: |
<pre><code>foo
bar
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-2:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span>&#x000A;<span id="LC2" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-2:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span>
+ <span id="LC2" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>foo
bar</code></pre>
-02_01__preliminaries__tabs__09:
+02_01__preliminaries__tabs__009:
canonical: |
<ul>
<li>foo
@@ -89,147 +121,171 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:2-3:7" dir="auto">&#x000A;<li data-sourcepos="1:2-3:7">foo&#x000A;<ul data-sourcepos="2:4-3:7">&#x000A;<li data-sourcepos="2:4-3:7">bar&#x000A;<ul data-sourcepos="3:3-3:7">&#x000A;<li data-sourcepos="3:3-3:7">baz</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:2-3:7" dir="auto">
+ <li data-sourcepos="1:2-3:7">foo
+ <ul data-sourcepos="2:4-3:7">
+ <li data-sourcepos="2:4-3:7">bar
+ <ul data-sourcepos="3:3-3:7">
+ <li data-sourcepos="3:3-3:7">baz</li>
+ </ul>
+ </li>
+ </ul>
+ </li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>foo
- </p><ul bullet="*"><li><p>bar
- </p><ul bullet="*"><li><p>baz</p></li></ul></li></ul></li></ul>
-02_01__preliminaries__tabs__10:
+ <ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p><ul bullet="*"><li><p>baz</p></li></ul></li></ul></li></ul>
+02_01__preliminaries__tabs__010:
canonical: |
<h1>Foo</h1>
static: |-
- <h1 data-sourcepos="1:1-1:5" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
+ <h1 data-sourcepos="1:1-1:5" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
wysiwyg: |-
<h1>Foo</h1>
-02_01__preliminaries__tabs__11:
+02_01__preliminaries__tabs__011:
canonical: |
<hr />
static: |-
<hr data-sourcepos="1:1-1:6">
wysiwyg: |-
<hr>
-03_01__blocks_and_inlines__precedence__01:
+03_01__blocks_and_inlines__precedence__001:
canonical: |
<ul>
<li>`one</li>
<li>two`</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-2:6" dir="auto">&#x000A;<li data-sourcepos="1:1-1:6">`one</li>&#x000A;<li data-sourcepos="2:1-2:6">two`</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-2:6" dir="auto">
+ <li data-sourcepos="1:1-1:6">`one</li>
+ <li data-sourcepos="2:1-2:6">two`</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>`one</p></li><li><p>two`</p></li></ul>
-04_01__leaf_blocks__thematic_breaks__01:
+04_01__leaf_blocks__thematic_breaks__001:
canonical: |
<hr />
<hr />
<hr />
static: |-
- <hr data-sourcepos="1:1-1:3">&#x000A;<hr data-sourcepos="2:1-2:3">&#x000A;<hr data-sourcepos="3:1-3:3">
+ <hr data-sourcepos="1:1-1:3">
+ <hr data-sourcepos="2:1-2:3">
+ <hr data-sourcepos="3:1-3:3">
wysiwyg: |-
<hr>
-04_01__leaf_blocks__thematic_breaks__02:
+04_01__leaf_blocks__thematic_breaks__002:
canonical: |
<p>+++</p>
static: |-
<p data-sourcepos="1:1-1:3" dir="auto">+++</p>
wysiwyg: |-
<p>+++</p>
-04_01__leaf_blocks__thematic_breaks__03:
+04_01__leaf_blocks__thematic_breaks__003:
canonical: |
<p>===</p>
static: |-
<p data-sourcepos="1:1-1:3" dir="auto">===</p>
wysiwyg: |-
<p>===</p>
-04_01__leaf_blocks__thematic_breaks__04:
+04_01__leaf_blocks__thematic_breaks__004:
canonical: |
<p>--
**
__</p>
static: |-
- <p data-sourcepos="1:1-3:2" dir="auto">--&#x000A;**&#x000A;__</p>
+ <p data-sourcepos="1:1-3:2" dir="auto">--
+ **
+ __</p>
wysiwyg: |-
<p>--
**
__</p>
-04_01__leaf_blocks__thematic_breaks__05:
+04_01__leaf_blocks__thematic_breaks__005:
canonical: |
<hr />
<hr />
<hr />
static: |-
- <hr data-sourcepos="1:2-1:4">&#x000A;<hr data-sourcepos="2:3-2:5">&#x000A;<hr data-sourcepos="3:4-3:6">
+ <hr data-sourcepos="1:2-1:4">
+ <hr data-sourcepos="2:3-2:5">
+ <hr data-sourcepos="3:4-3:6">
wysiwyg: |-
<hr>
-04_01__leaf_blocks__thematic_breaks__06:
+04_01__leaf_blocks__thematic_breaks__006:
canonical: |
<pre><code>***
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">***</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">***</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>***</code></pre>
-04_01__leaf_blocks__thematic_breaks__07:
+04_01__leaf_blocks__thematic_breaks__007:
canonical: |
<p>Foo
***</p>
static: |-
- <p data-sourcepos="1:1-2:7" dir="auto">Foo&#x000A;***</p>
+ <p data-sourcepos="1:1-2:7" dir="auto">Foo
+ ***</p>
wysiwyg: |-
<p>Foo
***</p>
-04_01__leaf_blocks__thematic_breaks__08:
+04_01__leaf_blocks__thematic_breaks__008:
canonical: |
<hr />
static: |-
<hr data-sourcepos="1:1-1:37">
wysiwyg: |-
<hr>
-04_01__leaf_blocks__thematic_breaks__09:
+04_01__leaf_blocks__thematic_breaks__009:
canonical: |
<hr />
static: |-
<hr data-sourcepos="1:2-1:6">
wysiwyg: |-
<hr>
-04_01__leaf_blocks__thematic_breaks__10:
+04_01__leaf_blocks__thematic_breaks__010:
canonical: |
<hr />
static: |-
<hr data-sourcepos="1:2-1:19">
wysiwyg: |-
<hr>
-04_01__leaf_blocks__thematic_breaks__11:
+04_01__leaf_blocks__thematic_breaks__011:
canonical: |
<hr />
static: |-
<hr data-sourcepos="1:1-1:21">
wysiwyg: |-
<hr>
-04_01__leaf_blocks__thematic_breaks__12:
+04_01__leaf_blocks__thematic_breaks__012:
canonical: |
<hr />
static: |-
<hr data-sourcepos="1:1-1:11">
wysiwyg: |-
<hr>
-04_01__leaf_blocks__thematic_breaks__13:
+04_01__leaf_blocks__thematic_breaks__013:
canonical: |
<p>_ _ _ _ a</p>
<p>a------</p>
<p>---a---</p>
static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">_ _ _ _ a</p>&#x000A;<p data-sourcepos="3:1-3:7" dir="auto">a------</p>&#x000A;<p data-sourcepos="5:1-5:7" dir="auto">---a---</p>
+ <p data-sourcepos="1:1-1:9" dir="auto">_ _ _ _ a</p>
+ <p data-sourcepos="3:1-3:7" dir="auto">a------</p>
+ <p data-sourcepos="5:1-5:7" dir="auto">---a---</p>
wysiwyg: |-
<p>_ _ _ _ a</p>
-04_01__leaf_blocks__thematic_breaks__14:
+04_01__leaf_blocks__thematic_breaks__014:
canonical: |
<p><em>-</em></p>
static: |-
<p data-sourcepos="1:2-1:4" dir="auto"><em>-</em></p>
wysiwyg: |-
<p><em>-</em></p>
-04_01__leaf_blocks__thematic_breaks__15:
+04_01__leaf_blocks__thematic_breaks__015:
canonical: |
<ul>
<li>foo</li>
@@ -239,27 +295,37 @@
<li>bar</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">foo</li>&#x000A;</ul>&#x000A;<hr data-sourcepos="2:1-2:3">&#x000A;<ul data-sourcepos="3:1-3:5" dir="auto">&#x000A;<li data-sourcepos="3:1-3:5">bar</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-1:5" dir="auto">
+ <li data-sourcepos="1:1-1:5">foo</li>
+ </ul>
+ <hr data-sourcepos="2:1-2:3">
+ <ul data-sourcepos="3:1-3:5" dir="auto">
+ <li data-sourcepos="3:1-3:5">bar</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li></ul>
-04_01__leaf_blocks__thematic_breaks__16:
+04_01__leaf_blocks__thematic_breaks__016:
canonical: |
<p>Foo</p>
<hr />
<p>bar</p>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>&#x000A;<hr data-sourcepos="2:1-2:3">&#x000A;<p data-sourcepos="3:1-3:3" dir="auto">bar</p>
+ <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
+ <hr data-sourcepos="2:1-2:3">
+ <p data-sourcepos="3:1-3:3" dir="auto">bar</p>
wysiwyg: |-
<p>Foo</p>
-04_01__leaf_blocks__thematic_breaks__17:
+04_01__leaf_blocks__thematic_breaks__017:
canonical: |
<h2>Foo</h2>
<p>bar</p>
static: |-
- <h2 data-sourcepos="1:1-3:3" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>&#x000A;<p data-sourcepos="3:1-3:3" dir="auto">bar</p>
+ <h2 data-sourcepos="1:1-3:3" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
+ <p data-sourcepos="3:1-3:3" dir="auto">bar</p>
wysiwyg: |-
<h2>Foo</h2>
-04_01__leaf_blocks__thematic_breaks__18:
+04_01__leaf_blocks__thematic_breaks__018:
canonical: |
<ul>
<li>Foo</li>
@@ -269,10 +335,16 @@
<li>Bar</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">Foo</li>&#x000A;</ul>&#x000A;<hr data-sourcepos="2:1-2:5">&#x000A;<ul data-sourcepos="3:1-3:5" dir="auto">&#x000A;<li data-sourcepos="3:1-3:5">Bar</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-1:5" dir="auto">
+ <li data-sourcepos="1:1-1:5">Foo</li>
+ </ul>
+ <hr data-sourcepos="2:1-2:5">
+ <ul data-sourcepos="3:1-3:5" dir="auto">
+ <li data-sourcepos="3:1-3:5">Bar</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>Foo</p></li></ul>
-04_01__leaf_blocks__thematic_breaks__19:
+04_01__leaf_blocks__thematic_breaks__019:
canonical: |
<ul>
<li>Foo</li>
@@ -281,10 +353,15 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-2:7" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">Foo</li>&#x000A;<li data-sourcepos="2:1-2:7">&#x000A;<hr data-sourcepos="2:3-2:7">&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-2:7" dir="auto">
+ <li data-sourcepos="1:1-1:5">Foo</li>
+ <li data-sourcepos="2:1-2:7">
+ <hr data-sourcepos="2:3-2:7">
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>Foo</p></li><li><p></p><hr></li></ul>
-04_02__leaf_blocks__atx_headings__01:
+04_02__leaf_blocks__atx_headings__001:
canonical: |
<h1>foo</h1>
<h2>foo</h2>
@@ -293,188 +370,253 @@
<h5>foo</h5>
<h6>foo</h6>
static: |-
- <h1 data-sourcepos="1:1-1:5" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>&#x000A;<h2 data-sourcepos="2:1-2:6" dir="auto">&#x000A;<a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h2>&#x000A;<h3 data-sourcepos="3:1-3:7" dir="auto">&#x000A;<a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>foo</h3>&#x000A;<h4 data-sourcepos="4:1-4:8" dir="auto">&#x000A;<a id="user-content-foo-3" class="anchor" href="#foo-3" aria-hidden="true"></a>foo</h4>&#x000A;<h5 data-sourcepos="5:1-5:9" dir="auto">&#x000A;<a id="user-content-foo-4" class="anchor" href="#foo-4" aria-hidden="true"></a>foo</h5>&#x000A;<h6 data-sourcepos="6:1-6:10" dir="auto">&#x000A;<a id="user-content-foo-5" class="anchor" href="#foo-5" aria-hidden="true"></a>foo</h6>
+ <h1 data-sourcepos="1:1-1:5" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>
+ <h2 data-sourcepos="2:1-2:6" dir="auto">
+ <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h2>
+ <h3 data-sourcepos="3:1-3:7" dir="auto">
+ <a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>foo</h3>
+ <h4 data-sourcepos="4:1-4:8" dir="auto">
+ <a id="user-content-foo-3" class="anchor" href="#foo-3" aria-hidden="true"></a>foo</h4>
+ <h5 data-sourcepos="5:1-5:9" dir="auto">
+ <a id="user-content-foo-4" class="anchor" href="#foo-4" aria-hidden="true"></a>foo</h5>
+ <h6 data-sourcepos="6:1-6:10" dir="auto">
+ <a id="user-content-foo-5" class="anchor" href="#foo-5" aria-hidden="true"></a>foo</h6>
wysiwyg: |-
<h1>foo</h1>
-04_02__leaf_blocks__atx_headings__02:
+04_02__leaf_blocks__atx_headings__002:
canonical: |
<p>####### foo</p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto">####### foo</p>
wysiwyg: |-
<p>####### foo</p>
-04_02__leaf_blocks__atx_headings__03:
+04_02__leaf_blocks__atx_headings__003:
canonical: |
<p>#5 bolt</p>
<p>#hashtag</p>
static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">#5 bolt</p>&#x000A;<p data-sourcepos="3:1-3:8" dir="auto">#hashtag</p>
+ <p data-sourcepos="1:1-1:7" dir="auto">#5 bolt</p>
+ <p data-sourcepos="3:1-3:8" dir="auto">#hashtag</p>
wysiwyg: |-
<p>#5 bolt</p>
-04_02__leaf_blocks__atx_headings__04:
+04_02__leaf_blocks__atx_headings__004:
canonical: |
<p>## foo</p>
static: |-
<p data-sourcepos="1:1-1:27" dir="auto"><span>#</span># foo</p>
wysiwyg: |-
<p>## foo</p>
-04_02__leaf_blocks__atx_headings__05:
+04_02__leaf_blocks__atx_headings__005:
canonical: |
<h1>foo <em>bar</em> *baz*</h1>
static: |-
- <h1 data-sourcepos="1:1-1:19" dir="auto">&#x000A;<a id="user-content-foo-bar-baz" class="anchor" href="#foo-bar-baz" aria-hidden="true"></a>foo <em>bar</em> *baz*</h1>
+ <h1 data-sourcepos="1:1-1:19" dir="auto">
+ <a id="user-content-foo-bar-baz" class="anchor" href="#foo-bar-baz" aria-hidden="true"></a>foo <em>bar</em> *baz*</h1>
wysiwyg: |-
<h1>foo <em>bar</em> *baz*</h1>
-04_02__leaf_blocks__atx_headings__06:
+04_02__leaf_blocks__atx_headings__006:
canonical: |
<h1>foo</h1>
static: |-
- <h1 data-sourcepos="1:1-1:22" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>
+ <h1 data-sourcepos="1:1-1:22" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>
wysiwyg: |-
<h1>foo</h1>
-04_02__leaf_blocks__atx_headings__07:
+04_02__leaf_blocks__atx_headings__007:
canonical: |
<h3>foo</h3>
<h2>foo</h2>
<h1>foo</h1>
static: |-
- <h3 data-sourcepos="1:2-1:8" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>&#x000A;<h2 data-sourcepos="2:3-2:8" dir="auto">&#x000A;<a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h2>&#x000A;<h1 data-sourcepos="3:4-3:8" dir="auto">&#x000A;<a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>foo</h1>
+ <h3 data-sourcepos="1:2-1:8" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
+ <h2 data-sourcepos="2:3-2:8" dir="auto">
+ <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h2>
+ <h1 data-sourcepos="3:4-3:8" dir="auto">
+ <a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>foo</h1>
wysiwyg: |-
<h3>foo</h3>
-04_02__leaf_blocks__atx_headings__08:
+04_02__leaf_blocks__atx_headings__008:
canonical: |
<pre><code># foo
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"># foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"># foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code># foo</code></pre>
-04_02__leaf_blocks__atx_headings__09:
+04_02__leaf_blocks__atx_headings__009:
canonical: |
<p>foo
# bar</p>
static: |-
- <p data-sourcepos="1:1-2:9" dir="auto">foo&#x000A;# bar</p>
+ <p data-sourcepos="1:1-2:9" dir="auto">foo
+ # bar</p>
wysiwyg: |-
<p>foo
# bar</p>
-04_02__leaf_blocks__atx_headings__10:
+04_02__leaf_blocks__atx_headings__010:
canonical: |
<h2>foo</h2>
<h3>bar</h3>
static: |-
- <h2 data-sourcepos="1:1-1:6" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>&#x000A;<h3 data-sourcepos="2:3-2:11" dir="auto">&#x000A;<a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h3>
+ <h2 data-sourcepos="1:1-1:6" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>
+ <h3 data-sourcepos="2:3-2:11" dir="auto">
+ <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h3>
wysiwyg: |-
<h2>foo</h2>
-04_02__leaf_blocks__atx_headings__11:
+04_02__leaf_blocks__atx_headings__011:
canonical: |
<h1>foo</h1>
<h5>foo</h5>
static: |-
- <h1 data-sourcepos="1:1-1:5" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>&#x000A;<h5 data-sourcepos="2:1-2:9" dir="auto">&#x000A;<a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h5>
+ <h1 data-sourcepos="1:1-1:5" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h1>
+ <h5 data-sourcepos="2:1-2:9" dir="auto">
+ <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>foo</h5>
wysiwyg: |-
<h1>foo</h1>
-04_02__leaf_blocks__atx_headings__12:
+04_02__leaf_blocks__atx_headings__012:
canonical: |
<h3>foo</h3>
static: |-
- <h3 data-sourcepos="1:1-1:7" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
+ <h3 data-sourcepos="1:1-1:7" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
wysiwyg: |-
<h3>foo</h3>
-04_02__leaf_blocks__atx_headings__13:
+04_02__leaf_blocks__atx_headings__013:
canonical: |
<h3>foo ### b</h3>
static: |-
- <h3 data-sourcepos="1:1-1:13" dir="auto">&#x000A;<a id="user-content-foo-b" class="anchor" href="#foo-b" aria-hidden="true"></a>foo ### b</h3>
+ <h3 data-sourcepos="1:1-1:13" dir="auto">
+ <a id="user-content-foo-b" class="anchor" href="#foo-b" aria-hidden="true"></a>foo ### b</h3>
wysiwyg: |-
<h3>foo ### b</h3>
-04_02__leaf_blocks__atx_headings__14:
+04_02__leaf_blocks__atx_headings__014:
canonical: |
<h1>foo#</h1>
static: |-
- <h1 data-sourcepos="1:1-1:6" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo#</h1>
+ <h1 data-sourcepos="1:1-1:6" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo#</h1>
wysiwyg: |-
<h1>foo#</h1>
-04_02__leaf_blocks__atx_headings__15:
+04_02__leaf_blocks__atx_headings__015:
canonical: |
<h3>foo ###</h3>
<h2>foo ###</h2>
<h1>foo #</h1>
static: |-
- <h3 data-sourcepos="1:1-1:32" dir="auto">&#x000A;<a id="user-content-foo-" class="anchor" href="#foo-" aria-hidden="true"></a>foo <span>#</span>##</h3>&#x000A;<h2 data-sourcepos="2:1-2:31" dir="auto">&#x000A;<a id="user-content-foo--1" class="anchor" href="#foo--1" aria-hidden="true"></a>foo #<span>#</span>#</h2>&#x000A;<h1 data-sourcepos="3:1-3:28" dir="auto">&#x000A;<a id="user-content-foo--2" class="anchor" href="#foo--2" aria-hidden="true"></a>foo <span>#</span>&#x000A;</h1>
+ <h3 data-sourcepos="1:1-1:32" dir="auto">
+ <a id="user-content-foo-" class="anchor" href="#foo-" aria-hidden="true"></a>foo <span>#</span>##</h3>
+ <h2 data-sourcepos="2:1-2:31" dir="auto">
+ <a id="user-content-foo--1" class="anchor" href="#foo--1" aria-hidden="true"></a>foo #<span>#</span>#</h2>
+ <h1 data-sourcepos="3:1-3:28" dir="auto">
+ <a id="user-content-foo--2" class="anchor" href="#foo--2" aria-hidden="true"></a>foo <span>#</span>
+ </h1>
wysiwyg: |-
<h3>foo ###</h3>
-04_02__leaf_blocks__atx_headings__16:
+04_02__leaf_blocks__atx_headings__016:
canonical: |
<hr />
<h2>foo</h2>
<hr />
static: |-
- <hr data-sourcepos="1:1-1:4">&#x000A;<h2 data-sourcepos="2:1-2:6" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>&#x000A;<hr data-sourcepos="3:1-3:4">
+ <hr data-sourcepos="1:1-1:4">
+ <h2 data-sourcepos="2:1-2:6" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>
+ <hr data-sourcepos="3:1-3:4">
wysiwyg: |-
<hr>
-04_02__leaf_blocks__atx_headings__17:
+04_02__leaf_blocks__atx_headings__017:
canonical: |
<p>Foo bar</p>
<h1>baz</h1>
<p>Bar foo</p>
static: |-
- <p data-sourcepos="1:1-1:7" dir="auto">Foo bar</p>&#x000A;<h1 data-sourcepos="2:1-2:5" dir="auto">&#x000A;<a id="user-content-baz" class="anchor" href="#baz" aria-hidden="true"></a>baz</h1>&#x000A;<p data-sourcepos="3:1-3:7" dir="auto">Bar foo</p>
+ <p data-sourcepos="1:1-1:7" dir="auto">Foo bar</p>
+ <h1 data-sourcepos="2:1-2:5" dir="auto">
+ <a id="user-content-baz" class="anchor" href="#baz" aria-hidden="true"></a>baz</h1>
+ <p data-sourcepos="3:1-3:7" dir="auto">Bar foo</p>
wysiwyg: |-
<p>Foo bar</p>
-04_02__leaf_blocks__atx_headings__18:
+04_02__leaf_blocks__atx_headings__018:
canonical: |
<h2></h2>
<h1></h1>
<h3></h3>
static: |-
- <h2 data-sourcepos="1:1-1:3" dir="auto"></h2>&#x000A;<h1 data-sourcepos="2:1-2:1" dir="auto"></h1>&#x000A;<h3 data-sourcepos="3:1-3:3" dir="auto"></h3>
+ <h2 data-sourcepos="1:1-1:3" dir="auto"></h2>
+ <h1 data-sourcepos="2:1-2:1" dir="auto"></h1>
+ <h3 data-sourcepos="3:1-3:3" dir="auto"></h3>
wysiwyg: |-
<h2></h2>
-04_03__leaf_blocks__setext_headings__01:
+04_03__leaf_blocks__setext_headings__001:
canonical: |
<h1>Foo <em>bar</em></h1>
<h2>Foo <em>bar</em></h2>
static: |-
- <h1 data-sourcepos="1:1-3:0" dir="auto">&#x000A;<a id="user-content-foo-bar" class="anchor" href="#foo-bar" aria-hidden="true"></a>Foo <em>bar</em>&#x000A;</h1>&#x000A;<h2 data-sourcepos="4:1-5:9" dir="auto">&#x000A;<a id="user-content-foo-bar-1" class="anchor" href="#foo-bar-1" aria-hidden="true"></a>Foo <em>bar</em>&#x000A;</h2>
+ <h1 data-sourcepos="1:1-3:0" dir="auto">
+ <a id="user-content-foo-bar" class="anchor" href="#foo-bar" aria-hidden="true"></a>Foo <em>bar</em>
+ </h1>
+ <h2 data-sourcepos="4:1-5:9" dir="auto">
+ <a id="user-content-foo-bar-1" class="anchor" href="#foo-bar-1" aria-hidden="true"></a>Foo <em>bar</em>
+ </h2>
wysiwyg: |-
<h1>Foo <em>bar</em></h1>
-04_03__leaf_blocks__setext_headings__02:
+04_03__leaf_blocks__setext_headings__002:
canonical: |
<h1>Foo <em>bar
baz</em></h1>
static: |-
- <h1 data-sourcepos="1:1-3:4" dir="auto">&#x000A;<a id="user-content-foo-barbaz" class="anchor" href="#foo-barbaz" aria-hidden="true"></a>Foo <em>bar&#x000A;baz</em>&#x000A;</h1>
+ <h1 data-sourcepos="1:1-3:4" dir="auto">
+ <a id="user-content-foo-barbaz" class="anchor" href="#foo-barbaz" aria-hidden="true"></a>Foo <em>bar
+ baz</em>
+ </h1>
wysiwyg: |-
<h1>Foo <em>bar
baz</em></h1>
-04_03__leaf_blocks__setext_headings__03:
+04_03__leaf_blocks__setext_headings__003:
canonical: |
<h1>Foo <em>bar
baz</em></h1>
static: |-
- <h1 data-sourcepos="1:3-3:4" dir="auto">&#x000A;<a id="user-content-foo-barbaz" class="anchor" href="#foo-barbaz" aria-hidden="true"></a>Foo <em>bar&#x000A;baz</em>&#x000A;</h1>
+ <h1 data-sourcepos="1:3-3:4" dir="auto">
+ <a id="user-content-foo-barbaz" class="anchor" href="#foo-barbaz" aria-hidden="true"></a>Foo <em>bar
+ baz</em>
+ </h1>
wysiwyg: |-
<h1>Foo <em>bar
baz</em></h1>
-04_03__leaf_blocks__setext_headings__04:
+04_03__leaf_blocks__setext_headings__004:
canonical: |
<h2>Foo</h2>
<h1>Foo</h1>
static: |-
- <h2 data-sourcepos="1:1-3:0" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>&#x000A;<h1 data-sourcepos="4:1-5:1" dir="auto">&#x000A;<a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>Foo</h1>
+ <h2 data-sourcepos="1:1-3:0" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
+ <h1 data-sourcepos="4:1-5:1" dir="auto">
+ <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>Foo</h1>
wysiwyg: |-
<h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__05:
+04_03__leaf_blocks__setext_headings__005:
canonical: |
<h2>Foo</h2>
<h2>Foo</h2>
<h1>Foo</h1>
static: |-
- <h2 data-sourcepos="1:4-3:0" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>&#x000A;<h2 data-sourcepos="4:3-6:0" dir="auto">&#x000A;<a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>Foo</h2>&#x000A;<h1 data-sourcepos="7:3-8:5" dir="auto">&#x000A;<a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>Foo</h1>
+ <h2 data-sourcepos="1:4-3:0" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
+ <h2 data-sourcepos="4:3-6:0" dir="auto">
+ <a id="user-content-foo-1" class="anchor" href="#foo-1" aria-hidden="true"></a>Foo</h2>
+ <h1 data-sourcepos="7:3-8:5" dir="auto">
+ <a id="user-content-foo-2" class="anchor" href="#foo-2" aria-hidden="true"></a>Foo</h1>
wysiwyg: |-
<h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__06:
+04_03__leaf_blocks__setext_headings__006:
canonical: |
<pre><code>Foo
---
@@ -483,74 +625,96 @@
</code></pre>
<hr />
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-4:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">Foo</span>&#x000A;<span id="LC2" class="line" lang="plaintext">---</span>&#x000A;<span id="LC3" class="line" lang="plaintext"></span>&#x000A;<span id="LC4" class="line" lang="plaintext">Foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<hr data-sourcepos="5:1-5:3">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-4:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">Foo</span>
+ <span id="LC2" class="line" lang="plaintext">---</span>
+ <span id="LC3" class="line" lang="plaintext"></span>
+ <span id="LC4" class="line" lang="plaintext">Foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <hr data-sourcepos="5:1-5:3">
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>Foo
---
Foo</code></pre>
-04_03__leaf_blocks__setext_headings__07:
+04_03__leaf_blocks__setext_headings__007:
canonical: |
<h2>Foo</h2>
static: |-
- <h2 data-sourcepos="1:1-2:13" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
+ <h2 data-sourcepos="1:1-2:13" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
wysiwyg: |-
<h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__08:
+04_03__leaf_blocks__setext_headings__008:
canonical: |
<p>Foo
---</p>
static: |-
- <p data-sourcepos="1:1-2:7" dir="auto">Foo&#x000A;---</p>
+ <p data-sourcepos="1:1-2:7" dir="auto">Foo
+ ---</p>
wysiwyg: |-
<p>Foo
---</p>
-04_03__leaf_blocks__setext_headings__09:
+04_03__leaf_blocks__setext_headings__009:
canonical: |
<p>Foo
= =</p>
<p>Foo</p>
<hr />
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">Foo&#x000A;= =</p>&#x000A;<p data-sourcepos="4:1-4:3" dir="auto">Foo</p>&#x000A;<hr data-sourcepos="5:1-5:5">
+ <p data-sourcepos="1:1-2:3" dir="auto">Foo
+ = =</p>
+ <p data-sourcepos="4:1-4:3" dir="auto">Foo</p>
+ <hr data-sourcepos="5:1-5:5">
wysiwyg: |-
<p>Foo
= =</p>
-04_03__leaf_blocks__setext_headings__10:
+04_03__leaf_blocks__setext_headings__010:
canonical: |
<h2>Foo</h2>
static: |-
- <h2 data-sourcepos="1:1-2:5" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
+ <h2 data-sourcepos="1:1-2:5" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h2>
wysiwyg: |-
<h2>Foo</h2>
-04_03__leaf_blocks__setext_headings__11:
+04_03__leaf_blocks__setext_headings__011:
canonical: |
<h2>Foo\</h2>
static: |-
- <h2 data-sourcepos="1:1-2:4" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo\</h2>
+ <h2 data-sourcepos="1:1-2:4" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo\</h2>
wysiwyg: |-
<h2>Foo\</h2>
-04_03__leaf_blocks__setext_headings__12:
+04_03__leaf_blocks__setext_headings__012:
canonical: |
<h2>`Foo</h2>
<p>`</p>
<h2>&lt;a title=&quot;a lot</h2>
<p>of dashes&quot;/&gt;</p>
static: |-
- <h2 data-sourcepos="1:1-3:1" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>`Foo</h2>&#x000A;<p data-sourcepos="3:1-3:1" dir="auto">`</p>&#x000A;<h2 data-sourcepos="5:1-7:12" dir="auto">&#x000A;<a id="user-content-a-titlea-lot" class="anchor" href="#a-titlea-lot" aria-hidden="true"></a>&lt;a title="a lot</h2>&#x000A;<p data-sourcepos="7:1-7:12" dir="auto">of dashes"/&gt;</p>
+ <h2 data-sourcepos="1:1-3:1" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>`Foo</h2>
+ <p data-sourcepos="3:1-3:1" dir="auto">`</p>
+ <h2 data-sourcepos="5:1-7:12" dir="auto">
+ <a id="user-content-a-titlea-lot" class="anchor" href="#a-titlea-lot" aria-hidden="true"></a>&lt;a title="a lot</h2>
+ <p data-sourcepos="7:1-7:12" dir="auto">of dashes"/&gt;</p>
wysiwyg: |-
<h2>`Foo</h2>
-04_03__leaf_blocks__setext_headings__13:
+04_03__leaf_blocks__setext_headings__013:
canonical: |
<blockquote>
<p>Foo</p>
</blockquote>
<hr />
static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">Foo</p>&#x000A;</blockquote>&#x000A;<hr data-sourcepos="2:1-2:3">
+ <blockquote data-sourcepos="1:1-1:5" dir="auto">
+ <p data-sourcepos="1:3-1:5">Foo</p>
+ </blockquote>
+ <hr data-sourcepos="2:1-2:3">
wysiwyg: |-
<blockquote multiline="false"><p>Foo</p></blockquote>
-04_03__leaf_blocks__setext_headings__14:
+04_03__leaf_blocks__setext_headings__014:
canonical: |
<blockquote>
<p>foo
@@ -558,146 +722,191 @@
===</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:3" dir="auto">&#x000A;<p data-sourcepos="1:3-3:3">foo&#x000A;bar&#x000A;===</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:3" dir="auto">
+ <p data-sourcepos="1:3-3:3">foo
+ bar
+ ===</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>foo
bar
===</p></blockquote>
-04_03__leaf_blocks__setext_headings__15:
+04_03__leaf_blocks__setext_headings__015:
canonical: |
<ul>
<li>Foo</li>
</ul>
<hr />
static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">Foo</li>&#x000A;</ul>&#x000A;<hr data-sourcepos="2:1-2:3">
+ <ul data-sourcepos="1:1-1:5" dir="auto">
+ <li data-sourcepos="1:1-1:5">Foo</li>
+ </ul>
+ <hr data-sourcepos="2:1-2:3">
wysiwyg: |-
<ul bullet="*"><li><p>Foo</p></li></ul>
-04_03__leaf_blocks__setext_headings__16:
+04_03__leaf_blocks__setext_headings__016:
canonical: |
<h2>Foo
Bar</h2>
static: |-
- <h2 data-sourcepos="1:1-3:3" dir="auto">&#x000A;<a id="user-content-foobar" class="anchor" href="#foobar" aria-hidden="true"></a>Foo&#x000A;Bar</h2>
+ <h2 data-sourcepos="1:1-3:3" dir="auto">
+ <a id="user-content-foobar" class="anchor" href="#foobar" aria-hidden="true"></a>Foo
+ Bar</h2>
wysiwyg: |-
<h2>Foo
Bar</h2>
-04_03__leaf_blocks__setext_headings__17:
+04_03__leaf_blocks__setext_headings__017:
canonical: |
<hr />
<h2>Foo</h2>
<h2>Bar</h2>
<p>Baz</p>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="yaml"><span class="s">Foo</span></span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<h2 data-sourcepos="4:1-6:3" dir="auto">&#x000A;<a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>Bar</h2>&#x000A;<p data-sourcepos="6:1-6:3" dir="auto">Baz</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="yaml"><span class="s">Foo</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <h2 data-sourcepos="4:1-6:3" dir="auto">
+ <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>Bar</h2>
+ <p data-sourcepos="6:1-6:3" dir="auto">Baz</p>
wysiwyg: |-
<hr>
-04_03__leaf_blocks__setext_headings__18:
+04_03__leaf_blocks__setext_headings__018:
canonical: |
<p>====</p>
static: |-
<p data-sourcepos="2:1-2:4" dir="auto">====</p>
wysiwyg: |-
<p>====</p>
-04_03__leaf_blocks__setext_headings__19:
+04_03__leaf_blocks__setext_headings__019:
canonical: |
<hr />
<hr />
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-2:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-2:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<hr>
-04_03__leaf_blocks__setext_headings__20:
+04_03__leaf_blocks__setext_headings__020:
canonical: |
<ul>
<li>foo</li>
</ul>
<hr />
static: |-
- <ul data-sourcepos="1:1-1:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">foo</li>&#x000A;</ul>&#x000A;<hr data-sourcepos="2:1-2:5">
+ <ul data-sourcepos="1:1-1:5" dir="auto">
+ <li data-sourcepos="1:1-1:5">foo</li>
+ </ul>
+ <hr data-sourcepos="2:1-2:5">
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li></ul>
-04_03__leaf_blocks__setext_headings__21:
+04_03__leaf_blocks__setext_headings__021:
canonical: |
<pre><code>foo
</code></pre>
<hr />
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<hr data-sourcepos="2:1-2:3">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <hr data-sourcepos="2:1-2:3">
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_03__leaf_blocks__setext_headings__22:
+04_03__leaf_blocks__setext_headings__022:
canonical: |
<blockquote>
<p>foo</p>
</blockquote>
<hr />
static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;</blockquote>&#x000A;<hr data-sourcepos="2:1-2:5">
+ <blockquote data-sourcepos="1:1-1:5" dir="auto">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ </blockquote>
+ <hr data-sourcepos="2:1-2:5">
wysiwyg: |-
<blockquote multiline="false"><p>foo</p></blockquote>
-04_03__leaf_blocks__setext_headings__23:
+04_03__leaf_blocks__setext_headings__023:
canonical: |
<h2>&gt; foo</h2>
static: |-
- <h2 data-sourcepos="1:1-2:6" dir="auto">&#x000A;<a id="user-content--foo" class="anchor" href="#-foo" aria-hidden="true"></a>&gt; foo</h2>
+ <h2 data-sourcepos="1:1-2:6" dir="auto">
+ <a id="user-content--foo" class="anchor" href="#-foo" aria-hidden="true"></a>&gt; foo</h2>
wysiwyg: |-
<h2>&gt; foo</h2>
-04_03__leaf_blocks__setext_headings__24:
+04_03__leaf_blocks__setext_headings__024:
canonical: |
<p>Foo</p>
<h2>bar</h2>
<p>baz</p>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>&#x000A;<h2 data-sourcepos="3:1-5:3" dir="auto">&#x000A;<a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h2>&#x000A;<p data-sourcepos="5:1-5:3" dir="auto">baz</p>
+ <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
+ <h2 data-sourcepos="3:1-5:3" dir="auto">
+ <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h2>
+ <p data-sourcepos="5:1-5:3" dir="auto">baz</p>
wysiwyg: |-
<p>Foo</p>
-04_03__leaf_blocks__setext_headings__25:
+04_03__leaf_blocks__setext_headings__025:
canonical: |
<p>Foo
bar</p>
<hr />
<p>baz</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">Foo&#x000A;bar</p>&#x000A;<hr data-sourcepos="4:1-5:0">&#x000A;<p data-sourcepos="6:1-6:3" dir="auto">baz</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">Foo
+ bar</p>
+ <hr data-sourcepos="4:1-5:0">
+ <p data-sourcepos="6:1-6:3" dir="auto">baz</p>
wysiwyg: |-
<p>Foo
bar</p>
-04_03__leaf_blocks__setext_headings__26:
+04_03__leaf_blocks__setext_headings__026:
canonical: |
<p>Foo
bar</p>
<hr />
<p>baz</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">Foo&#x000A;bar</p>&#x000A;<hr data-sourcepos="3:1-3:5">&#x000A;<p data-sourcepos="4:1-4:3" dir="auto">baz</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">Foo
+ bar</p>
+ <hr data-sourcepos="3:1-3:5">
+ <p data-sourcepos="4:1-4:3" dir="auto">baz</p>
wysiwyg: |-
<p>Foo
bar</p>
-04_03__leaf_blocks__setext_headings__27:
+04_03__leaf_blocks__setext_headings__027:
canonical: |
<p>Foo
bar
---
baz</p>
static: |-
- <p data-sourcepos="1:1-4:3" dir="auto">Foo&#x000A;bar&#x000A;---&#x000A;baz</p>
+ <p data-sourcepos="1:1-4:3" dir="auto">Foo
+ bar
+ ---
+ baz</p>
wysiwyg: |-
<p>Foo
bar
---
baz</p>
-04_04__leaf_blocks__indented_code_blocks__01:
+04_04__leaf_blocks__indented_code_blocks__001:
canonical: |
<pre><code>a simple
indented code block
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-2:25" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">a simple</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> indented code block</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-2:25" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">a simple</span>
+ <span id="LC2" class="line" lang="plaintext"> indented code block</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>a simple
indented code block</code></pre>
-04_04__leaf_blocks__indented_code_blocks__02:
+04_04__leaf_blocks__indented_code_blocks__002:
canonical: |
<ul>
<li>
@@ -706,10 +915,15 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:3-3:7" dir="auto">&#x000A;<li data-sourcepos="1:3-3:7">&#x000A;<p data-sourcepos="1:5-1:7">foo</p>&#x000A;<p data-sourcepos="3:5-3:7">bar</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:3-3:7" dir="auto">
+ <li data-sourcepos="1:3-3:7">
+ <p data-sourcepos="1:5-1:7">foo</p>
+ <p data-sourcepos="3:5-3:7">bar</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-04_04__leaf_blocks__indented_code_blocks__03:
+04_04__leaf_blocks__indented_code_blocks__003:
canonical: |
<ol>
<li>
@@ -720,10 +934,17 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-3:9" dir="auto">&#x000A;<li data-sourcepos="1:1-3:9">&#x000A;<p data-sourcepos="1:5-1:7">foo</p>&#x000A;<ul data-sourcepos="3:5-3:9">&#x000A;<li data-sourcepos="3:5-3:9">bar</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-3:9" dir="auto">
+ <li data-sourcepos="1:1-3:9">
+ <p data-sourcepos="1:5-1:7">foo</p>
+ <ul data-sourcepos="3:5-3:9">
+ <li data-sourcepos="3:5-3:9">bar</li>
+ </ul>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>foo</p><ul bullet="*"><li><p>bar</p></li></ul></li></ol>
-04_04__leaf_blocks__indented_code_blocks__04:
+04_04__leaf_blocks__indented_code_blocks__004:
canonical: |
<pre><code>&lt;a/&gt;
*hi*
@@ -731,13 +952,19 @@
- one
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-4:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;a/&gt;</span>&#x000A;<span id="LC2" class="line" lang="plaintext">*hi*</span>&#x000A;<span id="LC3" class="line" lang="plaintext"></span>&#x000A;<span id="LC4" class="line" lang="plaintext">- one</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-4:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;a/&gt;</span>
+ <span id="LC2" class="line" lang="plaintext">*hi*</span>
+ <span id="LC3" class="line" lang="plaintext"></span>
+ <span id="LC4" class="line" lang="plaintext">- one</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>&lt;a/&gt;
*hi*
- one</code></pre>
-04_04__leaf_blocks__indented_code_blocks__05:
+04_04__leaf_blocks__indented_code_blocks__005:
canonical: |
<pre><code>chunk1
@@ -748,7 +975,16 @@
chunk3
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-7:10" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">chunk1</span>&#x000A;<span id="LC2" class="line" lang="plaintext"></span>&#x000A;<span id="LC3" class="line" lang="plaintext">chunk2</span>&#x000A;<span id="LC4" class="line" lang="plaintext"></span>&#x000A;<span id="LC5" class="line" lang="plaintext"></span>&#x000A;<span id="LC6" class="line" lang="plaintext"></span>&#x000A;<span id="LC7" class="line" lang="plaintext">chunk3</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-7:10" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">chunk1</span>
+ <span id="LC2" class="line" lang="plaintext"></span>
+ <span id="LC3" class="line" lang="plaintext">chunk2</span>
+ <span id="LC4" class="line" lang="plaintext"></span>
+ <span id="LC5" class="line" lang="plaintext"></span>
+ <span id="LC6" class="line" lang="plaintext"></span>
+ <span id="LC7" class="line" lang="plaintext">chunk3</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>chunk1
@@ -757,31 +993,41 @@
chunk3</code></pre>
-04_04__leaf_blocks__indented_code_blocks__06:
+04_04__leaf_blocks__indented_code_blocks__006:
canonical: "<pre><code>chunk1\n \n chunk2\n</code></pre>\n"
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-3:12" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">chunk1</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> </span>&#x000A;<span id="LC3" class="line" lang="plaintext"> chunk2</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-3:12" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">chunk1</span>
+ <span id="LC2" class="line" lang="plaintext"> </span>
+ <span id="LC3" class="line" lang="plaintext"> chunk2</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: "<pre class=\"content-editor-code-block undefined code highlight\"><code>chunk1\n
\ \n chunk2</code></pre>"
-04_04__leaf_blocks__indented_code_blocks__07:
+04_04__leaf_blocks__indented_code_blocks__007:
canonical: |
<p>Foo
bar</p>
static: |-
- <p data-sourcepos="1:1-2:7" dir="auto">Foo&#x000A;bar</p>
+ <p data-sourcepos="1:1-2:7" dir="auto">Foo
+ bar</p>
wysiwyg: |-
<p>Foo
bar</p>
-04_04__leaf_blocks__indented_code_blocks__08:
+04_04__leaf_blocks__indented_code_blocks__008:
canonical: |
<pre><code>foo
</code></pre>
<p>bar</p>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="2:1-2:3" dir="auto">bar</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="2:1-2:3" dir="auto">bar</p>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_04__leaf_blocks__indented_code_blocks__09:
+04_04__leaf_blocks__indented_code_blocks__009:
canonical: |
<h1>Heading</h1>
<pre><code>foo
@@ -791,120 +1037,174 @@
</code></pre>
<hr />
static: |-
- <h1 data-sourcepos="1:1-1:9" dir="auto">&#x000A;<a id="user-content-heading" class="anchor" href="#heading" aria-hidden="true"></a>Heading</h1>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="2:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<h2 data-sourcepos="3:1-5:7" dir="auto">&#x000A;<a id="user-content-heading-1" class="anchor" href="#heading-1" aria-hidden="true"></a>Heading</h2>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="5:5-5:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<hr data-sourcepos="6:1-6:4">
+ <h1 data-sourcepos="1:1-1:9" dir="auto">
+ <a id="user-content-heading" class="anchor" href="#heading" aria-hidden="true"></a>Heading</h1>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="2:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <h2 data-sourcepos="3:1-5:7" dir="auto">
+ <a id="user-content-heading-1" class="anchor" href="#heading-1" aria-hidden="true"></a>Heading</h2>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="5:5-5:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <hr data-sourcepos="6:1-6:4">
wysiwyg: |-
<h1>Heading</h1>
-04_04__leaf_blocks__indented_code_blocks__10:
+04_04__leaf_blocks__indented_code_blocks__010:
canonical: |
<pre><code> foo
bar
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span>&#x000A;<span id="LC2" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> foo</span>
+ <span id="LC2" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code> foo
bar</code></pre>
-04_04__leaf_blocks__indented_code_blocks__11:
+04_04__leaf_blocks__indented_code_blocks__011:
canonical: |
<pre><code>foo
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:5-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:5-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_04__leaf_blocks__indented_code_blocks__12:
+04_04__leaf_blocks__indented_code_blocks__012:
canonical: "<pre><code>foo \n</code></pre>\n"
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo </span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo </span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>foo </code></pre>
-04_05__leaf_blocks__fenced_code_blocks__01:
+04_05__leaf_blocks__fenced_code_blocks__001:
canonical: |
<pre><code>&lt;
&gt;
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> &gt;</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;</span>
+ <span id="LC2" class="line" lang="plaintext"> &gt;</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>&lt;
&gt;</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__02:
+04_05__leaf_blocks__fenced_code_blocks__002:
canonical: |
<pre><code>&lt;
&gt;
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> &gt;</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;</span>
+ <span id="LC2" class="line" lang="plaintext"> &gt;</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>&lt;
&gt;</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__03:
+04_05__leaf_blocks__fenced_code_blocks__003:
canonical: |
<p><code>foo</code></p>
static: |-
<p data-sourcepos="1:1-3:2" dir="auto"><code>foo</code></p>
wysiwyg: |-
<p><code>foo</code></p>
-04_05__leaf_blocks__fenced_code_blocks__04:
+04_05__leaf_blocks__fenced_code_blocks__004:
canonical: |
<pre><code>aaa
~~~
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext">~~~</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext">~~~</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
~~~</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__05:
+04_05__leaf_blocks__fenced_code_blocks__005:
canonical: |
<pre><code>aaa
```
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext">```</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext">```</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__06:
+04_05__leaf_blocks__fenced_code_blocks__006:
canonical: |
<pre><code>aaa
```
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext">```</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext">```</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__07:
+04_05__leaf_blocks__fenced_code_blocks__007:
canonical: |
<pre><code>aaa
~~~
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext">~~~</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext">~~~</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
~~~</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__08:
+04_05__leaf_blocks__fenced_code_blocks__008:
canonical: |
<pre><code></code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-1:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-1:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code></code></pre>
-04_05__leaf_blocks__fenced_code_blocks__09:
+04_05__leaf_blocks__fenced_code_blocks__009:
canonical: |
<pre><code>
```
aaa
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>&#x000A;<span id="LC2" class="line" lang="plaintext">```</span>&#x000A;<span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>
+ <span id="LC2" class="line" lang="plaintext">```</span>
+ <span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>
```
aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__10:
+04_05__leaf_blocks__fenced_code_blocks__010:
canonical: |
<blockquote>
<pre><code>aaa
@@ -912,191 +1212,270 @@
</blockquote>
<p>bbb</p>
static: |-
- <blockquote data-sourcepos="1:1-2:5" dir="auto">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:3-3:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</blockquote>&#x000A;<p data-sourcepos="4:1-4:3" dir="auto">bbb</p>
+ <blockquote data-sourcepos="1:1-2:5" dir="auto">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:3-3:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </blockquote>
+ <p data-sourcepos="4:1-4:3" dir="auto">bbb</p>
wysiwyg: |-
<blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre></blockquote>
-04_05__leaf_blocks__fenced_code_blocks__11:
+04_05__leaf_blocks__fenced_code_blocks__011:
canonical: "<pre><code>\n \n</code></pre>\n"
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>&#x000A;<span id="LC2" class="line" lang="plaintext"> </span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>
+ <span id="LC2" class="line" lang="plaintext"> </span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>
</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__12:
+04_05__leaf_blocks__fenced_code_blocks__012:
canonical: |
<pre><code></code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-2:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-2:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code></code></pre>
-04_05__leaf_blocks__fenced_code_blocks__13:
+04_05__leaf_blocks__fenced_code_blocks__013:
canonical: |
<pre><code>aaa
aaa
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:2-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:2-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__14:
+04_05__leaf_blocks__fenced_code_blocks__014:
canonical: |
<pre><code>aaa
aaa
aaa
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:3-5:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:3-5:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext">aaa</span>
+ <span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
aaa
aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__15:
+04_05__leaf_blocks__fenced_code_blocks__015:
canonical: |
<pre><code>aaa
aaa
aaa
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:4-5:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> aaa</span>&#x000A;<span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:4-5:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext"> aaa</span>
+ <span id="LC3" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
aaa
aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__16:
+04_05__leaf_blocks__fenced_code_blocks__016:
canonical: |
<pre><code>```
aaa
```
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-3:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">```</span>&#x000A;<span id="LC2" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC3" class="line" lang="plaintext">```</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-3:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">```</span>
+ <span id="LC2" class="line" lang="plaintext">aaa</span>
+ <span id="LC3" class="line" lang="plaintext">```</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>```
aaa
```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__17:
+04_05__leaf_blocks__fenced_code_blocks__017:
canonical: |
<pre><code>aaa
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__18:
+04_05__leaf_blocks__fenced_code_blocks__018:
canonical: |
<pre><code>aaa
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:4-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:4-3:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__19:
+04_05__leaf_blocks__fenced_code_blocks__019:
canonical: |
<pre><code>aaa
```
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> ```</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext"> ```</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
```</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__20:
+04_05__leaf_blocks__fenced_code_blocks__020:
canonical: |
<p><code> </code>
aaa</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto"><code> </code>&#x000A;aaa</p>
+ <p data-sourcepos="1:1-2:3" dir="auto"><code> </code>
+ aaa</p>
wysiwyg: |-
<p><code>
aaa</code></p>
-04_05__leaf_blocks__fenced_code_blocks__21:
+04_05__leaf_blocks__fenced_code_blocks__021:
canonical: |
<pre><code>aaa
~~~ ~~
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>&#x000A;<span id="LC2" class="line" lang="plaintext">~~~ ~~</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span>
+ <span id="LC2" class="line" lang="plaintext">~~~ ~~</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa
~~~ ~~</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__22:
+04_05__leaf_blocks__fenced_code_blocks__022:
canonical: |
<p>foo</p>
<pre><code>bar
</code></pre>
<p>baz</p>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">foo</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="2:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="5:1-5:3" dir="auto">baz</p>
+ <p data-sourcepos="1:1-1:3" dir="auto">foo</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="2:1-4:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="5:1-5:3" dir="auto">baz</p>
wysiwyg: |-
<p>foo</p>
-04_05__leaf_blocks__fenced_code_blocks__23:
+04_05__leaf_blocks__fenced_code_blocks__023:
canonical: |
<h2>foo</h2>
<pre><code>bar
</code></pre>
<h1>baz</h1>
static: |-
- <h2 data-sourcepos="1:1-3:3" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:1-5:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<h1 data-sourcepos="6:1-6:5" dir="auto">&#x000A;<a id="user-content-baz" class="anchor" href="#baz" aria-hidden="true"></a>baz</h1>
+ <h2 data-sourcepos="1:1-3:3" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h2>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:1-5:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <h1 data-sourcepos="6:1-6:5" dir="auto">
+ <a id="user-content-baz" class="anchor" href="#baz" aria-hidden="true"></a>baz</h1>
wysiwyg: |-
<h2>foo</h2>
-04_05__leaf_blocks__fenced_code_blocks__24:
+04_05__leaf_blocks__fenced_code_blocks__024:
canonical: |
<pre><code class="language-ruby">def foo(x)
return 3
end
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-5:3" class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="n">x</span><span class="p">)</span></span>&#x000A;<span id="LC2" class="line" lang="ruby"> <span class="k">return</span> <span class="mi">3</span></span>&#x000A;<span id="LC3" class="line" lang="ruby"><span class="k">end</span></span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-5:3" class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="n">x</span><span class="p">)</span></span>
+ <span id="LC2" class="line" lang="ruby"> <span class="k">return</span> <span class="mi">3</span></span>
+ <span id="LC3" class="line" lang="ruby"><span class="k">end</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre language="ruby" class="content-editor-code-block undefined code highlight"><code>def foo(x)
return 3
end</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__25:
+04_05__leaf_blocks__fenced_code_blocks__025:
canonical: |
<pre><code class="language-ruby">def foo(x)
return 3
end
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-5:7" class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="n">x</span><span class="p">)</span></span>&#x000A;<span id="LC2" class="line" lang="ruby"> <span class="k">return</span> <span class="mi">3</span></span>&#x000A;<span id="LC3" class="line" lang="ruby"><span class="k">end</span></span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-5:7" class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">foo</span><span class="p">(</span><span class="n">x</span><span class="p">)</span></span>
+ <span id="LC2" class="line" lang="ruby"> <span class="k">return</span> <span class="mi">3</span></span>
+ <span id="LC3" class="line" lang="ruby"><span class="k">end</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre language="ruby" class="content-editor-code-block undefined code highlight"><code>def foo(x)
return 3
end</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__26:
+04_05__leaf_blocks__fenced_code_blocks__026:
canonical: |
<pre><code class="language-;"></code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-2:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-2:4" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang=";" v-pre="true"><code></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre language=";" class="content-editor-code-block undefined code highlight"><code></code></pre>
-04_05__leaf_blocks__fenced_code_blocks__27:
+04_05__leaf_blocks__fenced_code_blocks__027:
canonical: |
<p><code>aa</code>
foo</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto"><code>aa</code>&#x000A;foo</p>
+ <p data-sourcepos="1:1-2:3" dir="auto"><code>aa</code>
+ foo</p>
wysiwyg: |-
<p><code>aa</code>
foo</p>
-04_05__leaf_blocks__fenced_code_blocks__28:
+04_05__leaf_blocks__fenced_code_blocks__028:
canonical: |
<pre><code class="language-aa">foo
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="aa" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre language="aa" class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-04_05__leaf_blocks__fenced_code_blocks__29:
+04_05__leaf_blocks__fenced_code_blocks__029:
canonical: |
<pre><code>``` aaa
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">``` aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">``` aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>``` aaa</code></pre>
-04_06__leaf_blocks__html_blocks__01:
+04_06__leaf_blocks__html_blocks__001:
canonical: |
<table><tr><td>
<pre>
@@ -1105,11 +1484,16 @@
</pre></p>
</td></tr></table>
static: |-
- <table dir="auto"><tr><td>&#x000A;<pre>&#x000A;**Hello**,&#x000A;<p data-sourcepos="5:1-6:6"><em>world</em>.&#x000A;</p></pre>&#x000A;</td></tr></table>
+ <table dir="auto"><tr><td>
+ <pre>
+ **Hello**,
+ <p data-sourcepos="5:1-6:6"><em>world</em>.
+ </p></pre>
+ </td></tr></table>
wysiwyg: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__02:
+04_06__leaf_blocks__html_blocks__002:
canonical: |
<table>
<tr>
@@ -1120,70 +1504,88 @@
</table>
<p>okay.</p>
static: |-
- <table dir="auto">&#x000A; <tr>&#x000A; <td>&#x000A; hi&#x000A; </td>&#x000A; </tr>&#x000A;</table>&#x000A;<p data-sourcepos="9:1-9:5" dir="auto">okay.</p>
+ <table dir="auto">
+ <tr>
+ <td>
+ hi
+ </td>
+ </tr>
+ </table>
+ <p data-sourcepos="9:1-9:5" dir="auto">okay.</p>
wysiwyg: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__03:
+04_06__leaf_blocks__html_blocks__003:
canonical: |2
<div>
*hello*
<foo><a>
static: |2-
- <div>&#x000A; *hello*&#x000A; <a></a>&#x000A;</div>
+ <div>
+ *hello*
+ <a></a>
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__04:
+04_06__leaf_blocks__html_blocks__004:
canonical: |
</div>
*foo*
- static: |-
- &#x000A;*foo*
+ static: |2-
+
+ *foo*
wysiwyg: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__05:
+ <p>
+ *foo*</p>
+04_06__leaf_blocks__html_blocks__005:
canonical: |
<DIV CLASS="foo">
<p><em>Markdown</em></p>
</DIV>
static: |-
- <div>&#x000A;<p data-sourcepos="3:1-3:10"><em>Markdown</em></p>&#x000A;</div>
+ <div>
+ <p data-sourcepos="3:1-3:10"><em>Markdown</em></p>
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__06:
+04_06__leaf_blocks__html_blocks__006:
canonical: |
<div id="foo"
class="bar">
</div>
static: |-
- <div>&#x000A;</div>
+ <div>
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__07:
+04_06__leaf_blocks__html_blocks__007:
canonical: |
<div id="foo" class="bar
baz">
</div>
static: |-
- <div>&#x000A;</div>
+ <div>
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__08:
+04_06__leaf_blocks__html_blocks__008:
canonical: |
<div>
*foo*
<p><em>bar</em></p>
static: |-
- <div>&#x000A;*foo*&#x000A;<p data-sourcepos="4:1-4:5"><em>bar</em></p>&#x000A;</div>
+ <div>
+ *foo*
+ <p data-sourcepos="4:1-4:5"><em>bar</em></p>
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__09:
+04_06__leaf_blocks__html_blocks__009:
canonical: |
<div id="foo"
*hi*
@@ -1191,7 +1593,7 @@
<div></div>
wysiwyg: |-
<p></p>
-04_06__leaf_blocks__html_blocks__10:
+04_06__leaf_blocks__html_blocks__010:
canonical: |
<div class
foo
@@ -1199,7 +1601,7 @@
<div></div>
wysiwyg: |-
<p></p>
-04_06__leaf_blocks__html_blocks__11:
+04_06__leaf_blocks__html_blocks__011:
canonical: |
<div *???-&&&-<---
*foo*
@@ -1207,7 +1609,7 @@
<div></div>
wysiwyg: |-
<p></p>
-04_06__leaf_blocks__html_blocks__12:
+04_06__leaf_blocks__html_blocks__012:
canonical: |
<div><a href="bar">*foo*</a></div>
static: |-
@@ -1215,95 +1617,112 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__13:
+04_06__leaf_blocks__html_blocks__013:
canonical: |
<table><tr><td>
foo
</td></tr></table>
static: |-
- <table dir="auto"><tr><td>&#x000A;foo&#x000A;</td></tr></table>
+ <table dir="auto"><tr><td>
+ foo
+ </td></tr></table>
wysiwyg: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__14:
+04_06__leaf_blocks__html_blocks__014:
canonical: |
<div></div>
``` c
int x = 33;
```
static: |-
- <div></div>&#x000A;``` c&#x000A;int x = 33;&#x000A;```
+ <div></div>
+ ``` c
+ int x = 33;
+ ```
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__15:
+04_06__leaf_blocks__html_blocks__015:
canonical: |
<a href="foo">
*bar*
</a>
static: |-
- <a href="foo">&#x000A;*bar*&#x000A;</a>
+ <a href="foo">
+ *bar*
+ </a>
wysiwyg: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__16:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="foo">
+ *bar*
+ </a></p>
+04_06__leaf_blocks__html_blocks__016:
canonical: |
<Warning>
*bar*
</Warning>
- static: |-
- &#x000A;*bar*
+ static: |2
+
+ *bar*
wysiwyg: |-
Error - check implementation:
Hast node of type "warning" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__17:
+04_06__leaf_blocks__html_blocks__017:
canonical: |
<i class="foo">
*bar*
</i>
static: |-
- <i>&#x000A;*bar*&#x000A;</i>
+ <i>
+ *bar*
+ </i>
wysiwyg: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__18:
+ <p><em>
+ *bar*
+ </em></p>
+04_06__leaf_blocks__html_blocks__018:
canonical: |
</ins>
*bar*
- static: |-
- &#x000A;*bar*
+ static: |2-
+
+ *bar*
wysiwyg: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__19:
+ <p>
+ *bar*</p>
+04_06__leaf_blocks__html_blocks__019:
canonical: |
<del>
*foo*
</del>
static: |-
- <del>&#x000A;*foo*&#x000A;</del>
+ <del>
+ *foo*
+ </del>
wysiwyg: |-
- Error - check implementation:
- Hast node of type "del" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__20:
+ <p><s>
+ *foo*
+ </s></p>
+04_06__leaf_blocks__html_blocks__020:
canonical: |
<del>
<p><em>foo</em></p>
</del>
static: |-
- <del>&#x000A;<p data-sourcepos="3:1-3:5"><em>foo</em></p>&#x000A;</del>
+ <del>
+ <p data-sourcepos="3:1-3:5"><em>foo</em></p>
+ </del>
wysiwyg: |-
Error - check implementation:
- Hast node of type "del" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__21:
+ Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+04_06__leaf_blocks__html_blocks__021:
canonical: |
<p><del><em>foo</em></del></p>
static: |-
<p data-sourcepos="1:1-1:16" dir="auto"><del><em>foo</em></del></p>
wysiwyg: |-
- Error - check implementation:
- Hast node of type "del" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__22:
+ <p><em><s>foo</s></em></p>
+04_06__leaf_blocks__html_blocks__022:
canonical: |
<pre language="haskell"><code>
import Text.HTML.TagSoup
@@ -1313,14 +1732,22 @@
</code></pre>
<p>okay</p>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>&#x000A;<span id="LC2" class="line" lang="plaintext">import Text.HTML.TagSoup</span>&#x000A;<span id="LC3" class="line" lang="plaintext"></span>&#x000A;<span id="LC4" class="line" lang="plaintext">main :: IO ()</span>&#x000A;<span id="LC5" class="line" lang="plaintext">main = print $ parseTags tags</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="7:1-7:4" dir="auto">okay</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"></span>
+ <span id="LC2" class="line" lang="plaintext">import Text.HTML.TagSoup</span>
+ <span id="LC3" class="line" lang="plaintext"></span>
+ <span id="LC4" class="line" lang="plaintext">main :: IO ()</span>
+ <span id="LC5" class="line" lang="plaintext">main = print $ parseTags tags</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="7:1-7:4" dir="auto">okay</p>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>
import Text.HTML.TagSoup
main :: IO ()
main = print $ parseTags tags</code></pre>
-04_06__leaf_blocks__html_blocks__23:
+04_06__leaf_blocks__html_blocks__023:
canonical: |
<script type="text/javascript">
// JavaScript example
@@ -1328,12 +1755,13 @@
document.getElementById("demo").innerHTML = "Hello JavaScript!";
</script>
<p>okay</p>
- static: |-
- &#x000A;<p data-sourcepos="6:1-6:4" dir="auto">okay</p>
+ static: |2-
+
+ <p data-sourcepos="6:1-6:4" dir="auto">okay</p>
wysiwyg: |-
Error - check implementation:
Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__24:
+04_06__leaf_blocks__html_blocks__024:
canonical: |
<style
type="text/css">
@@ -1342,23 +1770,30 @@
p {color:blue;}
</style>
<p>okay</p>
- static: |-
- &#x000A;h1 {color:red;}&#x000A;&#x000A;p {color:blue;}&#x000A;&#x000A;<p data-sourcepos="7:1-7:4" dir="auto">okay</p>
+ static: |2-
+
+ h1 {color:red;}
+
+ p {color:blue;}
+
+ <p data-sourcepos="7:1-7:4" dir="auto">okay</p>
wysiwyg: |-
Error - check implementation:
Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__25:
+04_06__leaf_blocks__html_blocks__025:
canonical: |
<style
type="text/css">
foo
- static: |-
- &#x000A;&#x000A;foo
+ static: |2-
+
+
+ foo
wysiwyg: |-
Error - check implementation:
Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__26:
+04_06__leaf_blocks__html_blocks__026:
canonical: |
<blockquote>
<div>
@@ -1366,11 +1801,17 @@
</blockquote>
<p>bar</p>
static: |-
- <blockquote data-sourcepos="1:1-2:5" dir="auto">&#x000A;<div>&#x000A;foo&#x000A;&#x000A;<p data-sourcepos="4:1-4:3">bar</p>&#x000A;</div>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-2:5" dir="auto">
+ <div>
+ foo
+
+ <p data-sourcepos="4:1-4:3">bar</p>
+ </div>
+ </blockquote>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__27:
+04_06__leaf_blocks__html_blocks__027:
canonical: |
<ul>
<li>
@@ -1379,29 +1820,38 @@
<li>foo</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-2:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:7">&#x000A;<div>&#x000A;&#x000A;<li data-sourcepos="2:1-2:5">foo</li>&#x000A;</div>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-2:5" dir="auto">
+ <li data-sourcepos="1:1-1:7">
+ <div>
+
+ <li data-sourcepos="2:1-2:5">foo</li>
+ </div>
+ </li>
+ </ul>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__28:
+04_06__leaf_blocks__html_blocks__028:
canonical: |
<style>p{color:red;}</style>
<p><em>foo</em></p>
static: |-
- p{color:red;}&#x000A;<p data-sourcepos="2:1-2:5" dir="auto"><em>foo</em></p>
+ p{color:red;}
+ <p data-sourcepos="2:1-2:5" dir="auto"><em>foo</em></p>
wysiwyg: |-
Error - check implementation:
Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__29:
+04_06__leaf_blocks__html_blocks__029:
canonical: |
<!-- foo -->*bar*
<p><em>baz</em></p>
static: |-
- *bar*&#x000A;<p data-sourcepos="2:1-2:5" dir="auto"><em>baz</em></p>
+ *bar*
+ <p data-sourcepos="2:1-2:5" dir="auto"><em>baz</em></p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__30:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__030:
canonical: |
<script>
foo
@@ -1411,19 +1861,20 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__31:
+04_06__leaf_blocks__html_blocks__031:
canonical: |
<!-- Foo
bar
baz -->
<p>okay</p>
- static: |-
- &#x000A;<p data-sourcepos="5:1-5:4" dir="auto">okay</p>
+ static: |2-
+
+ <p data-sourcepos="5:1-5:4" dir="auto">okay</p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__32:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__032:
canonical: |
<?php
@@ -1432,17 +1883,20 @@
?>
<p>okay</p>
static: |-
- <?php echo '>';&#x000A;&#x000A;?&gt;&#x000A;<p data-sourcepos="6:1-6:4" dir="auto">okay</p>
+ <?php echo '>';
+
+ ?&gt;
+ <p data-sourcepos="6:1-6:4" dir="auto">okay</p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__33:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__033:
canonical: |
<!DOCTYPE html>
static: ""
wysiwyg: |-
<p></p>
-04_06__leaf_blocks__html_blocks__34:
+04_06__leaf_blocks__html_blocks__034:
canonical: |
<![CDATA[
function matchwo(a,b)
@@ -1457,85 +1911,116 @@
}
]]>
<p>okay</p>
- static: |-
- &#x000A;<p data-sourcepos="13:1-13:4" dir="auto">okay</p>
+ static: |2-
+ &lt;![CDATA[
+ function matchwo(a,b)
+ {
+ if (a &lt; b &amp;&amp; a &lt; 0) then {
+ return 1;
+
+ } else {
+
+ return 0;
+ }
+ }
+ ]]&gt;
+ <p data-sourcepos="13:1-13:4" dir="auto">okay</p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__35:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__035:
canonical: |2
<!-- foo -->
<pre><code>&lt;!-- foo --&gt;
</code></pre>
- static: |2-
- &#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:5-3:16" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;!-- foo --&gt;</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ static: " \n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre
+ data-sourcepos=\"3:5-3:16\" class=\"code highlight js-syntax-highlight language-plaintext\"
+ lang=\"plaintext\" data-canonical-lang=\"\" v-pre=\"true\"><code><span id=\"LC1\"
+ class=\"line\" lang=\"plaintext\">&lt;!-- foo --&gt;</span></code></pre>\n<copy-code></copy-code>\n</div>"
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__36:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__036:
canonical: |2
<div>
<pre><code>&lt;div&gt;
</code></pre>
static: |2-
- <div>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:5-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;div&gt;</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</div>
+ <div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:5-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;div&gt;</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__37:
+04_06__leaf_blocks__html_blocks__037:
canonical: |
<p>Foo</p>
<div>
bar
</div>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>&#x000A;<div>&#x000A;bar&#x000A;</div>
+ <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
+ <div>
+ bar
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__38:
+04_06__leaf_blocks__html_blocks__038:
canonical: |
<div>
bar
</div>
*foo*
static: |-
- <div>&#x000A;bar&#x000A;</div>&#x000A;*foo*
+ <div>
+ bar
+ </div>
+ *foo*
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__39:
+04_06__leaf_blocks__html_blocks__039:
canonical: |
<p>Foo
<a href="bar">
baz</p>
static: |-
- <p data-sourcepos="1:1-3:3" dir="auto">Foo&#x000A;<a href="bar">&#x000A;baz</a></p>
+ <p data-sourcepos="1:1-3:3" dir="auto">Foo
+ <a href="bar">
+ baz</a></p>
wysiwyg: |-
<p>Foo
<a target="_blank" rel="noopener noreferrer nofollow" href="bar">
baz</a></p>
-04_06__leaf_blocks__html_blocks__40:
+04_06__leaf_blocks__html_blocks__040:
canonical: |
<div>
<p><em>Emphasized</em> text.</p>
</div>
static: |-
- <div>&#x000A;<p data-sourcepos="3:1-3:18"><em>Emphasized</em> text.</p>&#x000A;</div>
+ <div>
+ <p data-sourcepos="3:1-3:18"><em>Emphasized</em> text.</p>
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__41:
+04_06__leaf_blocks__html_blocks__041:
canonical: |
<div>
*Emphasized* text.
</div>
static: |-
- <div>&#x000A;*Emphasized* text.&#x000A;</div>
+ <div>
+ *Emphasized* text.
+ </div>
wysiwyg: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__42:
+04_06__leaf_blocks__html_blocks__042:
canonical: |
<table>
<tr>
@@ -1545,11 +2030,17 @@
</tr>
</table>
static: |-
- <table dir="auto">&#x000A;<tr>&#x000A;<td>&#x000A;Hi&#x000A;</td>&#x000A;</tr>&#x000A;</table>
+ <table dir="auto">
+ <tr>
+ <td>
+ Hi
+ </td>
+ </tr>
+ </table>
wysiwyg: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__43:
+04_06__leaf_blocks__html_blocks__043:
canonical: |
<table>
<tr>
@@ -1560,39 +2051,48 @@
</tr>
</table>
static: |-
- <table dir="auto">&#x000A; <tr>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="5:5-8:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;td&gt;</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> Hi</span>&#x000A;<span id="LC3" class="line" lang="plaintext">&lt;/td&gt;</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A; </tr>&#x000A;</table>
+ <table dir="auto">
+ <tr>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="5:5-8:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&lt;td&gt;</span>
+ <span id="LC2" class="line" lang="plaintext"> Hi</span>
+ <span id="LC3" class="line" lang="plaintext">&lt;/td&gt;</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </tr>
+ </table>
wysiwyg: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__01:
+04_07__leaf_blocks__link_reference_definitions__001:
canonical: |
<p><a href="/url" title="title">foo</a></p>
static: |-
<p data-sourcepos="3:1-3:5" dir="auto"><a href="/url" title="title">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__02:
+04_07__leaf_blocks__link_reference_definitions__002:
canonical: |
<p><a href="/url" title="the title">foo</a></p>
static: |-
<p data-sourcepos="5:1-5:5" dir="auto"><a href="/url" title="the title">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="the title">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__03:
+04_07__leaf_blocks__link_reference_definitions__003:
canonical: |
<p><a href="my_(url)" title="title (with parens)">Foo*bar]</a></p>
static: |-
<p data-sourcepos="3:1-3:11" dir="auto"><a href="my_(url)" title="title (with parens)">Foo*bar]</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="my_(url)" title="title (with parens)">Foo*bar]</a></p>
-04_07__leaf_blocks__link_reference_definitions__04:
+04_07__leaf_blocks__link_reference_definitions__004:
canonical: |
<p><a href="my%20url" title="title">Foo bar</a></p>
static: |-
<p data-sourcepos="5:1-5:9" dir="auto"><a href="my%20url" title="title">Foo bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="my%20url" title="title">Foo bar</a></p>
-04_07__leaf_blocks__link_reference_definitions__05:
+04_07__leaf_blocks__link_reference_definitions__005:
canonical: |
<p><a href="/url" title="
title
@@ -1600,277 +2100,323 @@
line2
">foo</a></p>
static: |-
- <p data-sourcepos="7:1-7:5" dir="auto"><a href="/url" title="&#x000A;title&#x000A;line1&#x000A;line2&#x000A;">foo</a></p>
+ <p data-sourcepos="7:1-7:5" dir="auto"><a href="/url" title="
+ title
+ line1
+ line2
+ ">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="
title
line1
line2
">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__06:
+04_07__leaf_blocks__link_reference_definitions__006:
canonical: |
<p>[foo]: /url 'title</p>
<p>with blank line'</p>
<p>[foo]</p>
static: |-
- <p data-sourcepos="1:1-1:18" dir="auto">[foo]: /url 'title</p>&#x000A;<p data-sourcepos="3:1-3:16" dir="auto">with blank line'</p>&#x000A;<p data-sourcepos="5:1-5:5" dir="auto">[foo]</p>
+ <p data-sourcepos="1:1-1:18" dir="auto">[foo]: /url 'title</p>
+ <p data-sourcepos="3:1-3:16" dir="auto">with blank line'</p>
+ <p data-sourcepos="5:1-5:5" dir="auto">[foo]</p>
wysiwyg: |-
<p>[foo]: /url 'title</p>
-04_07__leaf_blocks__link_reference_definitions__07:
+04_07__leaf_blocks__link_reference_definitions__007:
canonical: |
<p><a href="/url">foo</a></p>
static: |-
<p data-sourcepos="4:1-4:5" dir="auto"><a href="/url">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__08:
+04_07__leaf_blocks__link_reference_definitions__008:
canonical: |
<p>[foo]:</p>
<p>[foo]</p>
static: |-
- <p data-sourcepos="1:1-1:6" dir="auto">[foo]:</p>&#x000A;<p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
+ <p data-sourcepos="1:1-1:6" dir="auto">[foo]:</p>
+ <p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
wysiwyg: |-
<p>[foo]:</p>
-04_07__leaf_blocks__link_reference_definitions__09:
+04_07__leaf_blocks__link_reference_definitions__009:
canonical: |
<p><a href="">foo</a></p>
static: |-
<p data-sourcepos="3:1-3:5" dir="auto"><a href="">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__10:
+04_07__leaf_blocks__link_reference_definitions__010:
canonical: |
<p>[foo]: <bar>(baz)</p>
<p>[foo]</p>
static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">[foo]: (baz)</p>&#x000A;<p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
+ <p data-sourcepos="1:1-1:17" dir="auto">[foo]: (baz)</p>
+ <p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
wysiwyg: |-
Error - check implementation:
Hast node of type "bar" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__11:
+04_07__leaf_blocks__link_reference_definitions__011:
canonical: |
<p><a href="/url%5Cbar*baz" title="foo&quot;bar\baz">foo</a></p>
static: |-
<p data-sourcepos="3:1-3:5" dir="auto"><a href="/url%5Cbar*baz" title='foo"bar\baz'>foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url%5Cbar*baz" title="foo&quot;bar\baz">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__12:
+04_07__leaf_blocks__link_reference_definitions__012:
canonical: |
<p><a href="url">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto"><a href="url">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__13:
+04_07__leaf_blocks__link_reference_definitions__013:
canonical: |
<p><a href="first">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto"><a href="first">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="first">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__14:
+04_07__leaf_blocks__link_reference_definitions__014:
canonical: |
<p><a href="/url">Foo</a></p>
static: |-
<p data-sourcepos="3:1-3:5" dir="auto"><a href="/url">Foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__15:
+04_07__leaf_blocks__link_reference_definitions__015:
canonical: |
<p><a href="/%CF%86%CE%BF%CF%85">αγω</a></p>
static: |-
<p data-sourcepos="3:1-3:8" dir="auto"><a href="/%CF%86%CE%BF%CF%85">αγω</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/%CF%86%CE%BF%CF%85">αγω</a></p>
-04_07__leaf_blocks__link_reference_definitions__16:
+04_07__leaf_blocks__link_reference_definitions__016:
canonical: ""
static: ""
wysiwyg: |-
<p></p>
-04_07__leaf_blocks__link_reference_definitions__17:
+04_07__leaf_blocks__link_reference_definitions__017:
canonical: |
<p>bar</p>
static: |-
<p data-sourcepos="1:1-4:3" dir="auto">bar</p>
wysiwyg: |-
<p>bar</p>
-04_07__leaf_blocks__link_reference_definitions__18:
+04_07__leaf_blocks__link_reference_definitions__018:
canonical: |
<p>[foo]: /url &quot;title&quot; ok</p>
static: |-
<p data-sourcepos="1:1-1:22" dir="auto">[foo]: /url "title" ok</p>
wysiwyg: |-
<p>[foo]: /url "title" ok</p>
-04_07__leaf_blocks__link_reference_definitions__19:
+04_07__leaf_blocks__link_reference_definitions__019:
canonical: |
<p>&quot;title&quot; ok</p>
static: |-
<p data-sourcepos="1:1-2:10" dir="auto">"title" ok</p>
wysiwyg: |-
<p>"title" ok</p>
-04_07__leaf_blocks__link_reference_definitions__20:
+04_07__leaf_blocks__link_reference_definitions__020:
canonical: |
<pre><code>[foo]: /url &quot;title&quot;
</code></pre>
<p>[foo]</p>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">[foo]: /url "title"</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">[foo]: /url "title"</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="3:1-3:5" dir="auto">[foo]</p>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>[foo]: /url "title"</code></pre>
-04_07__leaf_blocks__link_reference_definitions__21:
+04_07__leaf_blocks__link_reference_definitions__021:
canonical: |
<pre><code>[foo]: /url
</code></pre>
<p>[foo]</p>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">[foo]: /url</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="5:1-5:5" dir="auto">[foo]</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">[foo]: /url</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="5:1-5:5" dir="auto">[foo]</p>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>[foo]: /url</code></pre>
-04_07__leaf_blocks__link_reference_definitions__22:
+04_07__leaf_blocks__link_reference_definitions__022:
canonical: |
<p>Foo
[bar]: /baz</p>
<p>[bar]</p>
static: |-
- <p data-sourcepos="1:1-2:11" dir="auto">Foo&#x000A;[bar]: /baz</p>&#x000A;<p data-sourcepos="4:1-4:5" dir="auto">[bar]</p>
+ <p data-sourcepos="1:1-2:11" dir="auto">Foo
+ [bar]: /baz</p>
+ <p data-sourcepos="4:1-4:5" dir="auto">[bar]</p>
wysiwyg: |-
<p>Foo
[bar]: /baz</p>
-04_07__leaf_blocks__link_reference_definitions__23:
+04_07__leaf_blocks__link_reference_definitions__023:
canonical: |
<h1><a href="/url">Foo</a></h1>
<blockquote>
<p>bar</p>
</blockquote>
static: |-
- <h1 data-sourcepos="1:1-1:7" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a><a href="/url">Foo</a>&#x000A;</h1>&#x000A;<blockquote data-sourcepos="3:1-3:5" dir="auto">&#x000A;<p data-sourcepos="3:3-3:5">bar</p>&#x000A;</blockquote>
+ <h1 data-sourcepos="1:1-1:7" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a><a href="/url">Foo</a>
+ </h1>
+ <blockquote data-sourcepos="3:1-3:5" dir="auto">
+ <p data-sourcepos="3:3-3:5">bar</p>
+ </blockquote>
wysiwyg: |-
<h1><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Foo</a></h1>
-04_07__leaf_blocks__link_reference_definitions__24:
+04_07__leaf_blocks__link_reference_definitions__024:
canonical: |
<h1>bar</h1>
<p><a href="/url">foo</a></p>
static: |-
- <h1 data-sourcepos="1:1-4:5" dir="auto">&#x000A;<a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h1>&#x000A;<p data-sourcepos="4:1-4:5" dir="auto"><a href="/url">foo</a></p>
+ <h1 data-sourcepos="1:1-4:5" dir="auto">
+ <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>bar</h1>
+ <p data-sourcepos="4:1-4:5" dir="auto"><a href="/url">foo</a></p>
wysiwyg: |-
<h1>bar</h1>
-04_07__leaf_blocks__link_reference_definitions__25:
+04_07__leaf_blocks__link_reference_definitions__025:
canonical: |
<p>===
<a href="/url">foo</a></p>
static: |-
- <p data-sourcepos="1:1-3:5" dir="auto">===&#x000A;<a href="/url">foo</a></p>
+ <p data-sourcepos="1:1-3:5" dir="auto">===
+ <a href="/url">foo</a></p>
wysiwyg: |-
<p>===
<a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__26:
+04_07__leaf_blocks__link_reference_definitions__026:
canonical: |
<p><a href="/foo-url" title="foo">foo</a>,
<a href="/bar-url" title="bar">bar</a>,
<a href="/baz-url">baz</a></p>
static: |-
- <p data-sourcepos="6:1-8:5" dir="auto"><a href="/foo-url" title="foo">foo</a>,&#x000A;<a href="/bar-url" title="bar">bar</a>,&#x000A;<a href="/baz-url">baz</a></p>
+ <p data-sourcepos="6:1-8:5" dir="auto"><a href="/foo-url" title="foo">foo</a>,
+ <a href="/bar-url" title="bar">bar</a>,
+ <a href="/baz-url">baz</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/foo-url" title="foo">foo</a>,
<a target="_blank" rel="noopener noreferrer nofollow" href="/bar-url" title="bar">bar</a>,
<a target="_blank" rel="noopener noreferrer nofollow" href="/baz-url">baz</a></p>
-04_07__leaf_blocks__link_reference_definitions__27:
+04_07__leaf_blocks__link_reference_definitions__027:
canonical: |
<p><a href="/url">foo</a></p>
<blockquote>
</blockquote>
static: |-
- <p data-sourcepos="1:1-1:5" dir="auto"><a href="/url">foo</a></p>&#x000A;<blockquote data-sourcepos="3:1-3:13" dir="auto">&#x000A;</blockquote>
+ <p data-sourcepos="1:1-1:5" dir="auto"><a href="/url">foo</a></p>
+ <blockquote data-sourcepos="3:1-3:13" dir="auto">
+ </blockquote>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-04_07__leaf_blocks__link_reference_definitions__28:
+04_07__leaf_blocks__link_reference_definitions__028:
canonical: ""
static: ""
wysiwyg: |-
<p></p>
-04_08__leaf_blocks__paragraphs__01:
+04_08__leaf_blocks__paragraphs__001:
canonical: |
<p>aaa</p>
<p>bbb</p>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">aaa</p>&#x000A;<p data-sourcepos="3:1-3:3" dir="auto">bbb</p>
+ <p data-sourcepos="1:1-1:3" dir="auto">aaa</p>
+ <p data-sourcepos="3:1-3:3" dir="auto">bbb</p>
wysiwyg: |-
<p>aaa</p>
-04_08__leaf_blocks__paragraphs__02:
+04_08__leaf_blocks__paragraphs__002:
canonical: |
<p>aaa
bbb</p>
<p>ccc
ddd</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">aaa&#x000A;bbb</p>&#x000A;<p data-sourcepos="4:1-5:3" dir="auto">ccc&#x000A;ddd</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">aaa
+ bbb</p>
+ <p data-sourcepos="4:1-5:3" dir="auto">ccc
+ ddd</p>
wysiwyg: |-
<p>aaa
bbb</p>
-04_08__leaf_blocks__paragraphs__03:
+04_08__leaf_blocks__paragraphs__003:
canonical: |
<p>aaa</p>
<p>bbb</p>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">aaa</p>&#x000A;<p data-sourcepos="4:1-4:3" dir="auto">bbb</p>
+ <p data-sourcepos="1:1-1:3" dir="auto">aaa</p>
+ <p data-sourcepos="4:1-4:3" dir="auto">bbb</p>
wysiwyg: |-
<p>aaa</p>
-04_08__leaf_blocks__paragraphs__04:
+04_08__leaf_blocks__paragraphs__004:
canonical: |
<p>aaa
bbb</p>
static: |-
- <p data-sourcepos="1:3-2:4" dir="auto">aaa&#x000A;bbb</p>
+ <p data-sourcepos="1:3-2:4" dir="auto">aaa
+ bbb</p>
wysiwyg: |-
<p>aaa
bbb</p>
-04_08__leaf_blocks__paragraphs__05:
+04_08__leaf_blocks__paragraphs__005:
canonical: |
<p>aaa
bbb
ccc</p>
static: |-
- <p data-sourcepos="1:1-3:42" dir="auto">aaa&#x000A;bbb&#x000A;ccc</p>
+ <p data-sourcepos="1:1-3:42" dir="auto">aaa
+ bbb
+ ccc</p>
wysiwyg: |-
<p>aaa
bbb
ccc</p>
-04_08__leaf_blocks__paragraphs__06:
+04_08__leaf_blocks__paragraphs__006:
canonical: |
<p>aaa
bbb</p>
static: |-
- <p data-sourcepos="1:4-2:3" dir="auto">aaa&#x000A;bbb</p>
+ <p data-sourcepos="1:4-2:3" dir="auto">aaa
+ bbb</p>
wysiwyg: |-
<p>aaa
bbb</p>
-04_08__leaf_blocks__paragraphs__07:
+04_08__leaf_blocks__paragraphs__007:
canonical: |
<pre><code>aaa
</code></pre>
<p>bbb</p>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="2:1-2:3" dir="auto">bbb</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">aaa</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="2:1-2:3" dir="auto">bbb</p>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>aaa</code></pre>
-04_08__leaf_blocks__paragraphs__08:
+04_08__leaf_blocks__paragraphs__008:
canonical: |
<p>aaa<br />
bbb</p>
static: |-
- <p data-sourcepos="1:1-2:8" dir="auto">aaa<br>&#x000A;bbb</p>
+ <p data-sourcepos="1:1-2:8" dir="auto">aaa<br>
+ bbb</p>
wysiwyg: |-
<p>aaa<br>
bbb</p>
-04_09__leaf_blocks__blank_lines__01:
+04_09__leaf_blocks__blank_lines__001:
canonical: |
<p>aaa</p>
<h1>aaa</h1>
static: |-
- <p data-sourcepos="3:1-3:3" dir="auto">aaa</p>&#x000A;<h1 data-sourcepos="6:1-6:5" dir="auto">&#x000A;<a id="user-content-aaa" class="anchor" href="#aaa" aria-hidden="true"></a>aaa</h1>
+ <p data-sourcepos="3:1-3:3" dir="auto">aaa</p>
+ <h1 data-sourcepos="6:1-6:5" dir="auto">
+ <a id="user-content-aaa" class="anchor" href="#aaa" aria-hidden="true"></a>aaa</h1>
wysiwyg: |-
<p>aaa</p>
-04_10__leaf_blocks__tables_extension__01:
+04_10__leaf_blocks__tables_extension__001:
canonical: |
<table>
<thead>
@@ -1887,12 +2433,24 @@
</tbody>
</table>
static: |-
- <table data-sourcepos="1:1-3:13" dir="auto">&#x000A;<thead>&#x000A;<tr data-sourcepos="1:1-1:13">&#x000A;<th data-sourcepos="1:2-1:6">foo</th>&#x000A;<th data-sourcepos="1:8-1:12">bar</th>&#x000A;</tr>&#x000A;</thead>&#x000A;<tbody>&#x000A;<tr data-sourcepos="3:1-3:13">&#x000A;<td data-sourcepos="3:2-3:6">baz</td>&#x000A;<td data-sourcepos="3:8-3:12">bim</td>&#x000A;</tr>&#x000A;</tbody>&#x000A;</table>
+ <table data-sourcepos="1:1-3:13" dir="auto">
+ <thead>
+ <tr data-sourcepos="1:1-1:13">
+ <th data-sourcepos="1:2-1:6">foo</th>
+ <th data-sourcepos="1:8-1:12">bar</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr data-sourcepos="3:1-3:13">
+ <td data-sourcepos="3:2-3:6">baz</td>
+ <td data-sourcepos="3:8-3:12">bim</td>
+ </tr>
+ </tbody>
+ </table>
wysiwyg: |-
- <p>| foo | bar |
- | --- | --- |
- | baz | bim |</p>
-04_10__leaf_blocks__tables_extension__02:
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__002:
canonical: |
<table>
<thead>
@@ -1909,12 +2467,24 @@
</tbody>
</table>
static: |-
- <table data-sourcepos="1:1-3:9" dir="auto">&#x000A;<thead>&#x000A;<tr data-sourcepos="1:1-1:16">&#x000A;<th align="center" data-sourcepos="1:2-1:6">abc</th>&#x000A;<th align="right" data-sourcepos="1:8-1:15">defghi</th>&#x000A;</tr>&#x000A;</thead>&#x000A;<tbody>&#x000A;<tr data-sourcepos="3:1-3:9">&#x000A;<td align="center" data-sourcepos="3:1-3:4">bar</td>&#x000A;<td align="right" data-sourcepos="3:6-3:9">baz</td>&#x000A;</tr>&#x000A;</tbody>&#x000A;</table>
+ <table data-sourcepos="1:1-3:9" dir="auto">
+ <thead>
+ <tr data-sourcepos="1:1-1:16">
+ <th align="center" data-sourcepos="1:2-1:6">abc</th>
+ <th align="right" data-sourcepos="1:8-1:15">defghi</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr data-sourcepos="3:1-3:9">
+ <td align="center" data-sourcepos="3:1-3:4">bar</td>
+ <td align="right" data-sourcepos="3:6-3:9">baz</td>
+ </tr>
+ </tbody>
+ </table>
wysiwyg: |-
- <p>| abc | defghi |
- :-: | -----------:
- bar | baz</p>
-04_10__leaf_blocks__tables_extension__03:
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__003:
canonical: |
<table>
<thead>
@@ -1932,13 +2502,25 @@
</tbody>
</table>
static: |-
- <table data-sourcepos="1:1-4:15" dir="auto">&#x000A;<thead>&#x000A;<tr data-sourcepos="1:1-1:10">&#x000A;<th data-sourcepos="1:2-1:9">f|oo</th>&#x000A;</tr>&#x000A;</thead>&#x000A;<tbody>&#x000A;<tr data-sourcepos="3:1-3:13">&#x000A;<td data-sourcepos="3:2-3:12">b <code>|</code> az</td>&#x000A;</tr>&#x000A;<tr data-sourcepos="4:1-4:15">&#x000A;<td data-sourcepos="4:2-4:14">b <strong>|</strong> im</td>&#x000A;</tr>&#x000A;</tbody>&#x000A;</table>
+ <table data-sourcepos="1:1-4:15" dir="auto">
+ <thead>
+ <tr data-sourcepos="1:1-1:10">
+ <th data-sourcepos="1:2-1:9">f|oo</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr data-sourcepos="3:1-3:13">
+ <td data-sourcepos="3:2-3:12">b <code>|</code> az</td>
+ </tr>
+ <tr data-sourcepos="4:1-4:15">
+ <td data-sourcepos="4:2-4:14">b <strong>|</strong> im</td>
+ </tr>
+ </tbody>
+ </table>
wysiwyg: |-
- <p>| f|oo |
- | ------ |
- | b <code>\|</code> az |
- | b <strong>|</strong> im |</p>
-04_10__leaf_blocks__tables_extension__04:
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__004:
canonical: |
<table>
<thead>
@@ -1958,12 +2540,27 @@
<p>bar</p>
</blockquote>
static: |-
- <table data-sourcepos="1:1-3:13" dir="auto">&#x000A;<thead>&#x000A;<tr data-sourcepos="1:1-1:13">&#x000A;<th data-sourcepos="1:2-1:6">abc</th>&#x000A;<th data-sourcepos="1:8-1:12">def</th>&#x000A;</tr>&#x000A;</thead>&#x000A;<tbody>&#x000A;<tr data-sourcepos="3:1-3:13">&#x000A;<td data-sourcepos="3:2-3:6">bar</td>&#x000A;<td data-sourcepos="3:8-3:12">baz</td>&#x000A;</tr>&#x000A;</tbody>&#x000A;</table>&#x000A;<blockquote data-sourcepos="4:1-4:5" dir="auto">&#x000A;<p data-sourcepos="4:3-4:5">bar</p>&#x000A;</blockquote>
+ <table data-sourcepos="1:1-3:13" dir="auto">
+ <thead>
+ <tr data-sourcepos="1:1-1:13">
+ <th data-sourcepos="1:2-1:6">abc</th>
+ <th data-sourcepos="1:8-1:12">def</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr data-sourcepos="3:1-3:13">
+ <td data-sourcepos="3:2-3:6">bar</td>
+ <td data-sourcepos="3:8-3:12">baz</td>
+ </tr>
+ </tbody>
+ </table>
+ <blockquote data-sourcepos="4:1-4:5" dir="auto">
+ <p data-sourcepos="4:3-4:5">bar</p>
+ </blockquote>
wysiwyg: |-
- <p>| abc | def |
- | --- | --- |
- | bar | baz |</p>
-04_10__leaf_blocks__tables_extension__05:
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__005:
canonical: |
<table>
<thead>
@@ -1985,24 +2582,42 @@
</table>
<p>bar</p>
static: |-
- <table data-sourcepos="1:1-4:3" dir="auto">&#x000A;<thead>&#x000A;<tr data-sourcepos="1:1-1:13">&#x000A;<th data-sourcepos="1:2-1:6">abc</th>&#x000A;<th data-sourcepos="1:8-1:12">def</th>&#x000A;</tr>&#x000A;</thead>&#x000A;<tbody>&#x000A;<tr data-sourcepos="3:1-3:13">&#x000A;<td data-sourcepos="3:2-3:6">bar</td>&#x000A;<td data-sourcepos="3:8-3:12">baz</td>&#x000A;</tr>&#x000A;<tr data-sourcepos="4:1-4:3">&#x000A;<td data-sourcepos="4:1-4:3">bar</td>&#x000A;<td data-sourcepos="4:0-4:0"></td>&#x000A;</tr>&#x000A;</tbody>&#x000A;</table>&#x000A;<p data-sourcepos="6:1-6:3" dir="auto">bar</p>
+ <table data-sourcepos="1:1-4:3" dir="auto">
+ <thead>
+ <tr data-sourcepos="1:1-1:13">
+ <th data-sourcepos="1:2-1:6">abc</th>
+ <th data-sourcepos="1:8-1:12">def</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr data-sourcepos="3:1-3:13">
+ <td data-sourcepos="3:2-3:6">bar</td>
+ <td data-sourcepos="3:8-3:12">baz</td>
+ </tr>
+ <tr data-sourcepos="4:1-4:3">
+ <td data-sourcepos="4:1-4:3">bar</td>
+ <td data-sourcepos="4:0-4:0"></td>
+ </tr>
+ </tbody>
+ </table>
+ <p data-sourcepos="6:1-6:3" dir="auto">bar</p>
wysiwyg: |-
- <p>| abc | def |
- | --- | --- |
- | bar | baz |
- bar</p>
-04_10__leaf_blocks__tables_extension__06:
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__006:
canonical: |
<p>| abc | def |
| --- |
| bar |</p>
static: |-
- <p data-sourcepos="1:1-3:7" dir="auto">| abc | def |&#x000A;| --- |&#x000A;| bar |</p>
+ <p data-sourcepos="1:1-3:7" dir="auto">| abc | def |
+ | --- |
+ | bar |</p>
wysiwyg: |-
<p>| abc | def |
| --- |
| bar |</p>
-04_10__leaf_blocks__tables_extension__07:
+04_10__leaf_blocks__tables_extension__007:
canonical: |
<table>
<thead>
@@ -2023,13 +2638,28 @@
</tbody>
</table>
static: |-
- <table data-sourcepos="1:1-4:19" dir="auto">&#x000A;<thead>&#x000A;<tr data-sourcepos="1:1-1:13">&#x000A;<th data-sourcepos="1:2-1:6">abc</th>&#x000A;<th data-sourcepos="1:8-1:12">def</th>&#x000A;</tr>&#x000A;</thead>&#x000A;<tbody>&#x000A;<tr data-sourcepos="3:1-3:7">&#x000A;<td data-sourcepos="3:2-3:6">bar</td>&#x000A;<td data-sourcepos="3:0-3:0"></td>&#x000A;</tr>&#x000A;<tr data-sourcepos="4:1-4:19">&#x000A;<td data-sourcepos="4:2-4:6">bar</td>&#x000A;<td data-sourcepos="4:8-4:12">baz</td>&#x000A;</tr>&#x000A;</tbody>&#x000A;</table>
+ <table data-sourcepos="1:1-4:19" dir="auto">
+ <thead>
+ <tr data-sourcepos="1:1-1:13">
+ <th data-sourcepos="1:2-1:6">abc</th>
+ <th data-sourcepos="1:8-1:12">def</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr data-sourcepos="3:1-3:7">
+ <td data-sourcepos="3:2-3:6">bar</td>
+ <td data-sourcepos="3:0-3:0"></td>
+ </tr>
+ <tr data-sourcepos="4:1-4:19">
+ <td data-sourcepos="4:2-4:6">bar</td>
+ <td data-sourcepos="4:8-4:12">baz</td>
+ </tr>
+ </tbody>
+ </table>
wysiwyg: |-
- <p>| abc | def |
- | --- | --- |
- | bar |
- | bar | baz | boo |</p>
-04_10__leaf_blocks__tables_extension__08:
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__008:
canonical: |
<table>
<thead>
@@ -2040,11 +2670,18 @@
</thead>
</table>
static: |-
- <table data-sourcepos="1:1-2:13" dir="auto">&#x000A;<thead>&#x000A;<tr data-sourcepos="1:1-1:13">&#x000A;<th data-sourcepos="1:2-1:6">abc</th>&#x000A;<th data-sourcepos="1:8-1:12">def</th>&#x000A;</tr>&#x000A;</thead>&#x000A;</table>
+ <table data-sourcepos="1:1-2:13" dir="auto">
+ <thead>
+ <tr data-sourcepos="1:1-1:13">
+ <th data-sourcepos="1:2-1:6">abc</th>
+ <th data-sourcepos="1:8-1:12">def</th>
+ </tr>
+ </thead>
+ </table>
wysiwyg: |-
- <p>| abc | def |
- | --- | --- |</p>
-05_01__container_blocks__block_quotes__01:
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+05_01__container_blocks__block_quotes__001:
canonical: |
<blockquote>
<h1>Foo</h1>
@@ -2052,11 +2689,16 @@
baz</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">&#x000A;<h1 data-sourcepos="1:3-1:7">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>&#x000A;<p data-sourcepos="2:3-3:5">bar&#x000A;baz</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:5" dir="auto">
+ <h1 data-sourcepos="1:3-1:7">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
+ <p data-sourcepos="2:3-3:5">bar
+ baz</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><h1>Foo</h1><p>bar
baz</p></blockquote>
-05_01__container_blocks__block_quotes__02:
+05_01__container_blocks__block_quotes__002:
canonical: |
<blockquote>
<h1>Foo</h1>
@@ -2064,11 +2706,16 @@
baz</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">&#x000A;<h1 data-sourcepos="1:2-1:6">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>&#x000A;<p data-sourcepos="2:2-3:5">bar&#x000A;baz</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:5" dir="auto">
+ <h1 data-sourcepos="1:2-1:6">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
+ <p data-sourcepos="2:2-3:5">bar
+ baz</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><h1>Foo</h1><p>bar
baz</p></blockquote>
-05_01__container_blocks__block_quotes__03:
+05_01__container_blocks__block_quotes__003:
canonical: |
<blockquote>
<h1>Foo</h1>
@@ -2076,23 +2723,33 @@
baz</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:4-3:6" dir="auto">&#x000A;<h1 data-sourcepos="1:6-1:10">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>&#x000A;<p data-sourcepos="2:6-3:6">bar&#x000A;baz</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:4-3:6" dir="auto">
+ <h1 data-sourcepos="1:6-1:10">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
+ <p data-sourcepos="2:6-3:6">bar
+ baz</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><h1>Foo</h1><p>bar
baz</p></blockquote>
-05_01__container_blocks__block_quotes__04:
+05_01__container_blocks__block_quotes__004:
canonical: |
<pre><code>&gt; # Foo
&gt; bar
&gt; baz
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&gt; # Foo</span>&#x000A;<span id="LC2" class="line" lang="plaintext">&gt; bar</span>&#x000A;<span id="LC3" class="line" lang="plaintext">&gt; baz</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">&gt; # Foo</span>
+ <span id="LC2" class="line" lang="plaintext">&gt; bar</span>
+ <span id="LC3" class="line" lang="plaintext">&gt; baz</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>&gt; # Foo
&gt; bar
&gt; baz</code></pre>
-05_01__container_blocks__block_quotes__05:
+05_01__container_blocks__block_quotes__005:
canonical: |
<blockquote>
<h1>Foo</h1>
@@ -2100,11 +2757,16 @@
baz</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:3" dir="auto">&#x000A;<h1 data-sourcepos="1:3-1:7">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>&#x000A;<p data-sourcepos="2:3-3:3">bar&#x000A;baz</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:3" dir="auto">
+ <h1 data-sourcepos="1:3-1:7">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
+ <p data-sourcepos="2:3-3:3">bar
+ baz</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><h1>Foo</h1><p>bar
baz</p></blockquote>
-05_01__container_blocks__block_quotes__06:
+05_01__container_blocks__block_quotes__006:
canonical: |
<blockquote>
<p>bar
@@ -2112,22 +2774,29 @@
foo</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">&#x000A;<p data-sourcepos="1:3-3:5">bar&#x000A;baz&#x000A;foo</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:5" dir="auto">
+ <p data-sourcepos="1:3-3:5">bar
+ baz
+ foo</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>bar
baz
foo</p></blockquote>
-05_01__container_blocks__block_quotes__07:
+05_01__container_blocks__block_quotes__007:
canonical: |
<blockquote>
<p>foo</p>
</blockquote>
<hr />
static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;</blockquote>&#x000A;<hr data-sourcepos="2:1-2:3">
+ <blockquote data-sourcepos="1:1-1:5" dir="auto">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ </blockquote>
+ <hr data-sourcepos="2:1-2:3">
wysiwyg: |-
<blockquote multiline="false"><p>foo</p></blockquote>
-05_01__container_blocks__block_quotes__08:
+05_01__container_blocks__block_quotes__008:
canonical: |
<blockquote>
<ul>
@@ -2138,10 +2807,17 @@
<li>bar</li>
</ul>
static: |-
- <blockquote data-sourcepos="1:1-1:7" dir="auto">&#x000A;<ul data-sourcepos="1:3-1:7">&#x000A;<li data-sourcepos="1:3-1:7">foo</li>&#x000A;</ul>&#x000A;</blockquote>&#x000A;<ul data-sourcepos="2:1-2:5" dir="auto">&#x000A;<li data-sourcepos="2:1-2:5">bar</li>&#x000A;</ul>
+ <blockquote data-sourcepos="1:1-1:7" dir="auto">
+ <ul data-sourcepos="1:3-1:7">
+ <li data-sourcepos="1:3-1:7">foo</li>
+ </ul>
+ </blockquote>
+ <ul data-sourcepos="2:1-2:5" dir="auto">
+ <li data-sourcepos="2:1-2:5">bar</li>
+ </ul>
wysiwyg: |-
<blockquote multiline="false"><ul bullet="*"><li><p>foo</p></li></ul></blockquote>
-05_01__container_blocks__block_quotes__09:
+05_01__container_blocks__block_quotes__009:
canonical: |
<blockquote>
<pre><code>foo
@@ -2150,10 +2826,19 @@
<pre><code>bar
</code></pre>
static: |-
- <blockquote data-sourcepos="1:1-1:9" dir="auto">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:7-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</blockquote>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="2:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <blockquote data-sourcepos="1:1-1:9" dir="auto">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:7-1:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </blockquote>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="2:5-2:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre></blockquote>
-05_01__container_blocks__block_quotes__10:
+05_01__container_blocks__block_quotes__010:
canonical: |
<blockquote>
<pre><code></code></pre>
@@ -2161,46 +2846,63 @@
<p>foo</p>
<pre><code></code></pre>
static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:3-2:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</blockquote>&#x000A;<p data-sourcepos="2:1-2:3" dir="auto">foo</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <blockquote data-sourcepos="1:1-1:5" dir="auto">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:3-2:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </blockquote>
+ <p data-sourcepos="2:1-2:3" dir="auto">foo</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code></code></pre></blockquote>
-05_01__container_blocks__block_quotes__11:
+05_01__container_blocks__block_quotes__011:
canonical: |
<blockquote>
<p>foo
- bar</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-2:9" dir="auto">&#x000A;<p data-sourcepos="1:3-2:9">foo&#x000A;- bar</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-2:9" dir="auto">
+ <p data-sourcepos="1:3-2:9">foo
+ - bar</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>foo
- bar</p></blockquote>
-05_01__container_blocks__block_quotes__12:
+05_01__container_blocks__block_quotes__012:
canonical: |
<blockquote>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-1:1" dir="auto">&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-1:1" dir="auto">
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p></p></blockquote>
-05_01__container_blocks__block_quotes__13:
+05_01__container_blocks__block_quotes__013:
canonical: |
<blockquote>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:2" dir="auto">&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:2" dir="auto">
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p></p></blockquote>
-05_01__container_blocks__block_quotes__14:
+05_01__container_blocks__block_quotes__014:
canonical: |
<blockquote>
<p>foo</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:3" dir="auto">&#x000A;<p data-sourcepos="2:3-2:5">foo</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:3" dir="auto">
+ <p data-sourcepos="2:3-2:5">foo</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>foo</p></blockquote>
-05_01__container_blocks__block_quotes__15:
+05_01__container_blocks__block_quotes__015:
canonical: |
<blockquote>
<p>foo</p>
@@ -2209,41 +2911,55 @@
<p>bar</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;</blockquote>&#x000A;<blockquote data-sourcepos="3:1-3:5" dir="auto">&#x000A;<p data-sourcepos="3:3-3:5">bar</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-1:5" dir="auto">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ </blockquote>
+ <blockquote data-sourcepos="3:1-3:5" dir="auto">
+ <p data-sourcepos="3:3-3:5">bar</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>foo</p></blockquote>
-05_01__container_blocks__block_quotes__16:
+05_01__container_blocks__block_quotes__016:
canonical: |
<blockquote>
<p>foo
bar</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-2:5" dir="auto">&#x000A;<p data-sourcepos="1:3-2:5">foo&#x000A;bar</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-2:5" dir="auto">
+ <p data-sourcepos="1:3-2:5">foo
+ bar</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>foo
bar</p></blockquote>
-05_01__container_blocks__block_quotes__17:
+05_01__container_blocks__block_quotes__017:
canonical: |
<blockquote>
<p>foo</p>
<p>bar</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;<p data-sourcepos="3:3-3:5">bar</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:5" dir="auto">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ <p data-sourcepos="3:3-3:5">bar</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>foo</p><p>bar</p></blockquote>
-05_01__container_blocks__block_quotes__18:
+05_01__container_blocks__block_quotes__018:
canonical: |
<p>foo</p>
<blockquote>
<p>bar</p>
</blockquote>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">foo</p>&#x000A;<blockquote data-sourcepos="2:1-2:5" dir="auto">&#x000A;<p data-sourcepos="2:3-2:5">bar</p>&#x000A;</blockquote>
+ <p data-sourcepos="1:1-1:3" dir="auto">foo</p>
+ <blockquote data-sourcepos="2:1-2:5" dir="auto">
+ <p data-sourcepos="2:3-2:5">bar</p>
+ </blockquote>
wysiwyg: |-
<p>foo</p>
-05_01__container_blocks__block_quotes__19:
+05_01__container_blocks__block_quotes__019:
canonical: |
<blockquote>
<p>aaa</p>
@@ -2253,41 +2969,56 @@
<p>bbb</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">aaa</p>&#x000A;</blockquote>&#x000A;<hr data-sourcepos="2:1-2:3">&#x000A;<blockquote data-sourcepos="3:1-3:5" dir="auto">&#x000A;<p data-sourcepos="3:3-3:5">bbb</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-1:5" dir="auto">
+ <p data-sourcepos="1:3-1:5">aaa</p>
+ </blockquote>
+ <hr data-sourcepos="2:1-2:3">
+ <blockquote data-sourcepos="3:1-3:5" dir="auto">
+ <p data-sourcepos="3:3-3:5">bbb</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>aaa</p></blockquote>
-05_01__container_blocks__block_quotes__20:
+05_01__container_blocks__block_quotes__020:
canonical: |
<blockquote>
<p>bar
baz</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-2:3" dir="auto">&#x000A;<p data-sourcepos="1:3-2:3">bar&#x000A;baz</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-2:3" dir="auto">
+ <p data-sourcepos="1:3-2:3">bar
+ baz</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><p>bar
baz</p></blockquote>
-05_01__container_blocks__block_quotes__21:
+05_01__container_blocks__block_quotes__021:
canonical: |
<blockquote>
<p>bar</p>
</blockquote>
<p>baz</p>
static: |-
- <blockquote data-sourcepos="1:1-1:5" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">bar</p>&#x000A;</blockquote>&#x000A;<p data-sourcepos="3:1-3:3" dir="auto">baz</p>
+ <blockquote data-sourcepos="1:1-1:5" dir="auto">
+ <p data-sourcepos="1:3-1:5">bar</p>
+ </blockquote>
+ <p data-sourcepos="3:1-3:3" dir="auto">baz</p>
wysiwyg: |-
<blockquote multiline="false"><p>bar</p></blockquote>
-05_01__container_blocks__block_quotes__22:
+05_01__container_blocks__block_quotes__022:
canonical: |
<blockquote>
<p>bar</p>
</blockquote>
<p>baz</p>
static: |-
- <blockquote data-sourcepos="1:1-2:1" dir="auto">&#x000A;<p data-sourcepos="1:3-1:5">bar</p>&#x000A;</blockquote>&#x000A;<p data-sourcepos="3:1-3:3" dir="auto">baz</p>
+ <blockquote data-sourcepos="1:1-2:1" dir="auto">
+ <p data-sourcepos="1:3-1:5">bar</p>
+ </blockquote>
+ <p data-sourcepos="3:1-3:3" dir="auto">baz</p>
wysiwyg: |-
<blockquote multiline="false"><p>bar</p></blockquote>
-05_01__container_blocks__block_quotes__23:
+05_01__container_blocks__block_quotes__023:
canonical: |
<blockquote>
<blockquote>
@@ -2298,11 +3029,18 @@
</blockquote>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-2:3" dir="auto">&#x000A;<blockquote data-sourcepos="1:3-2:3">&#x000A;<blockquote data-sourcepos="1:5-2:3">&#x000A;<p data-sourcepos="1:7-2:3">foo&#x000A;bar</p>&#x000A;</blockquote>&#x000A;</blockquote>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-2:3" dir="auto">
+ <blockquote data-sourcepos="1:3-2:3">
+ <blockquote data-sourcepos="1:5-2:3">
+ <p data-sourcepos="1:7-2:3">foo
+ bar</p>
+ </blockquote>
+ </blockquote>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><blockquote multiline="false"><blockquote multiline="false"><p>foo
bar</p></blockquote></blockquote></blockquote>
-05_01__container_blocks__block_quotes__24:
+05_01__container_blocks__block_quotes__024:
canonical: |
<blockquote>
<blockquote>
@@ -2314,12 +3052,20 @@
</blockquote>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:5" dir="auto">&#x000A;<blockquote data-sourcepos="1:2-3:5">&#x000A;<blockquote data-sourcepos="1:3-3:5">&#x000A;<p data-sourcepos="1:5-3:5">foo&#x000A;bar&#x000A;baz</p>&#x000A;</blockquote>&#x000A;</blockquote>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:5" dir="auto">
+ <blockquote data-sourcepos="1:2-3:5">
+ <blockquote data-sourcepos="1:3-3:5">
+ <p data-sourcepos="1:5-3:5">foo
+ bar
+ baz</p>
+ </blockquote>
+ </blockquote>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><blockquote multiline="false"><blockquote multiline="false"><p>foo
bar
baz</p></blockquote></blockquote></blockquote>
-05_01__container_blocks__block_quotes__25:
+05_01__container_blocks__block_quotes__025:
canonical: |
<blockquote>
<pre><code>code
@@ -2329,10 +3075,18 @@
<p>not code</p>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-1:10" dir="auto">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:7-1:10" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</blockquote>&#x000A;<blockquote data-sourcepos="3:1-3:13" dir="auto">&#x000A;<p data-sourcepos="3:6-3:13">not code</p>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-1:10" dir="auto">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:7-1:10" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </blockquote>
+ <blockquote data-sourcepos="3:1-3:13" dir="auto">
+ <p data-sourcepos="3:6-3:13">not code</p>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><pre class="content-editor-code-block undefined code highlight"><code>code</code></pre></blockquote>
-05_02__container_blocks__list_items__01:
+05_02__container_blocks__list_items__001:
canonical: |
<p>A paragraph
with two lines.</p>
@@ -2342,11 +3096,19 @@
<p>A block quote.</p>
</blockquote>
static: |-
- <p data-sourcepos="1:1-2:15" dir="auto">A paragraph&#x000A;with two lines.</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="4:5-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<blockquote data-sourcepos="6:1-6:16" dir="auto">&#x000A;<p data-sourcepos="6:3-6:16">A block quote.</p>&#x000A;</blockquote>
+ <p data-sourcepos="1:1-2:15" dir="auto">A paragraph
+ with two lines.</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="4:5-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <blockquote data-sourcepos="6:1-6:16" dir="auto">
+ <p data-sourcepos="6:3-6:16">A block quote.</p>
+ </blockquote>
wysiwyg: |-
<p>A paragraph
with two lines.</p>
-05_02__container_blocks__list_items__02:
+05_02__container_blocks__list_items__002:
canonical: |
<ol>
<li>
@@ -2360,21 +3122,36 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-6:20" dir="auto">&#x000A;<li data-sourcepos="1:1-6:20">&#x000A;<p data-sourcepos="1:5-2:19">A paragraph&#x000A;with two lines.</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="4:9-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<blockquote data-sourcepos="6:5-6:20">&#x000A;<p data-sourcepos="6:7-6:20">A block quote.</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-6:20" dir="auto">
+ <li data-sourcepos="1:1-6:20">
+ <p data-sourcepos="1:5-2:19">A paragraph
+ with two lines.</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="4:9-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <blockquote data-sourcepos="6:5-6:20">
+ <p data-sourcepos="6:7-6:20">A block quote.</p>
+ </blockquote>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>A paragraph
with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__03:
+05_02__container_blocks__list_items__003:
canonical: |
<ul>
<li>one</li>
</ul>
<p>two</p>
static: |-
- <ul data-sourcepos="1:1-2:0" dir="auto">&#x000A;<li data-sourcepos="1:1-2:0">one</li>&#x000A;</ul>&#x000A;<p data-sourcepos="3:2-3:4" dir="auto">two</p>
+ <ul data-sourcepos="1:1-2:0" dir="auto">
+ <li data-sourcepos="1:1-2:0">one</li>
+ </ul>
+ <p data-sourcepos="3:2-3:4" dir="auto">two</p>
wysiwyg: |-
<ul bullet="*"><li><p>one</p></li></ul>
-05_02__container_blocks__list_items__04:
+05_02__container_blocks__list_items__004:
canonical: |
<ul>
<li>
@@ -2383,10 +3160,15 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">&#x000A;<li data-sourcepos="1:1-3:5">&#x000A;<p data-sourcepos="1:3-1:5">one</p>&#x000A;<p data-sourcepos="3:3-3:5">two</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-3:5" dir="auto">
+ <li data-sourcepos="1:1-3:5">
+ <p data-sourcepos="1:3-1:5">one</p>
+ <p data-sourcepos="3:3-3:5">two</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>one</p><p>two</p></li></ul>
-05_02__container_blocks__list_items__05:
+05_02__container_blocks__list_items__005:
canonical: |
<ul>
<li>one</li>
@@ -2394,10 +3176,16 @@
<pre><code> two
</code></pre>
static: |-
- <ul data-sourcepos="1:2-2:0" dir="auto">&#x000A;<li data-sourcepos="1:2-2:0">one</li>&#x000A;</ul>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:5-3:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> two</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <ul data-sourcepos="1:2-2:0" dir="auto">
+ <li data-sourcepos="1:2-2:0">one</li>
+ </ul>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:5-3:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> two</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<ul bullet="*"><li><p>one</p></li></ul>
-05_02__container_blocks__list_items__06:
+05_02__container_blocks__list_items__006:
canonical: |
<ul>
<li>
@@ -2406,10 +3194,15 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:2-3:9" dir="auto">&#x000A;<li data-sourcepos="1:2-3:9">&#x000A;<p data-sourcepos="1:7-1:9">one</p>&#x000A;<p data-sourcepos="3:7-3:9">two</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:2-3:9" dir="auto">
+ <li data-sourcepos="1:2-3:9">
+ <p data-sourcepos="1:7-1:9">one</p>
+ <p data-sourcepos="3:7-3:9">two</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>one</p><p>two</p></li></ul>
-05_02__container_blocks__list_items__07:
+05_02__container_blocks__list_items__007:
canonical: |
<blockquote>
<blockquote>
@@ -2422,10 +3215,19 @@
</blockquote>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:4-3:10" dir="auto">&#x000A;<blockquote data-sourcepos="1:6-3:10">&#x000A;<ol data-sourcepos="1:8-3:10">&#x000A;<li data-sourcepos="1:8-3:10">&#x000A;<p data-sourcepos="1:12-1:14">one</p>&#x000A;<p data-sourcepos="3:8-3:10">two</p>&#x000A;</li>&#x000A;</ol>&#x000A;</blockquote>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:4-3:10" dir="auto">
+ <blockquote data-sourcepos="1:6-3:10">
+ <ol data-sourcepos="1:8-3:10">
+ <li data-sourcepos="1:8-3:10">
+ <p data-sourcepos="1:12-1:14">one</p>
+ <p data-sourcepos="3:8-3:10">two</p>
+ </li>
+ </ol>
+ </blockquote>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><blockquote multiline="false"><ol parens="false"><li><p>one</p><p>two</p></li></ol></blockquote></blockquote>
-05_02__container_blocks__list_items__08:
+05_02__container_blocks__list_items__008:
canonical: |
<blockquote>
<blockquote>
@@ -2436,18 +3238,26 @@
</blockquote>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-3:10" dir="auto">&#x000A;<blockquote data-sourcepos="1:2-3:10">&#x000A;<ul data-sourcepos="1:3-2:2">&#x000A;<li data-sourcepos="1:3-2:2">one</li>&#x000A;</ul>&#x000A;<p data-sourcepos="3:8-3:10">two</p>&#x000A;</blockquote>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-3:10" dir="auto">
+ <blockquote data-sourcepos="1:2-3:10">
+ <ul data-sourcepos="1:3-2:2">
+ <li data-sourcepos="1:3-2:2">one</li>
+ </ul>
+ <p data-sourcepos="3:8-3:10">two</p>
+ </blockquote>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><blockquote multiline="false"><ul bullet="*"><li><p>one</p></li></ul><p>two</p></blockquote></blockquote>
-05_02__container_blocks__list_items__09:
+05_02__container_blocks__list_items__009:
canonical: |
<p>-one</p>
<p>2.two</p>
static: |-
- <p data-sourcepos="1:1-1:4" dir="auto">-one</p>&#x000A;<p data-sourcepos="3:1-3:5" dir="auto">2.two</p>
+ <p data-sourcepos="1:1-1:4" dir="auto">-one</p>
+ <p data-sourcepos="3:1-3:5" dir="auto">2.two</p>
wysiwyg: |-
<p>-one</p>
-05_02__container_blocks__list_items__10:
+05_02__container_blocks__list_items__010:
canonical: |
<ul>
<li>
@@ -2456,10 +3266,15 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:5" dir="auto">&#x000A;<li data-sourcepos="1:1-4:5">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;<p data-sourcepos="4:3-4:5">bar</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:5" dir="auto">
+ <li data-sourcepos="1:1-4:5">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ <p data-sourcepos="4:3-4:5">bar</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-05_02__container_blocks__list_items__11:
+05_02__container_blocks__list_items__011:
canonical: |
<ol>
<li>
@@ -2473,10 +3288,22 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-9:9" dir="auto">&#x000A;<li data-sourcepos="1:1-9:9">&#x000A;<p data-sourcepos="1:5-1:7">foo</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:5-5:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="7:5-7:7">baz</p>&#x000A;<blockquote data-sourcepos="9:5-9:9">&#x000A;<p data-sourcepos="9:7-9:9">bam</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-9:9" dir="auto">
+ <li data-sourcepos="1:1-9:9">
+ <p data-sourcepos="1:5-1:7">foo</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:5-5:7" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="7:5-7:7">baz</p>
+ <blockquote data-sourcepos="9:5-9:9">
+ <p data-sourcepos="9:7-9:9">bam</p>
+ </blockquote>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre><p>baz</p><blockquote multiline="false"><p>bam</p></blockquote></li></ol>
-05_02__container_blocks__list_items__12:
+05_02__container_blocks__list_items__012:
canonical: |
<ul>
<li>
@@ -2489,54 +3316,71 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-6:9" dir="auto">&#x000A;<li data-sourcepos="1:1-6:9">&#x000A;<p data-sourcepos="1:3-1:5">Foo</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:7-6:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span>&#x000A;<span id="LC2" class="line" lang="plaintext"></span>&#x000A;<span id="LC3" class="line" lang="plaintext"></span>&#x000A;<span id="LC4" class="line" lang="plaintext">baz</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-6:9" dir="auto">
+ <li data-sourcepos="1:1-6:9">
+ <p data-sourcepos="1:3-1:5">Foo</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:7-6:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span>
+ <span id="LC2" class="line" lang="plaintext"></span>
+ <span id="LC3" class="line" lang="plaintext"></span>
+ <span id="LC4" class="line" lang="plaintext">baz</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>Foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar
baz</code></pre></li></ul>
-05_02__container_blocks__list_items__13:
+05_02__container_blocks__list_items__013:
canonical: |
<ol start="123456789">
<li>ok</li>
</ol>
static: |-
- <ol start="123456789" data-sourcepos="1:1-1:13" dir="auto">&#x000A;<li data-sourcepos="1:1-1:13">ok</li>&#x000A;</ol>
+ <ol start="123456789" data-sourcepos="1:1-1:13" dir="auto">
+ <li data-sourcepos="1:1-1:13">ok</li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>ok</p></li></ol>
-05_02__container_blocks__list_items__14:
+05_02__container_blocks__list_items__014:
canonical: |
<p>1234567890. not ok</p>
static: |-
<p data-sourcepos="1:1-1:18" dir="auto">1234567890. not ok</p>
wysiwyg: |-
<p>1234567890. not ok</p>
-05_02__container_blocks__list_items__15:
+05_02__container_blocks__list_items__015:
canonical: |
<ol start="0">
<li>ok</li>
</ol>
static: |-
- <ol start="0" data-sourcepos="1:1-1:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">ok</li>&#x000A;</ol>
+ <ol start="0" data-sourcepos="1:1-1:5" dir="auto">
+ <li data-sourcepos="1:1-1:5">ok</li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>ok</p></li></ol>
-05_02__container_blocks__list_items__16:
+05_02__container_blocks__list_items__016:
canonical: |
<ol start="3">
<li>ok</li>
</ol>
static: |-
- <ol start="3" data-sourcepos="1:1-1:7" dir="auto">&#x000A;<li data-sourcepos="1:1-1:7">ok</li>&#x000A;</ol>
+ <ol start="3" data-sourcepos="1:1-1:7" dir="auto">
+ <li data-sourcepos="1:1-1:7">ok</li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>ok</p></li></ol>
-05_02__container_blocks__list_items__17:
+05_02__container_blocks__list_items__017:
canonical: |
<p>-1. not ok</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">-1. not ok</p>
wysiwyg: |-
<p>-1. not ok</p>
-05_02__container_blocks__list_items__18:
+05_02__container_blocks__list_items__018:
canonical: |
<ul>
<li>
@@ -2546,10 +3390,18 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-3:9" dir="auto">&#x000A;<li data-sourcepos="1:1-3:9">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:7-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-3:9" dir="auto">
+ <li data-sourcepos="1:1-3:9">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:7-3:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre></li></ul>
-05_02__container_blocks__list_items__19:
+05_02__container_blocks__list_items__019:
canonical: |
<ol start="10">
<li>
@@ -2559,10 +3411,18 @@
</li>
</ol>
static: |-
- <ol start="10" data-sourcepos="1:3-3:14" dir="auto">&#x000A;<li data-sourcepos="1:3-3:14">&#x000A;<p data-sourcepos="1:8-1:10">foo</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:12-3:14" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ol>
+ <ol start="10" data-sourcepos="1:3-3:14" dir="auto">
+ <li data-sourcepos="1:3-3:14">
+ <p data-sourcepos="1:8-1:10">foo</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:12-3:14" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>foo</p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre></li></ol>
-05_02__container_blocks__list_items__20:
+05_02__container_blocks__list_items__020:
canonical: |
<pre><code>indented code
</code></pre>
@@ -2570,10 +3430,18 @@
<pre><code>more code
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="3:1-3:9" dir="auto">paragraph</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="5:5-5:13" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="3:1-3:9" dir="auto">paragraph</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="5:5-5:13" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre>
-05_02__container_blocks__list_items__21:
+05_02__container_blocks__list_items__021:
canonical: |
<ol>
<li>
@@ -2585,10 +3453,22 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-5:16" dir="auto">&#x000A;<li data-sourcepos="1:1-5:16">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:8-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="3:4-3:12">paragraph</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="5:8-5:16" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-5:16" dir="auto">
+ <li data-sourcepos="1:1-5:16">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:8-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="3:4-3:12">paragraph</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="5:8-5:16" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><p>paragraph</p><pre class="content-editor-code-block undefined code highlight"><code>more code</code></pre></li></ol>
-05_02__container_blocks__list_items__22:
+05_02__container_blocks__list_items__022:
canonical: |
<ol>
<li>
@@ -2600,28 +3480,44 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-5:16" dir="auto">&#x000A;<li data-sourcepos="1:1-5:16">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:8-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="3:4-3:12">paragraph</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="5:8-5:16" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-5:16" dir="auto">
+ <li data-sourcepos="1:1-5:16">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:8-2:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="3:4-3:12">paragraph</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="5:8-5:16" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">more code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code> indented code</code></pre><p>paragraph</p><pre class="content-editor-code-block undefined code highlight"><code>more code</code></pre></li></ol>
-05_02__container_blocks__list_items__23:
+05_02__container_blocks__list_items__023:
canonical: |
<p>foo</p>
<p>bar</p>
static: |-
- <p data-sourcepos="1:4-1:6" dir="auto">foo</p>&#x000A;<p data-sourcepos="3:1-3:3" dir="auto">bar</p>
+ <p data-sourcepos="1:4-1:6" dir="auto">foo</p>
+ <p data-sourcepos="3:1-3:3" dir="auto">bar</p>
wysiwyg: |-
<p>foo</p>
-05_02__container_blocks__list_items__24:
+05_02__container_blocks__list_items__024:
canonical: |
<ul>
<li>foo</li>
</ul>
<p>bar</p>
static: |-
- <ul data-sourcepos="1:1-2:0" dir="auto">&#x000A;<li data-sourcepos="1:1-2:0">foo</li>&#x000A;</ul>&#x000A;<p data-sourcepos="3:3-3:5" dir="auto">bar</p>
+ <ul data-sourcepos="1:1-2:0" dir="auto">
+ <li data-sourcepos="1:1-2:0">foo</li>
+ </ul>
+ <p data-sourcepos="3:3-3:5" dir="auto">bar</p>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li></ul>
-05_02__container_blocks__list_items__25:
+05_02__container_blocks__list_items__025:
canonical: |
<ul>
<li>
@@ -2630,10 +3526,15 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-3:6" dir="auto">&#x000A;<li data-sourcepos="1:1-3:6">&#x000A;<p data-sourcepos="1:4-1:6">foo</p>&#x000A;<p data-sourcepos="3:4-3:6">bar</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-3:6" dir="auto">
+ <li data-sourcepos="1:1-3:6">
+ <p data-sourcepos="1:4-1:6">foo</p>
+ <p data-sourcepos="3:4-3:6">bar</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p><p>bar</p></li></ul>
-05_02__container_blocks__list_items__26:
+05_02__container_blocks__list_items__026:
canonical: |
<ul>
<li>foo</li>
@@ -2647,29 +3548,49 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-8:9" dir="auto">&#x000A;<li data-sourcepos="1:1-2:5">foo</li>&#x000A;<li data-sourcepos="3:1-6:5">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="4:3-6:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;<li data-sourcepos="7:1-8:9">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="8:7-8:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">baz</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-8:9" dir="auto">
+ <li data-sourcepos="1:1-2:5">foo</li>
+ <li data-sourcepos="3:1-6:5">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="4:3-6:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">bar</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ <li data-sourcepos="7:1-8:9">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="8:7-8:9" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">baz</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>bar</code></pre></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>baz</code></pre></li></ul>
-05_02__container_blocks__list_items__27:
+05_02__container_blocks__list_items__027:
canonical: |
<ul>
<li>foo</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-2:5" dir="auto">&#x000A;<li data-sourcepos="1:1-2:5">foo</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-2:5" dir="auto">
+ <li data-sourcepos="1:1-2:5">foo</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li></ul>
-05_02__container_blocks__list_items__28:
+05_02__container_blocks__list_items__028:
canonical: |
<ul>
<li></li>
</ul>
<p>foo</p>
static: |-
- <ul data-sourcepos="1:1-2:0" dir="auto">&#x000A;<li data-sourcepos="1:1-1:1">&#x000A;</li>&#x000A;</ul>&#x000A;<p data-sourcepos="3:3-3:5" dir="auto">foo</p>
+ <ul data-sourcepos="1:1-2:0" dir="auto">
+ <li data-sourcepos="1:1-1:1">
+ </li>
+ </ul>
+ <p data-sourcepos="3:3-3:5" dir="auto">foo</p>
wysiwyg: |-
<ul bullet="*"><li><p></p></li></ul>
-05_02__container_blocks__list_items__29:
+05_02__container_blocks__list_items__029:
canonical: |
<ul>
<li>foo</li>
@@ -2677,10 +3598,15 @@
<li>bar</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">foo</li>&#x000A;<li data-sourcepos="2:1-2:1">&#x000A;</li>&#x000A;<li data-sourcepos="3:1-3:5">bar</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-3:5" dir="auto">
+ <li data-sourcepos="1:1-1:5">foo</li>
+ <li data-sourcepos="2:1-2:1">
+ </li>
+ <li data-sourcepos="3:1-3:5">bar</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li><li><p></p></li><li><p>bar</p></li></ul>
-05_02__container_blocks__list_items__30:
+05_02__container_blocks__list_items__030:
canonical: |
<ul>
<li>foo</li>
@@ -2688,10 +3614,15 @@
<li>bar</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-3:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">foo</li>&#x000A;<li data-sourcepos="2:1-2:4">&#x000A;</li>&#x000A;<li data-sourcepos="3:1-3:5">bar</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-3:5" dir="auto">
+ <li data-sourcepos="1:1-1:5">foo</li>
+ <li data-sourcepos="2:1-2:4">
+ </li>
+ <li data-sourcepos="3:1-3:5">bar</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li><li><p></p></li><li><p>bar</p></li></ul>
-05_02__container_blocks__list_items__31:
+05_02__container_blocks__list_items__031:
canonical: |
<ol>
<li>foo</li>
@@ -2699,30 +3630,41 @@
<li>bar</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-3:6" dir="auto">&#x000A;<li data-sourcepos="1:1-1:6">foo</li>&#x000A;<li data-sourcepos="2:1-2:2">&#x000A;</li>&#x000A;<li data-sourcepos="3:1-3:6">bar</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-3:6" dir="auto">
+ <li data-sourcepos="1:1-1:6">foo</li>
+ <li data-sourcepos="2:1-2:2">
+ </li>
+ <li data-sourcepos="3:1-3:6">bar</li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>foo</p></li><li><p></p></li><li><p>bar</p></li></ol>
-05_02__container_blocks__list_items__32:
+05_02__container_blocks__list_items__032:
canonical: |
<ul>
<li></li>
</ul>
static: |-
- <ul data-sourcepos="1:1-1:1" dir="auto">&#x000A;<li data-sourcepos="1:1-1:1">&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-1:1" dir="auto">
+ <li data-sourcepos="1:1-1:1">
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p></p></li></ul>
-05_02__container_blocks__list_items__33:
+05_02__container_blocks__list_items__033:
canonical: |
<p>foo
*</p>
<p>foo
1.</p>
static: |-
- <p data-sourcepos="1:1-2:1" dir="auto">foo&#x000A;*</p>&#x000A;<p data-sourcepos="4:1-5:2" dir="auto">foo&#x000A;1.</p>
+ <p data-sourcepos="1:1-2:1" dir="auto">foo
+ *</p>
+ <p data-sourcepos="4:1-5:2" dir="auto">foo
+ 1.</p>
wysiwyg: |-
<p>foo
*</p>
-05_02__container_blocks__list_items__34:
+05_02__container_blocks__list_items__034:
canonical: |
<ol>
<li>
@@ -2736,11 +3678,23 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:2-6:21" dir="auto">&#x000A;<li data-sourcepos="1:2-6:21">&#x000A;<p data-sourcepos="1:6-2:20">A paragraph&#x000A;with two lines.</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="4:10-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<blockquote data-sourcepos="6:6-6:21">&#x000A;<p data-sourcepos="6:8-6:21">A block quote.</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:2-6:21" dir="auto">
+ <li data-sourcepos="1:2-6:21">
+ <p data-sourcepos="1:6-2:20">A paragraph
+ with two lines.</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="4:10-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <blockquote data-sourcepos="6:6-6:21">
+ <p data-sourcepos="6:8-6:21">A block quote.</p>
+ </blockquote>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>A paragraph
with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__35:
+05_02__container_blocks__list_items__035:
canonical: |
<ol>
<li>
@@ -2754,11 +3708,23 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:3-6:22" dir="auto">&#x000A;<li data-sourcepos="1:3-6:22">&#x000A;<p data-sourcepos="1:7-2:21">A paragraph&#x000A;with two lines.</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="4:11-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<blockquote data-sourcepos="6:7-6:22">&#x000A;<p data-sourcepos="6:9-6:22">A block quote.</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:3-6:22" dir="auto">
+ <li data-sourcepos="1:3-6:22">
+ <p data-sourcepos="1:7-2:21">A paragraph
+ with two lines.</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="4:11-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <blockquote data-sourcepos="6:7-6:22">
+ <p data-sourcepos="6:9-6:22">A block quote.</p>
+ </blockquote>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>A paragraph
with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__36:
+05_02__container_blocks__list_items__036:
canonical: |
<ol>
<li>
@@ -2772,11 +3738,23 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:4-6:23" dir="auto">&#x000A;<li data-sourcepos="1:4-6:23">&#x000A;<p data-sourcepos="1:8-2:22">A paragraph&#x000A;with two lines.</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="4:12-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<blockquote data-sourcepos="6:8-6:23">&#x000A;<p data-sourcepos="6:10-6:23">A block quote.</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:4-6:23" dir="auto">
+ <li data-sourcepos="1:4-6:23">
+ <p data-sourcepos="1:8-2:22">A paragraph
+ with two lines.</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="4:12-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <blockquote data-sourcepos="6:8-6:23">
+ <p data-sourcepos="6:10-6:23">A block quote.</p>
+ </blockquote>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>A paragraph
with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__37:
+05_02__container_blocks__list_items__037:
canonical: |
<pre><code>1. A paragraph
with two lines.
@@ -2786,7 +3764,15 @@
&gt; A block quote.
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-6:24" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">1. A paragraph</span>&#x000A;<span id="LC2" class="line" lang="plaintext"> with two lines.</span>&#x000A;<span id="LC3" class="line" lang="plaintext"></span>&#x000A;<span id="LC4" class="line" lang="plaintext"> indented code</span>&#x000A;<span id="LC5" class="line" lang="plaintext"></span>&#x000A;<span id="LC6" class="line" lang="plaintext"> &gt; A block quote.</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-6:24" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">1. A paragraph</span>
+ <span id="LC2" class="line" lang="plaintext"> with two lines.</span>
+ <span id="LC3" class="line" lang="plaintext"></span>
+ <span id="LC4" class="line" lang="plaintext"> indented code</span>
+ <span id="LC5" class="line" lang="plaintext"></span>
+ <span id="LC6" class="line" lang="plaintext"> &gt; A block quote.</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>1. A paragraph
with two lines.
@@ -2794,7 +3780,7 @@
indented code
&gt; A block quote.</code></pre>
-05_02__container_blocks__list_items__38:
+05_02__container_blocks__list_items__038:
canonical: |
<ol>
<li>
@@ -2808,22 +3794,37 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:3-6:22" dir="auto">&#x000A;<li data-sourcepos="1:3-6:22">&#x000A;<p data-sourcepos="1:7-2:15">A paragraph&#x000A;with two lines.</p>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="4:11-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<blockquote data-sourcepos="6:7-6:22">&#x000A;<p data-sourcepos="6:9-6:22">A block quote.</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:3-6:22" dir="auto">
+ <li data-sourcepos="1:3-6:22">
+ <p data-sourcepos="1:7-2:15">A paragraph
+ with two lines.</p>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="4:11-5:0" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">indented code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <blockquote data-sourcepos="6:7-6:22">
+ <p data-sourcepos="6:9-6:22">A block quote.</p>
+ </blockquote>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>A paragraph
with two lines.</p><pre class="content-editor-code-block undefined code highlight"><code>indented code</code></pre><blockquote multiline="false"><p>A block quote.</p></blockquote></li></ol>
-05_02__container_blocks__list_items__39:
+05_02__container_blocks__list_items__039:
canonical: |
<ol>
<li>A paragraph
with two lines.</li>
</ol>
static: |-
- <ol data-sourcepos="1:3-2:19" dir="auto">&#x000A;<li data-sourcepos="1:3-2:19">A paragraph&#x000A;with two lines.</li>&#x000A;</ol>
+ <ol data-sourcepos="1:3-2:19" dir="auto">
+ <li data-sourcepos="1:3-2:19">A paragraph
+ with two lines.</li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>A paragraph
with two lines.</p></li></ol>
-05_02__container_blocks__list_items__40:
+05_02__container_blocks__list_items__040:
canonical: |
<blockquote>
<ol>
@@ -2836,11 +3837,20 @@
</ol>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-2:15" dir="auto">&#x000A;<ol data-sourcepos="1:3-2:15">&#x000A;<li data-sourcepos="1:3-2:15">&#x000A;<blockquote data-sourcepos="1:6-2:15">&#x000A;<p data-sourcepos="1:8-2:15">Blockquote&#x000A;continued here.</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-2:15" dir="auto">
+ <ol data-sourcepos="1:3-2:15">
+ <li data-sourcepos="1:3-2:15">
+ <blockquote data-sourcepos="1:6-2:15">
+ <p data-sourcepos="1:8-2:15">Blockquote
+ continued here.</p>
+ </blockquote>
+ </li>
+ </ol>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><ol parens="false"><li><p></p><blockquote multiline="false"><p>Blockquote
continued here.</p></blockquote></li></ol></blockquote>
-05_02__container_blocks__list_items__41:
+05_02__container_blocks__list_items__041:
canonical: |
<blockquote>
<ol>
@@ -2853,11 +3863,20 @@
</ol>
</blockquote>
static: |-
- <blockquote data-sourcepos="1:1-2:17" dir="auto">&#x000A;<ol data-sourcepos="1:3-2:17">&#x000A;<li data-sourcepos="1:3-2:17">&#x000A;<blockquote data-sourcepos="1:6-2:17">&#x000A;<p data-sourcepos="1:8-2:17">Blockquote&#x000A;continued here.</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;</ol>&#x000A;</blockquote>
+ <blockquote data-sourcepos="1:1-2:17" dir="auto">
+ <ol data-sourcepos="1:3-2:17">
+ <li data-sourcepos="1:3-2:17">
+ <blockquote data-sourcepos="1:6-2:17">
+ <p data-sourcepos="1:8-2:17">Blockquote
+ continued here.</p>
+ </blockquote>
+ </li>
+ </ol>
+ </blockquote>
wysiwyg: |-
<blockquote multiline="false"><ol parens="false"><li><p></p><blockquote multiline="false"><p>Blockquote
continued here.</p></blockquote></li></ol></blockquote>
-05_02__container_blocks__list_items__42:
+05_02__container_blocks__list_items__042:
canonical: |
<ul>
<li>foo
@@ -2875,13 +3894,24 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:11" dir="auto">&#x000A;<li data-sourcepos="1:1-4:11">foo&#x000A;<ul data-sourcepos="2:3-4:11">&#x000A;<li data-sourcepos="2:3-4:11">bar&#x000A;<ul data-sourcepos="3:5-4:11">&#x000A;<li data-sourcepos="3:5-4:11">baz&#x000A;<ul data-sourcepos="4:7-4:11">&#x000A;<li data-sourcepos="4:7-4:11">boo</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:11" dir="auto">
+ <li data-sourcepos="1:1-4:11">foo
+ <ul data-sourcepos="2:3-4:11">
+ <li data-sourcepos="2:3-4:11">bar
+ <ul data-sourcepos="3:5-4:11">
+ <li data-sourcepos="3:5-4:11">baz
+ <ul data-sourcepos="4:7-4:11">
+ <li data-sourcepos="4:7-4:11">boo</li>
+ </ul>
+ </li>
+ </ul>
+ </li>
+ </ul>
+ </li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>foo
- </p><ul bullet="*"><li><p>bar
- </p><ul bullet="*"><li><p>baz
- </p><ul bullet="*"><li><p>boo</p></li></ul></li></ul></li></ul></li></ul>
-05_02__container_blocks__list_items__43:
+ <ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p><ul bullet="*"><li><p>baz</p><ul bullet="*"><li><p>boo</p></li></ul></li></ul></li></ul></li></ul>
+05_02__container_blocks__list_items__043:
canonical: |
<ul>
<li>foo</li>
@@ -2890,10 +3920,15 @@
<li>boo</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:8" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">foo</li>&#x000A;<li data-sourcepos="2:2-2:6">bar</li>&#x000A;<li data-sourcepos="3:3-3:7">baz</li>&#x000A;<li data-sourcepos="4:4-4:8">boo</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:8" dir="auto">
+ <li data-sourcepos="1:1-1:5">foo</li>
+ <li data-sourcepos="2:2-2:6">bar</li>
+ <li data-sourcepos="3:3-3:7">baz</li>
+ <li data-sourcepos="4:4-4:8">boo</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li><li><p>bar</p></li><li><p>baz</p></li><li><p>boo</p></li></ul>
-05_02__container_blocks__list_items__44:
+05_02__container_blocks__list_items__044:
canonical: |
<ol start="10">
<li>foo
@@ -2903,11 +3938,16 @@
</li>
</ol>
static: |-
- <ol start="10" data-sourcepos="1:1-2:9" dir="auto">&#x000A;<li data-sourcepos="1:1-2:9">foo&#x000A;<ul data-sourcepos="2:5-2:9">&#x000A;<li data-sourcepos="2:5-2:9">bar</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ol>
+ <ol start="10" data-sourcepos="1:1-2:9" dir="auto">
+ <li data-sourcepos="1:1-2:9">foo
+ <ul data-sourcepos="2:5-2:9">
+ <li data-sourcepos="2:5-2:9">bar</li>
+ </ul>
+ </li>
+ </ol>
wysiwyg: |-
- <ol parens="false"><li><p>foo
- </p><ul bullet="*"><li><p>bar</p></li></ul></li></ol>
-05_02__container_blocks__list_items__45:
+ <ol parens="false"><li><p>foo</p><ul bullet="*"><li><p>bar</p></li></ul></li></ol>
+05_02__container_blocks__list_items__045:
canonical: |
<ol start="10">
<li>foo</li>
@@ -2916,10 +3956,15 @@
<li>bar</li>
</ul>
static: |-
- <ol start="10" data-sourcepos="1:1-1:7" dir="auto">&#x000A;<li data-sourcepos="1:1-1:7">foo</li>&#x000A;</ol>&#x000A;<ul data-sourcepos="2:4-2:8" dir="auto">&#x000A;<li data-sourcepos="2:4-2:8">bar</li>&#x000A;</ul>
+ <ol start="10" data-sourcepos="1:1-1:7" dir="auto">
+ <li data-sourcepos="1:1-1:7">foo</li>
+ </ol>
+ <ul data-sourcepos="2:4-2:8" dir="auto">
+ <li data-sourcepos="2:4-2:8">bar</li>
+ </ul>
wysiwyg: |-
<ol parens="false"><li><p>foo</p></li></ol>
-05_02__container_blocks__list_items__46:
+05_02__container_blocks__list_items__046:
canonical: |
<ul>
<li>
@@ -2929,10 +3974,16 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-1:7" dir="auto">&#x000A;<li data-sourcepos="1:1-1:7">&#x000A;<ul data-sourcepos="1:3-1:7">&#x000A;<li data-sourcepos="1:3-1:7">foo</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-1:7" dir="auto">
+ <li data-sourcepos="1:1-1:7">
+ <ul data-sourcepos="1:3-1:7">
+ <li data-sourcepos="1:3-1:7">foo</li>
+ </ul>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p></p><ul bullet="*"><li><p>foo</p></li></ul></li></ul>
-05_02__container_blocks__list_items__47:
+05_02__container_blocks__list_items__047:
canonical: |
<ol>
<li>
@@ -2946,10 +3997,20 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-1:11" dir="auto">&#x000A;<li data-sourcepos="1:1-1:11">&#x000A;<ul data-sourcepos="1:4-1:11">&#x000A;<li data-sourcepos="1:4-1:11">&#x000A;<ol start="2" data-sourcepos="1:6-1:11">&#x000A;<li data-sourcepos="1:6-1:11">foo</li>&#x000A;</ol>&#x000A;</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-1:11" dir="auto">
+ <li data-sourcepos="1:1-1:11">
+ <ul data-sourcepos="1:4-1:11">
+ <li data-sourcepos="1:4-1:11">
+ <ol start="2" data-sourcepos="1:6-1:11">
+ <li data-sourcepos="1:6-1:11">foo</li>
+ </ol>
+ </li>
+ </ul>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p></p><ul bullet="*"><li><p></p><ol parens="false"><li><p>foo</p></li></ol></li></ul></li></ol>
-05_02__container_blocks__list_items__48:
+05_02__container_blocks__list_items__048:
canonical: |
<ul>
<li>
@@ -2960,11 +4021,20 @@
baz</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:5" dir="auto">&#x000A;<li data-sourcepos="1:1-1:7">&#x000A;<h1 data-sourcepos="1:3-1:7">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>&#x000A;</li>&#x000A;<li data-sourcepos="2:1-4:5">&#x000A;<h2 data-sourcepos="2:3-4:5">&#x000A;<a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>Bar</h2>&#x000A;baz</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:5" dir="auto">
+ <li data-sourcepos="1:1-1:7">
+ <h1 data-sourcepos="1:3-1:7">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>Foo</h1>
+ </li>
+ <li data-sourcepos="2:1-4:5">
+ <h2 data-sourcepos="2:3-4:5">
+ <a id="user-content-bar" class="anchor" href="#bar" aria-hidden="true"></a>Bar</h2>
+ baz</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p></p><h1>Foo</h1></li><li><p></p><h2>Bar
baz</h2></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__49:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049:
canonical: |
<ul>
<li><input disabled="" type="checkbox"> foo</li>
@@ -2987,11 +4057,31 @@
<li>baz</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-8:5" class="task-list" dir="auto">&#x000A;<li data-sourcepos="1:1-1:9" class="task-list-item">&#x000A;<task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> foo</li>&#x000A;<li data-sourcepos="2:1-2:9" class="task-list-item">&#x000A;<task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> bar</li>&#x000A;<li data-sourcepos="3:1-5:11" class="task-list-item">&#x000A;<task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> foo&#x000A;<ul data-sourcepos="4:3-5:11" class="task-list">&#x000A;<li data-sourcepos="4:3-4:11" class="task-list-item">&#x000A;<task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> bar</li>&#x000A;<li data-sourcepos="5:3-5:11" class="task-list-item">&#x000A;<task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> baz</li>&#x000A;</ul>&#x000A;</li>&#x000A;<li data-sourcepos="6:1-6:9" class="task-list-item">&#x000A;<task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> bim</li>&#x000A;<li data-sourcepos="7:1-7:5">foo</li>&#x000A;<li data-sourcepos="8:1-8:5">bar</li>&#x000A;</ul>&#x000A;<ul data-sourcepos="9:1-9:5" dir="auto">&#x000A;<li data-sourcepos="9:1-9:5">baz</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-8:5" class="task-list" dir="auto">
+ <li data-sourcepos="1:1-1:9" class="task-list-item">
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> foo</li>
+ <li data-sourcepos="2:1-2:9" class="task-list-item">
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> bar</li>
+ <li data-sourcepos="3:1-5:11" class="task-list-item">
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> foo
+ <ul data-sourcepos="4:3-5:11" class="task-list">
+ <li data-sourcepos="4:3-4:11" class="task-list-item">
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> bar</li>
+ <li data-sourcepos="5:3-5:11" class="task-list-item">
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> baz</li>
+ </ul>
+ </li>
+ <li data-sourcepos="6:1-6:9" class="task-list-item">
+ <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> bim</li>
+ <li data-sourcepos="7:1-7:5">foo</li>
+ <li data-sourcepos="8:1-8:5">bar</li>
+ </ul>
+ <ul data-sourcepos="9:1-9:5" dir="auto">
+ <li data-sourcepos="9:1-9:5">baz</li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>[ ] foo</p></li><li><p>[x] bar</p></li><li><p>[x] foo
- </p><ul bullet="*"><li><p>[ ] bar</p></li><li><p>[x] baz</p></li></ul></li><li><p>[ ] bim</p></li><li><p>foo</p></li><li><p>bar</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__50:
+ <ul bullet="*"><li><p>baz</p></li></ul>
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050:
canonical: |
<ol>
<li>foo</li>
@@ -3001,10 +4091,16 @@
<li>baz</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-2:6" dir="auto">&#x000A;<li data-sourcepos="1:1-1:6">foo</li>&#x000A;<li data-sourcepos="2:1-2:6">bar</li>&#x000A;</ol>&#x000A;<ol start="3" data-sourcepos="3:1-3:6" dir="auto">&#x000A;<li data-sourcepos="3:1-3:6">baz</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-2:6" dir="auto">
+ <li data-sourcepos="1:1-1:6">foo</li>
+ <li data-sourcepos="2:1-2:6">bar</li>
+ </ol>
+ <ol start="3" data-sourcepos="3:1-3:6" dir="auto">
+ <li data-sourcepos="3:1-3:6">baz</li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>foo</p></li><li><p>bar</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__51:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051:
canonical: |
<p>Foo</p>
<ul>
@@ -3012,29 +4108,37 @@
<li>baz</li>
</ul>
static: |-
- <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>&#x000A;<ul data-sourcepos="2:1-3:5" dir="auto">&#x000A;<li data-sourcepos="2:1-2:5">bar</li>&#x000A;<li data-sourcepos="3:1-3:5">baz</li>&#x000A;</ul>
+ <p data-sourcepos="1:1-1:3" dir="auto">Foo</p>
+ <ul data-sourcepos="2:1-3:5" dir="auto">
+ <li data-sourcepos="2:1-2:5">bar</li>
+ <li data-sourcepos="3:1-3:5">baz</li>
+ </ul>
wysiwyg: |-
<p>Foo</p>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__52:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052:
canonical: |
<p>The number of windows in my house is
14. The number of doors is 6.</p>
static: |-
- <p data-sourcepos="1:1-2:30" dir="auto">The number of windows in my house is&#x000A;14. The number of doors is 6.</p>
+ <p data-sourcepos="1:1-2:30" dir="auto">The number of windows in my house is
+ 14. The number of doors is 6.</p>
wysiwyg: |-
<p>The number of windows in my house is
14. The number of doors is 6.</p>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__53:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053:
canonical: |
<p>The number of windows in my house is</p>
<ol>
<li>The number of doors is 6.</li>
</ol>
static: |-
- <p data-sourcepos="1:1-1:36" dir="auto">The number of windows in my house is</p>&#x000A;<ol data-sourcepos="2:1-2:29" dir="auto">&#x000A;<li data-sourcepos="2:1-2:29">The number of doors is 6.</li>&#x000A;</ol>
+ <p data-sourcepos="1:1-1:36" dir="auto">The number of windows in my house is</p>
+ <ol data-sourcepos="2:1-2:29" dir="auto">
+ <li data-sourcepos="2:1-2:29">The number of doors is 6.</li>
+ </ol>
wysiwyg: |-
<p>The number of windows in my house is</p>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__54:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054:
canonical: |
<ul>
<li>
@@ -3048,10 +4152,20 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-6:5" dir="auto">&#x000A;<li data-sourcepos="1:1-2:0">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;</li>&#x000A;<li data-sourcepos="3:1-5:0">&#x000A;<p data-sourcepos="3:3-3:5">bar</p>&#x000A;</li>&#x000A;<li data-sourcepos="6:1-6:5">&#x000A;<p data-sourcepos="6:3-6:5">baz</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-6:5" dir="auto">
+ <li data-sourcepos="1:1-2:0">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ </li>
+ <li data-sourcepos="3:1-5:0">
+ <p data-sourcepos="3:3-3:5">bar</p>
+ </li>
+ <li data-sourcepos="6:1-6:5">
+ <p data-sourcepos="6:3-6:5">baz</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p></li><li><p>bar</p></li><li><p>baz</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__55:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055:
canonical: |
<ul>
<li>foo
@@ -3068,12 +4182,23 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-6:9" dir="auto">&#x000A;<li data-sourcepos="1:1-6:9">foo&#x000A;<ul data-sourcepos="2:3-6:9">&#x000A;<li data-sourcepos="2:3-6:9">bar&#x000A;<ul data-sourcepos="3:5-6:9">&#x000A;<li data-sourcepos="3:5-6:9">&#x000A;<p data-sourcepos="3:7-3:9">baz</p>&#x000A;<p data-sourcepos="6:7-6:9">bim</p>&#x000A;</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-6:9" dir="auto">
+ <li data-sourcepos="1:1-6:9">foo
+ <ul data-sourcepos="2:3-6:9">
+ <li data-sourcepos="2:3-6:9">bar
+ <ul data-sourcepos="3:5-6:9">
+ <li data-sourcepos="3:5-6:9">
+ <p data-sourcepos="3:7-3:9">baz</p>
+ <p data-sourcepos="6:7-6:9">bim</p>
+ </li>
+ </ul>
+ </li>
+ </ul>
+ </li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>foo
- </p><ul bullet="*"><li><p>bar
- </p><ul bullet="*"><li><p>baz</p><p>bim</p></li></ul></li></ul></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__56:
+ <ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p><ul bullet="*"><li><p>baz</p><p>bim</p></li></ul></li></ul></li></ul>
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056:
canonical: |
<ul>
<li>foo</li>
@@ -3085,11 +4210,19 @@
<li>bim</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-3:0" dir="auto">&#x000A;<li data-sourcepos="1:1-1:5">foo</li>&#x000A;<li data-sourcepos="2:1-3:0">bar</li>&#x000A;</ul>&#x000A;&#x000A;<ul data-sourcepos="6:1-7:5" dir="auto">&#x000A;<li data-sourcepos="6:1-6:5">baz</li>&#x000A;<li data-sourcepos="7:1-7:5">bim</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-3:0" dir="auto">
+ <li data-sourcepos="1:1-1:5">foo</li>
+ <li data-sourcepos="2:1-3:0">bar</li>
+ </ul>
+
+ <ul data-sourcepos="6:1-7:5" dir="auto">
+ <li data-sourcepos="6:1-6:5">baz</li>
+ <li data-sourcepos="7:1-7:5">bim</li>
+ </ul>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__57:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057:
canonical: |
<ul>
<li>
@@ -3104,11 +4237,24 @@
<pre><code>code
</code></pre>
static: |-
- <ul data-sourcepos="1:1-6:0" dir="auto">&#x000A;<li data-sourcepos="1:1-4:0">&#x000A;<p data-sourcepos="1:5-1:7">foo</p>&#x000A;<p data-sourcepos="3:5-3:11">notcode</p>&#x000A;</li>&#x000A;<li data-sourcepos="5:1-6:0">&#x000A;<p data-sourcepos="5:5-5:7">foo</p>&#x000A;</li>&#x000A;</ul>&#x000A;&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="9:5-9:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">code</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <ul data-sourcepos="1:1-6:0" dir="auto">
+ <li data-sourcepos="1:1-4:0">
+ <p data-sourcepos="1:5-1:7">foo</p>
+ <p data-sourcepos="3:5-3:11">notcode</p>
+ </li>
+ <li data-sourcepos="5:1-6:0">
+ <p data-sourcepos="5:5-5:7">foo</p>
+ </li>
+ </ul>
+
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="9:5-9:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">code</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__58:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058:
canonical: |
<ul>
<li>a</li>
@@ -3120,10 +4266,18 @@
<li>g</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-7:3" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">a</li>&#x000A;<li data-sourcepos="2:2-2:4">b</li>&#x000A;<li data-sourcepos="3:3-3:5">c</li>&#x000A;<li data-sourcepos="4:4-4:6">d</li>&#x000A;<li data-sourcepos="5:3-5:5">e</li>&#x000A;<li data-sourcepos="6:2-6:4">f</li>&#x000A;<li data-sourcepos="7:1-7:3">g</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-7:3" dir="auto">
+ <li data-sourcepos="1:1-1:3">a</li>
+ <li data-sourcepos="2:2-2:4">b</li>
+ <li data-sourcepos="3:3-3:5">c</li>
+ <li data-sourcepos="4:4-4:6">d</li>
+ <li data-sourcepos="5:3-5:5">e</li>
+ <li data-sourcepos="6:2-6:4">f</li>
+ <li data-sourcepos="7:1-7:3">g</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li><li><p>d</p></li><li><p>e</p></li><li><p>f</p></li><li><p>g</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__59:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059:
canonical: |
<ol>
<li>
@@ -3137,10 +4291,20 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-5:7" dir="auto">&#x000A;<li data-sourcepos="1:1-2:0">&#x000A;<p data-sourcepos="1:4-1:4">a</p>&#x000A;</li>&#x000A;<li data-sourcepos="3:3-4:0">&#x000A;<p data-sourcepos="3:6-3:6">b</p>&#x000A;</li>&#x000A;<li data-sourcepos="5:4-5:7">&#x000A;<p data-sourcepos="5:7-5:7">c</p>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-5:7" dir="auto">
+ <li data-sourcepos="1:1-2:0">
+ <p data-sourcepos="1:4-1:4">a</p>
+ </li>
+ <li data-sourcepos="3:3-4:0">
+ <p data-sourcepos="3:6-3:6">b</p>
+ </li>
+ <li data-sourcepos="5:4-5:7">
+ <p data-sourcepos="5:7-5:7">c</p>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__60:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060:
canonical: |
<ul>
<li>a</li>
@@ -3150,11 +4314,17 @@
- e</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-5:7" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">a</li>&#x000A;<li data-sourcepos="2:2-2:4">b</li>&#x000A;<li data-sourcepos="3:3-3:5">c</li>&#x000A;<li data-sourcepos="4:4-5:7">d&#x000A;- e</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-5:7" dir="auto">
+ <li data-sourcepos="1:1-1:3">a</li>
+ <li data-sourcepos="2:2-2:4">b</li>
+ <li data-sourcepos="3:3-3:5">c</li>
+ <li data-sourcepos="4:4-5:7">d
+ - e</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li><li><p>d
- e</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__61:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061:
canonical: |
<ol>
<li>
@@ -3167,10 +4337,21 @@
<pre><code>3. c
</code></pre>
static: |-
- <ol data-sourcepos="1:1-4:0" dir="auto">&#x000A;<li data-sourcepos="1:1-2:0">&#x000A;<p data-sourcepos="1:4-1:4">a</p>&#x000A;</li>&#x000A;<li data-sourcepos="3:3-4:0">&#x000A;<p data-sourcepos="3:6-3:6">b</p>&#x000A;</li>&#x000A;</ol>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="5:5-5:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">3. c</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <ol data-sourcepos="1:1-4:0" dir="auto">
+ <li data-sourcepos="1:1-2:0">
+ <p data-sourcepos="1:4-1:4">a</p>
+ </li>
+ <li data-sourcepos="3:3-4:0">
+ <p data-sourcepos="3:6-3:6">b</p>
+ </li>
+ </ol>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="5:5-5:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">3. c</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<ol parens="false"><li><p>a</p></li><li><p>b</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__62:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062:
canonical: |
<ul>
<li>
@@ -3184,10 +4365,20 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:3" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">&#x000A;<p data-sourcepos="1:3-1:3">a</p>&#x000A;</li>&#x000A;<li data-sourcepos="2:1-3:0">&#x000A;<p data-sourcepos="2:3-2:3">b</p>&#x000A;</li>&#x000A;<li data-sourcepos="4:1-4:3">&#x000A;<p data-sourcepos="4:3-4:3">c</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:3" dir="auto">
+ <li data-sourcepos="1:1-1:3">
+ <p data-sourcepos="1:3-1:3">a</p>
+ </li>
+ <li data-sourcepos="2:1-3:0">
+ <p data-sourcepos="2:3-2:3">b</p>
+ </li>
+ <li data-sourcepos="4:1-4:3">
+ <p data-sourcepos="4:3-4:3">c</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__63:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063:
canonical: |
<ul>
<li>
@@ -3199,10 +4390,19 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:3" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">&#x000A;<p data-sourcepos="1:3-1:3">a</p>&#x000A;</li>&#x000A;<li data-sourcepos="2:1-2:1">&#x000A;</li>&#x000A;<li data-sourcepos="4:1-4:3">&#x000A;<p data-sourcepos="4:3-4:3">c</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:3" dir="auto">
+ <li data-sourcepos="1:1-1:3">
+ <p data-sourcepos="1:3-1:3">a</p>
+ </li>
+ <li data-sourcepos="2:1-2:1">
+ </li>
+ <li data-sourcepos="4:1-4:3">
+ <p data-sourcepos="4:3-4:3">c</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p></p></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__64:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064:
canonical: |
<ul>
<li>
@@ -3217,10 +4417,21 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-5:3" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">&#x000A;<p data-sourcepos="1:3-1:3">a</p>&#x000A;</li>&#x000A;<li data-sourcepos="2:1-4:3">&#x000A;<p data-sourcepos="2:3-2:3">b</p>&#x000A;<p data-sourcepos="4:3-4:3">c</p>&#x000A;</li>&#x000A;<li data-sourcepos="5:1-5:3">&#x000A;<p data-sourcepos="5:3-5:3">d</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-5:3" dir="auto">
+ <li data-sourcepos="1:1-1:3">
+ <p data-sourcepos="1:3-1:3">a</p>
+ </li>
+ <li data-sourcepos="2:1-4:3">
+ <p data-sourcepos="2:3-2:3">b</p>
+ <p data-sourcepos="4:3-4:3">c</p>
+ </li>
+ <li data-sourcepos="5:1-5:3">
+ <p data-sourcepos="5:3-5:3">d</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p>b</p><p>c</p></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__65:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065:
canonical: |
<ul>
<li>
@@ -3234,10 +4445,20 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-5:3" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">&#x000A;<p data-sourcepos="1:3-1:3">a</p>&#x000A;</li>&#x000A;<li data-sourcepos="2:1-4:13">&#x000A;<p data-sourcepos="2:3-2:3">b</p>&#x000A;</li>&#x000A;<li data-sourcepos="5:1-5:3">&#x000A;<p data-sourcepos="5:3-5:3">d</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-5:3" dir="auto">
+ <li data-sourcepos="1:1-1:3">
+ <p data-sourcepos="1:3-1:3">a</p>
+ </li>
+ <li data-sourcepos="2:1-4:13">
+ <p data-sourcepos="2:3-2:3">b</p>
+ </li>
+ <li data-sourcepos="5:1-5:3">
+ <p data-sourcepos="5:3-5:3">d</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p>b</p></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__66:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066:
canonical: |
<ul>
<li>a</li>
@@ -3250,12 +4471,23 @@
<li>c</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-7:3" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">a</li>&#x000A;<li data-sourcepos="2:1-6:5">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="2:3-6:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">b</span>&#x000A;<span id="LC2" class="line" lang="plaintext"></span>&#x000A;<span id="LC3" class="line" lang="plaintext"></span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;<li data-sourcepos="7:1-7:3">c</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-7:3" dir="auto">
+ <li data-sourcepos="1:1-1:3">a</li>
+ <li data-sourcepos="2:1-6:5">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="2:3-6:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">b</span>
+ <span id="LC2" class="line" lang="plaintext"></span>
+ <span id="LC3" class="line" lang="plaintext"></span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ <li data-sourcepos="7:1-7:3">c</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>b
</code></pre></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__67:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067:
canonical: |
<ul>
<li>a
@@ -3269,11 +4501,20 @@
<li>d</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-5:3" dir="auto">&#x000A;<li data-sourcepos="1:1-4:5">a&#x000A;<ul data-sourcepos="2:3-4:5">&#x000A;<li data-sourcepos="2:3-4:5">&#x000A;<p data-sourcepos="2:5-2:5">b</p>&#x000A;<p data-sourcepos="4:5-4:5">c</p>&#x000A;</li>&#x000A;</ul>&#x000A;</li>&#x000A;<li data-sourcepos="5:1-5:3">d</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-5:3" dir="auto">
+ <li data-sourcepos="1:1-4:5">a
+ <ul data-sourcepos="2:3-4:5">
+ <li data-sourcepos="2:3-4:5">
+ <p data-sourcepos="2:5-2:5">b</p>
+ <p data-sourcepos="4:5-4:5">c</p>
+ </li>
+ </ul>
+ </li>
+ <li data-sourcepos="5:1-5:3">d</li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>a
- </p><ul bullet="*"><li><p>b</p><p>c</p></li></ul></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__68:
+ <ul bullet="*"><li><p>a</p><ul bullet="*"><li><p>b</p><p>c</p></li></ul></li><li><p>d</p></li></ul>
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068:
canonical: |
<ul>
<li>a
@@ -3284,11 +4525,17 @@
<li>c</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:3" dir="auto">&#x000A;<li data-sourcepos="1:1-3:3">a&#x000A;<blockquote data-sourcepos="2:3-3:3">&#x000A;<p data-sourcepos="2:5-2:5">b</p>&#x000A;</blockquote>&#x000A;</li>&#x000A;<li data-sourcepos="4:1-4:3">c</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:3" dir="auto">
+ <li data-sourcepos="1:1-3:3">a
+ <blockquote data-sourcepos="2:3-3:3">
+ <p data-sourcepos="2:5-2:5">b</p>
+ </blockquote>
+ </li>
+ <li data-sourcepos="4:1-4:3">c</li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>a
- </p><blockquote multiline="false"><p>b</p></blockquote></li><li><p>c</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__69:
+ <ul bullet="*"><li><p>a</p><blockquote multiline="false"><p>b</p></blockquote></li><li><p>c</p></li></ul>
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069:
canonical: |
<ul>
<li>a
@@ -3301,20 +4548,32 @@
<li>d</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-6:3" dir="auto">&#x000A;<li data-sourcepos="1:1-5:5">a&#x000A;<blockquote data-sourcepos="2:3-2:5">&#x000A;<p data-sourcepos="2:5-2:5">b</p>&#x000A;</blockquote>&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="3:3-5:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">c</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;</li>&#x000A;<li data-sourcepos="6:1-6:3">d</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-6:3" dir="auto">
+ <li data-sourcepos="1:1-5:5">a
+ <blockquote data-sourcepos="2:3-2:5">
+ <p data-sourcepos="2:5-2:5">b</p>
+ </blockquote>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="3:3-5:5" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">c</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </li>
+ <li data-sourcepos="6:1-6:3">d</li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>a
- </p><blockquote multiline="false"><p>b</p></blockquote><pre class="content-editor-code-block undefined code highlight"><code>c</code></pre></li><li><p>d</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__70:
+ <ul bullet="*"><li><p>a</p><blockquote multiline="false"><p>b</p></blockquote><pre class="content-editor-code-block undefined code highlight"><code>c</code></pre></li><li><p>d</p></li></ul>
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070:
canonical: |
<ul>
<li>a</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-1:3" dir="auto">&#x000A;<li data-sourcepos="1:1-1:3">a</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-1:3" dir="auto">
+ <li data-sourcepos="1:1-1:3">a</li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__71:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071:
canonical: |
<ul>
<li>a
@@ -3324,11 +4583,16 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-2:5" dir="auto">&#x000A;<li data-sourcepos="1:1-2:5">a&#x000A;<ul data-sourcepos="2:3-2:5">&#x000A;<li data-sourcepos="2:3-2:5">b</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-2:5" dir="auto">
+ <li data-sourcepos="1:1-2:5">a
+ <ul data-sourcepos="2:3-2:5">
+ <li data-sourcepos="2:3-2:5">b</li>
+ </ul>
+ </li>
+ </ul>
wysiwyg: |-
- <ul bullet="*"><li><p>a
- </p><ul bullet="*"><li><p>b</p></li></ul></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__72:
+ <ul bullet="*"><li><p>a</p><ul bullet="*"><li><p>b</p></li></ul></li></ul>
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072:
canonical: |
<ol>
<li>
@@ -3338,10 +4602,18 @@
</li>
</ol>
static: |-
- <ol data-sourcepos="1:1-5:6" dir="auto">&#x000A;<li data-sourcepos="1:1-5:6">&#x000A;<div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:4-3:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>&#x000A;<p data-sourcepos="5:4-5:6">bar</p>&#x000A;</li>&#x000A;</ol>
+ <ol data-sourcepos="1:1-5:6" dir="auto">
+ <li data-sourcepos="1:1-5:6">
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:4-3:6" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="5:4-5:6">bar</p>
+ </li>
+ </ol>
wysiwyg: |-
<ol parens="false"><li><p></p><pre class="content-editor-code-block undefined code highlight"><code>foo</code></pre><p>bar</p></li></ol>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__73:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073:
canonical: |
<ul>
<li>
@@ -3353,10 +4625,18 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-4:5" dir="auto">&#x000A;<li data-sourcepos="1:1-4:5">&#x000A;<p data-sourcepos="1:3-1:5">foo</p>&#x000A;<ul data-sourcepos="2:3-3:0">&#x000A;<li data-sourcepos="2:3-3:0">bar</li>&#x000A;</ul>&#x000A;<p data-sourcepos="4:3-4:5">baz</p>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-4:5" dir="auto">
+ <li data-sourcepos="1:1-4:5">
+ <p data-sourcepos="1:3-1:5">foo</p>
+ <ul data-sourcepos="2:3-3:0">
+ <li data-sourcepos="2:3-3:0">bar</li>
+ </ul>
+ <p data-sourcepos="4:3-4:5">baz</p>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>foo</p><ul bullet="*"><li><p>bar</p></li></ul><p>baz</p></li></ul>
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__74:
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074:
canonical: |
<ul>
<li>
@@ -3375,28 +4655,43 @@
</li>
</ul>
static: |-
- <ul data-sourcepos="1:1-7:5" dir="auto">&#x000A;<li data-sourcepos="1:1-4:0">&#x000A;<p data-sourcepos="1:3-1:3">a</p>&#x000A;<ul data-sourcepos="2:3-4:0">&#x000A;<li data-sourcepos="2:3-2:5">b</li>&#x000A;<li data-sourcepos="3:3-4:0">c</li>&#x000A;</ul>&#x000A;</li>&#x000A;<li data-sourcepos="5:1-7:5">&#x000A;<p data-sourcepos="5:3-5:3">d</p>&#x000A;<ul data-sourcepos="6:3-7:5">&#x000A;<li data-sourcepos="6:3-6:5">e</li>&#x000A;<li data-sourcepos="7:3-7:5">f</li>&#x000A;</ul>&#x000A;</li>&#x000A;</ul>
+ <ul data-sourcepos="1:1-7:5" dir="auto">
+ <li data-sourcepos="1:1-4:0">
+ <p data-sourcepos="1:3-1:3">a</p>
+ <ul data-sourcepos="2:3-4:0">
+ <li data-sourcepos="2:3-2:5">b</li>
+ <li data-sourcepos="3:3-4:0">c</li>
+ </ul>
+ </li>
+ <li data-sourcepos="5:1-7:5">
+ <p data-sourcepos="5:3-5:3">d</p>
+ <ul data-sourcepos="6:3-7:5">
+ <li data-sourcepos="6:3-6:5">e</li>
+ <li data-sourcepos="7:3-7:5">f</li>
+ </ul>
+ </li>
+ </ul>
wysiwyg: |-
<ul bullet="*"><li><p>a</p><ul bullet="*"><li><p>b</p></li><li><p>c</p></li></ul></li><li><p>d</p><ul bullet="*"><li><p>e</p></li><li><p>f</p></li></ul></li></ul>
-06_01__inlines__01:
+06_01__inlines__001:
canonical: |
<p><code>hi</code>lo`</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><code>hi</code>lo`</p>
wysiwyg: |-
<p><code>hi</code>lo`</p>
-06_02__inlines__backslash_escapes__01:
+06_02__inlines__backslash_escapes__001:
canonical: |
<p>!&quot;#$%&amp;'()*+,-./:;&lt;=&gt;?@[\]^_`{|}~</p>
static: |-
<p data-sourcepos="1:1-1:224" dir="auto"><span>!</span>"<span>#</span><span>$</span><span>%</span><span>&amp;</span>'()*+,-./:;&lt;=&gt;?<span>@</span>[\]<span>^</span>_`{|}<span>~</span></p>
wysiwyg: |-
<p>!"#$%&amp;'()*+,-./:;&lt;=&gt;?@[\]^_`{|}~</p>
-06_02__inlines__backslash_escapes__02:
+06_02__inlines__backslash_escapes__002:
canonical: "<p>\\\t\\A\\a\\ \\3\\φ\\«</p>\n"
static: "<p data-sourcepos=\"1:1-1:16\" dir=\"auto\">\\\t\\A\\a\\ \\3\\φ\\«</p>"
wysiwyg: "<p>\\\t\\A\\a\\ \\3\\φ\\«</p>"
-06_02__inlines__backslash_escapes__03:
+06_02__inlines__backslash_escapes__003:
canonical: |
<p>*not emphasized*
&lt;br/&gt; not a tag
@@ -3408,7 +4703,15 @@
[foo]: /url &quot;not a reference&quot;
&amp;ouml; not a character entity</p>
static: |-
- <p data-sourcepos="1:1-9:50" dir="auto">*not emphasized*&#x000A;&lt;br/&gt; not a tag&#x000A;<a href="/foo">not a link</a>&#x000A;`not code`&#x000A;1. not a list&#x000A;* not a list&#x000A;<span>#</span> not a heading&#x000A;[foo]: /url "not a reference"&#x000A;<span>&amp;</span>ouml; not a character entity</p>
+ <p data-sourcepos="1:1-9:50" dir="auto">*not emphasized*
+ &lt;br/&gt; not a tag
+ <a href="/foo">not a link</a>
+ `not code`
+ 1. not a list
+ * not a list
+ <span>#</span> not a heading
+ [foo]: /url "not a reference"
+ <span>&amp;</span>ouml; not a character entity</p>
wysiwyg: |-
<p>*not emphasized*
&lt;br/&gt; not a tag
@@ -3419,1072 +4722,1106 @@
# not a heading
[foo]: /url "not a reference"
&amp;ouml; not a character entity</p>
-06_02__inlines__backslash_escapes__04:
+06_02__inlines__backslash_escapes__004:
canonical: |
<p>\<em>emphasis</em></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">\<em>emphasis</em></p>
wysiwyg: |-
<p>\<em>emphasis</em></p>
-06_02__inlines__backslash_escapes__05:
+06_02__inlines__backslash_escapes__005:
canonical: |
<p>foo<br />
bar</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>&#x000A;bar</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
+ bar</p>
wysiwyg: |-
<p>foo<br>
bar</p>
-06_02__inlines__backslash_escapes__06:
+06_02__inlines__backslash_escapes__006:
canonical: |
<p><code>\[\`</code></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><code>\[\`</code></p>
wysiwyg: |-
<p><code>\[\`</code></p>
-06_02__inlines__backslash_escapes__07:
+06_02__inlines__backslash_escapes__007:
canonical: |
<pre><code>\[\]
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">\[\]</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:8" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">\[\]</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>\[\]</code></pre>
-06_02__inlines__backslash_escapes__08:
+06_02__inlines__backslash_escapes__008:
canonical: |
<pre><code>\[\]
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">\[\]</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">\[\]</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>\[\]</code></pre>
-06_02__inlines__backslash_escapes__09:
+06_02__inlines__backslash_escapes__009:
canonical: |
<p><a href="http://example.com?find=%5C*">http://example.com?find=\*</a></p>
static: |-
<p data-sourcepos="1:1-1:28" dir="auto"><a href="http://example.com?find=%5C*" rel="nofollow noreferrer noopener" target="_blank">http://example.com?find=\*</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com?find=%5C*">http://example.com?find=\*</a></p>
-06_02__inlines__backslash_escapes__10:
+06_02__inlines__backslash_escapes__010:
canonical: |
<a href="/bar\/)">
static: |-
<a href="/bar%5C/)" rel="nofollow noreferrer noopener" target="_blank"></a>
wysiwyg: |-
<p></p>
-06_02__inlines__backslash_escapes__11:
+06_02__inlines__backslash_escapes__011:
canonical: |
<p><a href="/bar*" title="ti*tle">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto"><a href="/bar*" title="ti*tle">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/bar*" title="ti*tle">foo</a></p>
-06_02__inlines__backslash_escapes__12:
+06_02__inlines__backslash_escapes__012:
canonical: |
<p><a href="/bar*" title="ti*tle">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto"><a href="/bar*" title="ti*tle">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/bar*" title="ti*tle">foo</a></p>
-06_02__inlines__backslash_escapes__13:
+06_02__inlines__backslash_escapes__013:
canonical: |
<pre><code class="language-foo+bar">foo
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="foo+bar" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre language="foo+bar" class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-06_03__inlines__entity_and_numeric_character_references__01:
+06_03__inlines__entity_and_numeric_character_references__001:
canonical: |
<p>  &amp; © Æ Ď
¾ ℋ ⅆ
∲ ≧̸</p>
static: |-
- <p data-sourcepos="1:1-3:32" dir="auto">  &amp; © Æ Ď&#x000A;¾ ℋ ⅆ&#x000A;∲ ≧̸</p>
+ <p data-sourcepos="1:1-3:32" dir="auto">  &amp; © Æ Ď
+ ¾ ℋ ⅆ
+ ∲ ≧̸</p>
wysiwyg: |-
<p>&nbsp; &amp; © Æ Ď
¾ ℋ ⅆ
∲ ≧̸</p>
-06_03__inlines__entity_and_numeric_character_references__02:
+06_03__inlines__entity_and_numeric_character_references__002:
canonical: |
<p># Ӓ Ϡ �</p>
static: |-
<p data-sourcepos="1:1-1:25" dir="auto"># Ӓ Ϡ �</p>
wysiwyg: |-
<p># Ӓ Ϡ �</p>
-06_03__inlines__entity_and_numeric_character_references__03:
+06_03__inlines__entity_and_numeric_character_references__003:
canonical: |
<p>&quot; ആ ಫ</p>
static: |-
<p data-sourcepos="1:1-1:22" dir="auto">" ആ ಫ</p>
wysiwyg: |-
<p>" ആ ಫ</p>
-06_03__inlines__entity_and_numeric_character_references__04:
+06_03__inlines__entity_and_numeric_character_references__004:
canonical: |
<p>&amp;nbsp &amp;x; &amp;#; &amp;#x;
&amp;#987654321;
&amp;#abcdef0;
&amp;ThisIsNotDefined; &amp;hi?;</p>
static: |-
- <p data-sourcepos="1:1-4:24" dir="auto">&amp;nbsp &amp;x; &amp;#; &amp;#x;&#x000A;&amp;#987654321;&#x000A;&amp;#abcdef0;&#x000A;&amp;ThisIsNotDefined; &amp;hi?;</p>
+ <p data-sourcepos="1:1-4:24" dir="auto">&amp;nbsp &amp;x; &amp;#; &amp;#x;
+ &amp;#987654321;
+ &amp;#abcdef0;
+ &amp;ThisIsNotDefined; &amp;hi?;</p>
wysiwyg: |-
<p>&amp;nbsp &amp;x; &amp;#; &amp;#x;
&amp;#987654321;
&amp;#abcdef0;
&amp;ThisIsNotDefined; &amp;hi?;</p>
-06_03__inlines__entity_and_numeric_character_references__05:
+06_03__inlines__entity_and_numeric_character_references__005:
canonical: |
<p>&amp;copy</p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto">&amp;copy</p>
wysiwyg: |-
<p>&amp;copy</p>
-06_03__inlines__entity_and_numeric_character_references__06:
+06_03__inlines__entity_and_numeric_character_references__006:
canonical: |
<p>&amp;MadeUpEntity;</p>
static: |-
<p data-sourcepos="1:1-1:14" dir="auto">&amp;MadeUpEntity;</p>
wysiwyg: |-
<p>&amp;MadeUpEntity;</p>
-06_03__inlines__entity_and_numeric_character_references__07:
+06_03__inlines__entity_and_numeric_character_references__007:
canonical: |
<a href="&ouml;&ouml;.html">
static: |-
<a href="%C3%B6%C3%B6.html" rel="nofollow noreferrer noopener" target="_blank"></a>
wysiwyg: |-
<p></p>
-06_03__inlines__entity_and_numeric_character_references__08:
+06_03__inlines__entity_and_numeric_character_references__008:
canonical: |
<p><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:37" dir="auto"><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
-06_03__inlines__entity_and_numeric_character_references__09:
+06_03__inlines__entity_and_numeric_character_references__009:
canonical: |
<p><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto"><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
-06_03__inlines__entity_and_numeric_character_references__10:
+06_03__inlines__entity_and_numeric_character_references__010:
canonical: |
<pre><code class="language-föö">foo
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="föö" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">foo</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre language="föö" class="content-editor-code-block undefined code highlight"><code>foo</code></pre>
-06_03__inlines__entity_and_numeric_character_references__11:
+06_03__inlines__entity_and_numeric_character_references__011:
canonical: |
<p><code>f&amp;ouml;&amp;ouml;</code></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><code>f&amp;ouml;&amp;ouml;</code></p>
wysiwyg: |-
<p><code>f&amp;ouml;&amp;ouml;</code></p>
-06_03__inlines__entity_and_numeric_character_references__12:
+06_03__inlines__entity_and_numeric_character_references__012:
canonical: |
<pre><code>f&amp;ouml;f&amp;ouml;
</code></pre>
static: |-
- <div class="gl-relative markdown-code-block js-markdown-code">&#x000A;<pre data-sourcepos="1:5-1:18" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">f&amp;ouml;f&amp;ouml;</span></code></pre>&#x000A;<copy-code></copy-code>&#x000A;</div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:5-1:18" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">f&amp;ouml;f&amp;ouml;</span></code></pre>
+ <copy-code></copy-code>
+ </div>
wysiwyg: |-
<pre class="content-editor-code-block undefined code highlight"><code>f&amp;ouml;f&amp;ouml;</code></pre>
-06_03__inlines__entity_and_numeric_character_references__13:
+06_03__inlines__entity_and_numeric_character_references__013:
canonical: |
<p>*foo*
<em>foo</em></p>
static: |-
- <p data-sourcepos="1:1-2:5" dir="auto">*foo*&#x000A;<em>foo</em></p>
+ <p data-sourcepos="1:1-2:5" dir="auto">*foo*
+ <em>foo</em></p>
wysiwyg: |-
<p>*foo*
<em>foo</em></p>
-06_03__inlines__entity_and_numeric_character_references__14:
+06_03__inlines__entity_and_numeric_character_references__014:
canonical: |
<p>* foo</p>
<ul>
<li>foo</li>
</ul>
static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">* foo</p>&#x000A;<ul data-sourcepos="3:1-3:5" dir="auto">&#x000A;<li data-sourcepos="3:1-3:5">foo</li>&#x000A;</ul>
+ <p data-sourcepos="1:1-1:9" dir="auto">* foo</p>
+ <ul data-sourcepos="3:1-3:5" dir="auto">
+ <li data-sourcepos="3:1-3:5">foo</li>
+ </ul>
wysiwyg: |-
<p>* foo</p>
-06_03__inlines__entity_and_numeric_character_references__15:
+06_03__inlines__entity_and_numeric_character_references__015:
canonical: |
<p>foo
bar</p>
static: |-
- <p data-sourcepos="1:1-1:16" dir="auto">foo&#x000A;&#x000A;bar</p>
+ <p data-sourcepos="1:1-1:16" dir="auto">foo
+
+ bar</p>
wysiwyg: |-
<p>foo
bar</p>
-06_03__inlines__entity_and_numeric_character_references__16:
+06_03__inlines__entity_and_numeric_character_references__016:
canonical: "<p>\tfoo</p>\n"
static: "<p data-sourcepos=\"1:1-1:7\" dir=\"auto\">\tfoo</p>"
wysiwyg: "<p>\tfoo</p>"
-06_03__inlines__entity_and_numeric_character_references__17:
+06_03__inlines__entity_and_numeric_character_references__017:
canonical: |
<p>[a](url &quot;tit&quot;)</p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto"><a href="url" title="tit">a</a></p>
wysiwyg: |-
<p>[a](url "tit")</p>
-06_04__inlines__code_spans__01:
+06_04__inlines__code_spans__001:
canonical: |
<p><code>foo</code></p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto"><code>foo</code></p>
wysiwyg: |-
<p><code>foo</code></p>
-06_04__inlines__code_spans__02:
+06_04__inlines__code_spans__002:
canonical: |
<p><code>foo ` bar</code></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><code>foo ` bar</code></p>
wysiwyg: |-
<p><code>foo ` bar</code></p>
-06_04__inlines__code_spans__03:
+06_04__inlines__code_spans__003:
canonical: |
<p><code>``</code></p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto"><code>``</code></p>
wysiwyg: |-
<p><code>``</code></p>
-06_04__inlines__code_spans__04:
+06_04__inlines__code_spans__004:
canonical: |
<p><code> `` </code></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><code> `` </code></p>
wysiwyg: |-
<p><code> `` </code></p>
-06_04__inlines__code_spans__05:
+06_04__inlines__code_spans__005:
canonical: |
<p><code> a</code></p>
static: |-
<p data-sourcepos="1:1-1:4" dir="auto"><code> a</code></p>
wysiwyg: |-
<p><code> a</code></p>
-06_04__inlines__code_spans__06:
+06_04__inlines__code_spans__006:
canonical: |
<p><code> b </code></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><code> b </code></p>
wysiwyg: |-
<p><code>&nbsp;b&nbsp;</code></p>
-06_04__inlines__code_spans__07:
+06_04__inlines__code_spans__007:
canonical: |
<p><code> </code>
<code> </code></p>
static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><code> </code>&#x000A;<code> </code></p>
+ <p data-sourcepos="1:1-2:4" dir="auto"><code> </code>
+ <code> </code></p>
wysiwyg: |-
<p></p>
-06_04__inlines__code_spans__08:
+06_04__inlines__code_spans__008:
canonical: |
<p><code>foo bar baz</code></p>
static: |-
<p data-sourcepos="1:1-5:2" dir="auto"><code>foo bar baz</code></p>
wysiwyg: |-
<p><code>foo bar baz</code></p>
-06_04__inlines__code_spans__09:
+06_04__inlines__code_spans__009:
canonical: |
<p><code>foo </code></p>
static: |-
<p data-sourcepos="1:1-3:2" dir="auto"><code>foo </code></p>
wysiwyg: |-
<p><code>foo </code></p>
-06_04__inlines__code_spans__10:
+06_04__inlines__code_spans__010:
canonical: |
<p><code>foo bar baz</code></p>
static: |-
<p data-sourcepos="1:1-2:4" dir="auto"><code>foo bar baz</code></p>
wysiwyg: |-
<p><code>foo bar baz</code></p>
-06_04__inlines__code_spans__11:
+06_04__inlines__code_spans__011:
canonical: |
<p><code>foo\</code>bar`</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><code>foo\</code>bar`</p>
wysiwyg: |-
<p><code>foo\</code>bar`</p>
-06_04__inlines__code_spans__12:
+06_04__inlines__code_spans__012:
canonical: |
<p><code>foo`bar</code></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><code>foo`bar</code></p>
wysiwyg: |-
<p><code>foo`bar</code></p>
-06_04__inlines__code_spans__13:
+06_04__inlines__code_spans__013:
canonical: |
<p><code>foo `` bar</code></p>
static: |-
<p data-sourcepos="1:1-1:14" dir="auto"><code>foo `` bar</code></p>
wysiwyg: |-
<p><code>foo `` bar</code></p>
-06_04__inlines__code_spans__14:
+06_04__inlines__code_spans__014:
canonical: |
<p>*foo<code>*</code></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">*foo<code>*</code></p>
wysiwyg: |-
<p>*foo<code>*</code></p>
-06_04__inlines__code_spans__15:
+06_04__inlines__code_spans__015:
canonical: |
<p>[not a <code>link](/foo</code>)</p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto">[not a <code>link](/foo</code>)</p>
wysiwyg: |-
<p>[not a <code>link](/foo</code>)</p>
-06_04__inlines__code_spans__16:
+06_04__inlines__code_spans__016:
canonical: |
<p><code>&lt;a href=&quot;</code>&quot;&gt;`</p>
static: |-
<p data-sourcepos="1:1-1:14" dir="auto"><code>&lt;a href="</code>"&gt;`</p>
wysiwyg: |-
<p><code>&lt;a href="</code>"&gt;`</p>
-06_04__inlines__code_spans__17:
+06_04__inlines__code_spans__017:
canonical: |
<p><a href="`">`</p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><a href="%60" rel="nofollow noreferrer noopener" target="_blank">`</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="`">`</a></p>
-06_04__inlines__code_spans__18:
+06_04__inlines__code_spans__018:
canonical: |
<p><code>&lt;http://foo.bar.</code>baz&gt;`</p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto"><code>&lt;http://foo.bar.</code>baz&gt;`</p>
wysiwyg: |-
<p><code>&lt;http://foo.bar.</code>baz&gt;`</p>
-06_04__inlines__code_spans__19:
+06_04__inlines__code_spans__019:
canonical: |
<p><a href="http://foo.bar.%60baz">http://foo.bar.`baz</a>`</p>
static: |-
<p data-sourcepos="1:1-1:22" dir="auto"><a href="http://foo.bar.%60baz" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar.`baz</a>`</p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar.%60baz">http://foo.bar.`baz</a>`</p>
-06_04__inlines__code_spans__20:
+06_04__inlines__code_spans__020:
canonical: |
<p>```foo``</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">```foo``</p>
wysiwyg: |-
<p>```foo``</p>
-06_04__inlines__code_spans__21:
+06_04__inlines__code_spans__021:
canonical: |
<p>`foo</p>
static: |-
<p data-sourcepos="1:1-1:4" dir="auto">`foo</p>
wysiwyg: |-
<p>`foo</p>
-06_04__inlines__code_spans__22:
+06_04__inlines__code_spans__022:
canonical: |
<p>`foo<code>bar</code></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto">`foo<code>bar</code></p>
wysiwyg: |-
<p>`foo<code>bar</code></p>
-06_05__inlines__emphasis_and_strong_emphasis__01:
+06_05__inlines__emphasis_and_strong_emphasis__001:
canonical: |
<p><em>foo bar</em></p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto"><em>foo bar</em></p>
wysiwyg: |-
<p><em>foo bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__02:
+06_05__inlines__emphasis_and_strong_emphasis__002:
canonical: |
<p>a * foo bar*</p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">a * foo bar*</p>
wysiwyg: |-
<p>a * foo bar*</p>
-06_05__inlines__emphasis_and_strong_emphasis__03:
+06_05__inlines__emphasis_and_strong_emphasis__003:
canonical: |
<p>a*&quot;foo&quot;*</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">a*"foo"*</p>
wysiwyg: |-
<p>a*"foo"*</p>
-06_05__inlines__emphasis_and_strong_emphasis__04:
+06_05__inlines__emphasis_and_strong_emphasis__004:
canonical: |
<p>* a *</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">* a *</p>
wysiwyg: |-
<p>*&nbsp;a&nbsp;*</p>
-06_05__inlines__emphasis_and_strong_emphasis__05:
+06_05__inlines__emphasis_and_strong_emphasis__005:
canonical: |
<p>foo<em>bar</em></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">foo<em>bar</em></p>
wysiwyg: |-
<p>foo<em>bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__06:
+06_05__inlines__emphasis_and_strong_emphasis__006:
canonical: |
<p>5<em>6</em>78</p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto">5<em>6</em>78</p>
wysiwyg: |-
<p>5<em>6</em>78</p>
-06_05__inlines__emphasis_and_strong_emphasis__07:
+06_05__inlines__emphasis_and_strong_emphasis__007:
canonical: |
<p><em>foo bar</em></p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto"><em>foo bar</em></p>
wysiwyg: |-
<p><em>foo bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__08:
+06_05__inlines__emphasis_and_strong_emphasis__008:
canonical: |
<p>_ foo bar_</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">_ foo bar_</p>
wysiwyg: |-
<p>_ foo bar_</p>
-06_05__inlines__emphasis_and_strong_emphasis__09:
+06_05__inlines__emphasis_and_strong_emphasis__009:
canonical: |
<p>a_&quot;foo&quot;_</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">a_"foo"_</p>
wysiwyg: |-
<p>a_"foo"_</p>
-06_05__inlines__emphasis_and_strong_emphasis__10:
+06_05__inlines__emphasis_and_strong_emphasis__010:
canonical: |
<p>foo_bar_</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">foo_bar_</p>
wysiwyg: |-
<p>foo_bar_</p>
-06_05__inlines__emphasis_and_strong_emphasis__11:
+06_05__inlines__emphasis_and_strong_emphasis__011:
canonical: |
<p>5_6_78</p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto">5_6_78</p>
wysiwyg: |-
<p>5_6_78</p>
-06_05__inlines__emphasis_and_strong_emphasis__12:
+06_05__inlines__emphasis_and_strong_emphasis__012:
canonical: |
<p>пристаням_стремятся_</p>
static: |-
<p data-sourcepos="1:1-1:38" dir="auto">пристаням_стремятся_</p>
wysiwyg: |-
<p>пристаням_стремятся_</p>
-06_05__inlines__emphasis_and_strong_emphasis__13:
+06_05__inlines__emphasis_and_strong_emphasis__013:
canonical: |
<p>aa_&quot;bb&quot;_cc</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">aa_"bb"_cc</p>
wysiwyg: |-
<p>aa_"bb"_cc</p>
-06_05__inlines__emphasis_and_strong_emphasis__14:
+06_05__inlines__emphasis_and_strong_emphasis__014:
canonical: |
<p>foo-<em>(bar)</em></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto">foo-<em>(bar)</em></p>
wysiwyg: |-
<p>foo-<em>(bar)</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__15:
+06_05__inlines__emphasis_and_strong_emphasis__015:
canonical: |
<p>_foo*</p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto">_foo*</p>
wysiwyg: |-
<p>_foo*</p>
-06_05__inlines__emphasis_and_strong_emphasis__16:
+06_05__inlines__emphasis_and_strong_emphasis__016:
canonical: |
<p>*foo bar *</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">*foo bar *</p>
wysiwyg: |-
<p>*foo bar *</p>
-06_05__inlines__emphasis_and_strong_emphasis__17:
+06_05__inlines__emphasis_and_strong_emphasis__017:
canonical: |
<p>*foo bar
*</p>
static: |-
- <p data-sourcepos="1:1-2:1" dir="auto">*foo bar&#x000A;*</p>
+ <p data-sourcepos="1:1-2:1" dir="auto">*foo bar
+ *</p>
wysiwyg: |-
<p>*foo bar
*</p>
-06_05__inlines__emphasis_and_strong_emphasis__18:
+06_05__inlines__emphasis_and_strong_emphasis__018:
canonical: |
<p>*(*foo)</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">*(*foo)</p>
wysiwyg: |-
<p>*(*foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__19:
+06_05__inlines__emphasis_and_strong_emphasis__019:
canonical: |
<p><em>(<em>foo</em>)</em></p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto"><em>(<em>foo</em>)</em></p>
wysiwyg: |-
<p><em>(foo</em>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__20:
+06_05__inlines__emphasis_and_strong_emphasis__020:
canonical: |
<p><em>foo</em>bar</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><em>foo</em>bar</p>
wysiwyg: |-
<p><em>foo</em>bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__21:
+06_05__inlines__emphasis_and_strong_emphasis__021:
canonical: |
<p>_foo bar _</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">_foo bar _</p>
wysiwyg: |-
<p>_foo bar _</p>
-06_05__inlines__emphasis_and_strong_emphasis__22:
+06_05__inlines__emphasis_and_strong_emphasis__022:
canonical: |
<p>_(_foo)</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">_(_foo)</p>
wysiwyg: |-
<p>_(_foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__23:
+06_05__inlines__emphasis_and_strong_emphasis__023:
canonical: |
<p><em>(<em>foo</em>)</em></p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto"><em>(<em>foo</em>)</em></p>
wysiwyg: |-
<p><em>(foo</em>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__24:
+06_05__inlines__emphasis_and_strong_emphasis__024:
canonical: |
<p>_foo_bar</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">_foo_bar</p>
wysiwyg: |-
<p>_foo_bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__25:
+06_05__inlines__emphasis_and_strong_emphasis__025:
canonical: |
<p>_пристаням_стремятся</p>
static: |-
<p data-sourcepos="1:1-1:38" dir="auto">_пристаням_стремятся</p>
wysiwyg: |-
<p>_пристаням_стремятся</p>
-06_05__inlines__emphasis_and_strong_emphasis__26:
+06_05__inlines__emphasis_and_strong_emphasis__026:
canonical: |
<p><em>foo_bar_baz</em></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><em>foo_bar_baz</em></p>
wysiwyg: |-
<p><em>foo_bar_baz</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__27:
+06_05__inlines__emphasis_and_strong_emphasis__027:
canonical: |
<p><em>(bar)</em>.</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><em>(bar)</em>.</p>
wysiwyg: |-
<p><em>(bar)</em>.</p>
-06_05__inlines__emphasis_and_strong_emphasis__28:
+06_05__inlines__emphasis_and_strong_emphasis__028:
canonical: |
<p><strong>foo bar</strong></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><strong>foo bar</strong></p>
wysiwyg: |-
<p><strong>foo bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__29:
+06_05__inlines__emphasis_and_strong_emphasis__029:
canonical: |
<p>** foo bar**</p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">** foo bar**</p>
wysiwyg: |-
<p>** foo bar**</p>
-06_05__inlines__emphasis_and_strong_emphasis__30:
+06_05__inlines__emphasis_and_strong_emphasis__030:
canonical: |
<p>a**&quot;foo&quot;**</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">a**"foo"**</p>
wysiwyg: |-
<p>a**"foo"**</p>
-06_05__inlines__emphasis_and_strong_emphasis__31:
+06_05__inlines__emphasis_and_strong_emphasis__031:
canonical: |
<p>foo<strong>bar</strong></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">foo<strong>bar</strong></p>
wysiwyg: |-
<p>foo<strong>bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__32:
+06_05__inlines__emphasis_and_strong_emphasis__032:
canonical: |
<p><strong>foo bar</strong></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><strong>foo bar</strong></p>
wysiwyg: |-
<p><strong>foo bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__33:
+06_05__inlines__emphasis_and_strong_emphasis__033:
canonical: |
<p>__ foo bar__</p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">__ foo bar__</p>
wysiwyg: |-
<p>__ foo bar__</p>
-06_05__inlines__emphasis_and_strong_emphasis__34:
+06_05__inlines__emphasis_and_strong_emphasis__034:
canonical: |
<p>__
foo bar__</p>
static: |-
- <p data-sourcepos="1:1-2:9" dir="auto">__&#x000A;foo bar__</p>
+ <p data-sourcepos="1:1-2:9" dir="auto">__
+ foo bar__</p>
wysiwyg: |-
<p>__
foo bar__</p>
-06_05__inlines__emphasis_and_strong_emphasis__35:
+06_05__inlines__emphasis_and_strong_emphasis__035:
canonical: |
<p>a__&quot;foo&quot;__</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">a__"foo"__</p>
wysiwyg: |-
<p>a__"foo"__</p>
-06_05__inlines__emphasis_and_strong_emphasis__36:
+06_05__inlines__emphasis_and_strong_emphasis__036:
canonical: |
<p>foo__bar__</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">foo__bar__</p>
wysiwyg: |-
<p>foo__bar__</p>
-06_05__inlines__emphasis_and_strong_emphasis__37:
+06_05__inlines__emphasis_and_strong_emphasis__037:
canonical: |
<p>5__6__78</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">5__6__78</p>
wysiwyg: |-
<p>5__6__78</p>
-06_05__inlines__emphasis_and_strong_emphasis__38:
+06_05__inlines__emphasis_and_strong_emphasis__038:
canonical: |
<p>пристаням__стремятся__</p>
static: |-
<p data-sourcepos="1:1-1:40" dir="auto">пристаням__стремятся__</p>
wysiwyg: |-
<p>пристаням__стремятся__</p>
-06_05__inlines__emphasis_and_strong_emphasis__39:
+06_05__inlines__emphasis_and_strong_emphasis__039:
canonical: |
<p><strong>foo, <strong>bar</strong>, baz</strong></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto"><strong>foo, <strong>bar</strong>, baz</strong></p>
wysiwyg: |-
<p><strong>foo, bar</strong>, baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__40:
+06_05__inlines__emphasis_and_strong_emphasis__040:
canonical: |
<p>foo-<strong>(bar)</strong></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">foo-<strong>(bar)</strong></p>
wysiwyg: |-
<p>foo-<strong>(bar)</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__41:
+06_05__inlines__emphasis_and_strong_emphasis__041:
canonical: |
<p>**foo bar **</p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">**foo bar **</p>
wysiwyg: |-
<p>**foo bar **</p>
-06_05__inlines__emphasis_and_strong_emphasis__42:
+06_05__inlines__emphasis_and_strong_emphasis__042:
canonical: |
<p>**(**foo)</p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto">**(**foo)</p>
wysiwyg: |-
<p>**(**foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__43:
+06_05__inlines__emphasis_and_strong_emphasis__043:
canonical: |
<p><em>(<strong>foo</strong>)</em></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><em>(<strong>foo</strong>)</em></p>
wysiwyg: |-
<p><em>(</em><strong>foo</strong>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__44:
+06_05__inlines__emphasis_and_strong_emphasis__044:
canonical: |
<p><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
<em>Asclepias physocarpa</em>)</strong></p>
static: |-
- <p data-sourcepos="1:1-2:25" dir="auto"><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.&#x000A;<em>Asclepias physocarpa</em>)</strong></p>
+ <p data-sourcepos="1:1-2:25" dir="auto"><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
+ <em>Asclepias physocarpa</em>)</strong></p>
wysiwyg: |-
<p><strong>Gomphocarpus (</strong><em>Gomphocarpus physocarpus</em>, syn.
<em>Asclepias physocarpa</em>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__45:
+06_05__inlines__emphasis_and_strong_emphasis__045:
canonical: |
<p><strong>foo &quot;<em>bar</em>&quot; foo</strong></p>
static: |-
<p data-sourcepos="1:1-1:19" dir="auto"><strong>foo "<em>bar</em>" foo</strong></p>
wysiwyg: |-
<p><strong>foo "</strong><em>bar</em>" foo</p>
-06_05__inlines__emphasis_and_strong_emphasis__46:
+06_05__inlines__emphasis_and_strong_emphasis__046:
canonical: |
<p><strong>foo</strong>bar</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><strong>foo</strong>bar</p>
wysiwyg: |-
<p><strong>foo</strong>bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__47:
+06_05__inlines__emphasis_and_strong_emphasis__047:
canonical: |
<p>__foo bar __</p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">__foo bar __</p>
wysiwyg: |-
<p>__foo bar __</p>
-06_05__inlines__emphasis_and_strong_emphasis__48:
+06_05__inlines__emphasis_and_strong_emphasis__048:
canonical: |
<p>__(__foo)</p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto">__(__foo)</p>
wysiwyg: |-
<p>__(__foo)</p>
-06_05__inlines__emphasis_and_strong_emphasis__49:
+06_05__inlines__emphasis_and_strong_emphasis__049:
canonical: |
<p><em>(<strong>foo</strong>)</em></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><em>(<strong>foo</strong>)</em></p>
wysiwyg: |-
<p><em>(</em><strong>foo</strong>)</p>
-06_05__inlines__emphasis_and_strong_emphasis__50:
+06_05__inlines__emphasis_and_strong_emphasis__050:
canonical: |
<p>__foo__bar</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">__foo__bar</p>
wysiwyg: |-
<p>__foo__bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__51:
+06_05__inlines__emphasis_and_strong_emphasis__051:
canonical: |
<p>__пристаням__стремятся</p>
static: |-
<p data-sourcepos="1:1-1:40" dir="auto">__пристаням__стремятся</p>
wysiwyg: |-
<p>__пристаням__стремятся</p>
-06_05__inlines__emphasis_and_strong_emphasis__52:
+06_05__inlines__emphasis_and_strong_emphasis__052:
canonical: |
<p><strong>foo__bar__baz</strong></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><strong>foo__bar__baz</strong></p>
wysiwyg: |-
<p><strong>foo__bar__baz</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__53:
+06_05__inlines__emphasis_and_strong_emphasis__053:
canonical: |
<p><strong>(bar)</strong>.</p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><strong>(bar)</strong>.</p>
wysiwyg: |-
<p><strong>(bar)</strong>.</p>
-06_05__inlines__emphasis_and_strong_emphasis__54:
+06_05__inlines__emphasis_and_strong_emphasis__054:
canonical: |
<p><em>foo <a href="/url">bar</a></em></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><em>foo <a href="/url">bar</a></em></p>
wysiwyg: |-
<p><em>foo </em><a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
-06_05__inlines__emphasis_and_strong_emphasis__55:
+06_05__inlines__emphasis_and_strong_emphasis__055:
canonical: |
<p><em>foo
bar</em></p>
static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><em>foo&#x000A;bar</em></p>
+ <p data-sourcepos="1:1-2:4" dir="auto"><em>foo
+ bar</em></p>
wysiwyg: |-
<p><em>foo
bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__56:
+06_05__inlines__emphasis_and_strong_emphasis__056:
canonical: |
<p><em>foo <strong>bar</strong> baz</em></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><em>foo <strong>bar</strong> baz</em></p>
wysiwyg: |-
<p><em>foo </em><strong>bar</strong> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__57:
+06_05__inlines__emphasis_and_strong_emphasis__057:
canonical: |
<p><em>foo <em>bar</em> baz</em></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><em>foo <em>bar</em> baz</em></p>
wysiwyg: |-
<p><em>foo bar</em> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__58:
+06_05__inlines__emphasis_and_strong_emphasis__058:
canonical: |
<p><em><em>foo</em> bar</em></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><em><em>foo</em> bar</em></p>
wysiwyg: |-
<p><em>foo</em> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__59:
+06_05__inlines__emphasis_and_strong_emphasis__059:
canonical: |
<p><em>foo <em>bar</em></em></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><em>foo <em>bar</em></em></p>
wysiwyg: |-
<p><em>foo bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__60:
+06_05__inlines__emphasis_and_strong_emphasis__060:
canonical: |
<p><em>foo <strong>bar</strong> baz</em></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><em>foo <strong>bar</strong> baz</em></p>
wysiwyg: |-
<p><em>foo </em><strong>bar</strong> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__61:
+06_05__inlines__emphasis_and_strong_emphasis__061:
canonical: |
<p><em>foo<strong>bar</strong>baz</em></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><em>foo<strong>bar</strong>baz</em></p>
wysiwyg: |-
<p><em>foo</em><strong>bar</strong>baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__62:
+06_05__inlines__emphasis_and_strong_emphasis__062:
canonical: |
<p><em>foo**bar</em></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><em>foo**bar</em></p>
wysiwyg: |-
<p><em>foo**bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__63:
+06_05__inlines__emphasis_and_strong_emphasis__063:
canonical: |
<p><em><strong>foo</strong> bar</em></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><em><strong>foo</strong> bar</em></p>
wysiwyg: |-
<p><strong><em>foo</em></strong> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__64:
+06_05__inlines__emphasis_and_strong_emphasis__064:
canonical: |
<p><em>foo <strong>bar</strong></em></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><em>foo <strong>bar</strong></em></p>
wysiwyg: |-
<p><em>foo </em><strong>bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__65:
+06_05__inlines__emphasis_and_strong_emphasis__065:
canonical: |
<p><em>foo<strong>bar</strong></em></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto"><em>foo<strong>bar</strong></em></p>
wysiwyg: |-
<p><em>foo</em><strong>bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__66:
+06_05__inlines__emphasis_and_strong_emphasis__066:
canonical: |
<p>foo<em><strong>bar</strong></em>baz</p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto">foo<em><strong>bar</strong></em>baz</p>
wysiwyg: |-
<p>foo<strong><em>bar</em></strong>baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__67:
+06_05__inlines__emphasis_and_strong_emphasis__067:
canonical: |
<p>foo<strong><strong><strong>bar</strong></strong></strong>***baz</p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto">foo<strong><strong><strong>bar</strong></strong></strong>***baz</p>
wysiwyg: |-
<p>foo<strong>bar</strong>***baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__68:
+06_05__inlines__emphasis_and_strong_emphasis__068:
canonical: |
<p><em>foo <strong>bar <em>baz</em> bim</strong> bop</em></p>
static: |-
<p data-sourcepos="1:1-1:27" dir="auto"><em>foo <strong>bar <em>baz</em> bim</strong> bop</em></p>
wysiwyg: |-
<p><em>foo </em><strong>bar </strong><em>baz</em> bim bop</p>
-06_05__inlines__emphasis_and_strong_emphasis__69:
+06_05__inlines__emphasis_and_strong_emphasis__069:
canonical: |
<p><em>foo <a href="/url"><em>bar</em></a></em></p>
static: |-
<p data-sourcepos="1:1-1:19" dir="auto"><em>foo <a href="/url"><em>bar</em></a></em></p>
wysiwyg: |-
<p><em>foo </em><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><em>bar</em></a></p>
-06_05__inlines__emphasis_and_strong_emphasis__70:
+06_05__inlines__emphasis_and_strong_emphasis__070:
canonical: |
<p>** is not an empty emphasis</p>
static: |-
<p data-sourcepos="1:1-1:27" dir="auto">** is not an empty emphasis</p>
wysiwyg: |-
<p>** is not an empty emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__71:
+06_05__inlines__emphasis_and_strong_emphasis__071:
canonical: |
<p>**** is not an empty strong emphasis</p>
static: |-
<p data-sourcepos="1:1-1:36" dir="auto">**** is not an empty strong emphasis</p>
wysiwyg: |-
<p>**** is not an empty strong emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__72:
+06_05__inlines__emphasis_and_strong_emphasis__072:
canonical: |
<p><strong>foo <a href="/url">bar</a></strong></p>
static: |-
<p data-sourcepos="1:1-1:19" dir="auto"><strong>foo <a href="/url">bar</a></strong></p>
wysiwyg: |-
<p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
-06_05__inlines__emphasis_and_strong_emphasis__73:
+06_05__inlines__emphasis_and_strong_emphasis__073:
canonical: |
<p><strong>foo
bar</strong></p>
static: |-
- <p data-sourcepos="1:1-2:5" dir="auto"><strong>foo&#x000A;bar</strong></p>
+ <p data-sourcepos="1:1-2:5" dir="auto"><strong>foo
+ bar</strong></p>
wysiwyg: |-
<p><strong>foo
bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__74:
+06_05__inlines__emphasis_and_strong_emphasis__074:
canonical: |
<p><strong>foo <em>bar</em> baz</strong></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><strong>foo <em>bar</em> baz</strong></p>
wysiwyg: |-
<p><strong>foo </strong><em>bar</em> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__75:
+06_05__inlines__emphasis_and_strong_emphasis__075:
canonical: |
<p><strong>foo <strong>bar</strong> baz</strong></p>
static: |-
<p data-sourcepos="1:1-1:19" dir="auto"><strong>foo <strong>bar</strong> baz</strong></p>
wysiwyg: |-
<p><strong>foo bar</strong> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__76:
+06_05__inlines__emphasis_and_strong_emphasis__076:
canonical: |
<p><strong><strong>foo</strong> bar</strong></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><strong><strong>foo</strong> bar</strong></p>
wysiwyg: |-
<p><strong>foo</strong> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__77:
+06_05__inlines__emphasis_and_strong_emphasis__077:
canonical: |
<p><strong>foo <strong>bar</strong></strong></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><strong>foo <strong>bar</strong></strong></p>
wysiwyg: |-
<p><strong>foo bar</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__78:
+06_05__inlines__emphasis_and_strong_emphasis__078:
canonical: |
<p><strong>foo <em>bar</em> baz</strong></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><strong>foo <em>bar</em> baz</strong></p>
wysiwyg: |-
<p><strong>foo </strong><em>bar</em> baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__79:
+06_05__inlines__emphasis_and_strong_emphasis__079:
canonical: |
<p><strong>foo<em>bar</em>baz</strong></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><strong>foo<em>bar</em>baz</strong></p>
wysiwyg: |-
<p><strong>foo</strong><em>bar</em>baz</p>
-06_05__inlines__emphasis_and_strong_emphasis__80:
+06_05__inlines__emphasis_and_strong_emphasis__080:
canonical: |
<p><strong><em>foo</em> bar</strong></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><strong><em>foo</em> bar</strong></p>
wysiwyg: |-
<p><strong><em>foo</em></strong> bar</p>
-06_05__inlines__emphasis_and_strong_emphasis__81:
+06_05__inlines__emphasis_and_strong_emphasis__081:
canonical: |
<p><strong>foo <em>bar</em></strong></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><strong>foo <em>bar</em></strong></p>
wysiwyg: |-
<p><strong>foo </strong><em>bar</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__82:
+06_05__inlines__emphasis_and_strong_emphasis__082:
canonical: |
<p><strong>foo <em>bar <strong>baz</strong>
bim</em> bop</strong></p>
static: |-
- <p data-sourcepos="1:1-2:10" dir="auto"><strong>foo <em>bar <strong>baz</strong>&#x000A;bim</em> bop</strong></p>
+ <p data-sourcepos="1:1-2:10" dir="auto"><strong>foo <em>bar <strong>baz</strong>
+ bim</em> bop</strong></p>
wysiwyg: |-
<p><strong>foo </strong><em>bar </em><strong>baz</strong>
bim bop</p>
-06_05__inlines__emphasis_and_strong_emphasis__83:
+06_05__inlines__emphasis_and_strong_emphasis__083:
canonical: |
<p><strong>foo <a href="/url"><em>bar</em></a></strong></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto"><strong>foo <a href="/url"><em>bar</em></a></strong></p>
wysiwyg: |-
<p><strong>foo </strong><a target="_blank" rel="noopener noreferrer nofollow" href="/url"><em>bar</em></a></p>
-06_05__inlines__emphasis_and_strong_emphasis__84:
+06_05__inlines__emphasis_and_strong_emphasis__084:
canonical: |
<p>__ is not an empty emphasis</p>
static: |-
<p data-sourcepos="1:1-1:27" dir="auto">__ is not an empty emphasis</p>
wysiwyg: |-
<p>__ is not an empty emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__85:
+06_05__inlines__emphasis_and_strong_emphasis__085:
canonical: |
<p>____ is not an empty strong emphasis</p>
static: |-
<p data-sourcepos="1:1-1:36" dir="auto">____ is not an empty strong emphasis</p>
wysiwyg: |-
<p>____ is not an empty strong emphasis</p>
-06_05__inlines__emphasis_and_strong_emphasis__86:
+06_05__inlines__emphasis_and_strong_emphasis__086:
canonical: |
<p>foo ***</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">foo ***</p>
wysiwyg: |-
<p>foo ***</p>
-06_05__inlines__emphasis_and_strong_emphasis__87:
+06_05__inlines__emphasis_and_strong_emphasis__087:
canonical: |
<p>foo <em>*</em></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">foo <em>*</em></p>
wysiwyg: |-
<p>foo <em>*</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__88:
+06_05__inlines__emphasis_and_strong_emphasis__088:
canonical: |
<p>foo <em>_</em></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">foo <em>_</em></p>
wysiwyg: |-
<p>foo <em>_</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__89:
+06_05__inlines__emphasis_and_strong_emphasis__089:
canonical: |
<p>foo *****</p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto">foo *****</p>
wysiwyg: |-
<p>foo *****</p>
-06_05__inlines__emphasis_and_strong_emphasis__90:
+06_05__inlines__emphasis_and_strong_emphasis__090:
canonical: |
<p>foo <strong>*</strong></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto">foo <strong>*</strong></p>
wysiwyg: |-
<p>foo <strong>*</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__91:
+06_05__inlines__emphasis_and_strong_emphasis__091:
canonical: |
<p>foo <strong>_</strong></p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto">foo <strong>_</strong></p>
wysiwyg: |-
<p>foo <strong>_</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__92:
+06_05__inlines__emphasis_and_strong_emphasis__092:
canonical: |
<p>*<em>foo</em></p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto">*<em>foo</em></p>
wysiwyg: |-
<p>*<em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__93:
+06_05__inlines__emphasis_and_strong_emphasis__093:
canonical: |
<p><em>foo</em>*</p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto"><em>foo</em>*</p>
wysiwyg: |-
<p><em>foo</em>*</p>
-06_05__inlines__emphasis_and_strong_emphasis__94:
+06_05__inlines__emphasis_and_strong_emphasis__094:
canonical: |
<p>*<strong>foo</strong></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">*<strong>foo</strong></p>
wysiwyg: |-
<p>*<strong>foo</strong></p>
-06_05__inlines__emphasis_and_strong_emphasis__95:
+06_05__inlines__emphasis_and_strong_emphasis__095:
canonical: |
<p>***<em>foo</em></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto">***<em>foo</em></p>
wysiwyg: |-
<p>***<em>foo</em></p>
-06_05__inlines__emphasis_and_strong_emphasis__96:
+06_05__inlines__emphasis_and_strong_emphasis__096:
canonical: |
<p><strong>foo</strong>*</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><strong>foo</strong>*</p>
wysiwyg: |-
<p><strong>foo</strong>*</p>
-06_05__inlines__emphasis_and_strong_emphasis__97:
+06_05__inlines__emphasis_and_strong_emphasis__097:
canonical: |
<p><em>foo</em>***</p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><em>foo</em>***</p>
wysiwyg: |-
<p><em>foo</em>***</p>
-06_05__inlines__emphasis_and_strong_emphasis__98:
+06_05__inlines__emphasis_and_strong_emphasis__098:
canonical: |
<p>foo ___</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">foo ___</p>
wysiwyg: |-
<p>foo ___</p>
-06_05__inlines__emphasis_and_strong_emphasis__99:
+06_05__inlines__emphasis_and_strong_emphasis__099:
canonical: |
<p>foo <em>_</em></p>
static: |-
@@ -4715,73 +6052,75 @@
<p data-sourcepos="1:1-1:25" dir="auto">__a<a href="http://foo.bar/?q=__" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar/?q=__</a></p>
wysiwyg: |-
<p>__a<a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar/?q=__">http://foo.bar/?q=__</a></p>
-06_06__inlines__strikethrough_extension__01:
+06_06__inlines__strikethrough_extension__001:
canonical: |
<p><del>Hi</del> Hello, world!</p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
wysiwyg: |-
- <p>~~Hi~~ Hello, world!</p>
-06_06__inlines__strikethrough_extension__02:
+ <p><s>Hi</s> Hello, world!</p>
+06_06__inlines__strikethrough_extension__002:
canonical: |
<p>This ~~has a</p>
<p>new paragraph~~.</p>
static: |-
- <p data-sourcepos="1:1-1:12" dir="auto">This ~~has a</p>&#x000A;<p data-sourcepos="3:1-3:16" dir="auto">new paragraph~~.</p>
+ <p data-sourcepos="1:1-1:12" dir="auto">This ~~has a</p>
+ <p data-sourcepos="3:1-3:16" dir="auto">new paragraph~~.</p>
wysiwyg: |-
<p>This ~~has a</p>
-06_07__inlines__links__01:
+06_07__inlines__links__001:
canonical: |
<p><a href="/uri" title="title">link</a></p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><a href="/uri" title="title">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri" title="title">link</a></p>
-06_07__inlines__links__02:
+06_07__inlines__links__002:
canonical: |
<p><a href="/uri">link</a></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto"><a href="/uri">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link</a></p>
-06_07__inlines__links__03:
+06_07__inlines__links__003:
canonical: |
<p><a href="">link</a></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><a href="">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="">link</a></p>
-06_07__inlines__links__04:
+06_07__inlines__links__004:
canonical: |
<p><a href="">link</a></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><a href="">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="">link</a></p>
-06_07__inlines__links__05:
+06_07__inlines__links__005:
canonical: |
<p>[link](/my uri)</p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><a href="/my%20uri">link</a></p>
wysiwyg: |-
<p>[link](/my uri)</p>
-06_07__inlines__links__06:
+06_07__inlines__links__006:
canonical: |
<p><a href="/my%20uri">link</a></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><a href="/my%20uri">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/my%20uri">link</a></p>
-06_07__inlines__links__07:
+06_07__inlines__links__007:
canonical: |
<p>[link](foo
bar)</p>
static: |-
- <p data-sourcepos="1:1-2:4" dir="auto">[link](foo&#x000A;bar)</p>
+ <p data-sourcepos="1:1-2:4" dir="auto">[link](foo
+ bar)</p>
wysiwyg: |-
<p>[link](foo
bar)</p>
-06_07__inlines__links__08:
+06_07__inlines__links__008:
canonical: |
<p>[link](<foo
bar>)</p>
@@ -4790,183 +6129,189 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "foo" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__09:
+06_07__inlines__links__009:
canonical: |
<p><a href="b)c">a</a></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><a href="b)c">a</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="b)c">a</a></p>
-06_07__inlines__links__10:
+06_07__inlines__links__010:
canonical: |
<p>[link](&lt;foo&gt;)</p>
static: |-
<p data-sourcepos="1:1-1:14" dir="auto"><a href="%3Cfoo%3E">link</a></p>
wysiwyg: |-
<p>[link](&lt;foo&gt;)</p>
-06_07__inlines__links__11:
+06_07__inlines__links__011:
canonical: |
<p>[a](&lt;b)c
[a](&lt;b)c&gt;
[a](<b>c)</p>
static: |-
- <p data-sourcepos="1:1-3:9" dir="auto"><a href="%3Cb">a</a>c&#x000A;<a href="%3Cb">a</a>c&gt;&#x000A;[a](<b>c)</b></p>
+ <p data-sourcepos="1:1-3:9" dir="auto"><a href="%3Cb">a</a>c
+ <a href="%3Cb">a</a>c&gt;
+ [a](<b>c)</b></p>
wysiwyg: |-
<p>[a](&lt;b)c
[a](&lt;b)c&gt;
[a](<strong>c)</strong></p>
-06_07__inlines__links__12:
+06_07__inlines__links__012:
canonical: |
<p><a href="(foo)">link</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><a href="(foo)">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="(foo)">link</a></p>
-06_07__inlines__links__13:
+06_07__inlines__links__013:
canonical: |
<p><a href="foo(and(bar))">link</a></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto"><a href="foo(and(bar))">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="foo(and(bar))">link</a></p>
-06_07__inlines__links__14:
+06_07__inlines__links__014:
canonical: |
<p><a href="foo(and(bar)">link</a></p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto"><a href="foo(and(bar)">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="foo(and(bar)">link</a></p>
-06_07__inlines__links__15:
+06_07__inlines__links__015:
canonical: |
<p><a href="foo(and(bar)">link</a></p>
static: |-
<p data-sourcepos="1:1-1:22" dir="auto"><a href="foo(and(bar)">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="foo(and(bar)">link</a></p>
-06_07__inlines__links__16:
+06_07__inlines__links__016:
canonical: |
<p><a href="foo):">link</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><a>link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="foo):">link</a></p>
-06_07__inlines__links__17:
+06_07__inlines__links__017:
canonical: |
<p><a href="#fragment">link</a></p>
<p><a href="http://example.com#fragment">link</a></p>
<p><a href="http://example.com?foo=3#frag">link</a></p>
static: |-
- <p data-sourcepos="1:1-1:17" dir="auto"><a href="#fragment">link</a></p>&#x000A;<p data-sourcepos="3:1-3:35" dir="auto"><a href="http://example.com#fragment" rel="nofollow noreferrer noopener" target="_blank">link</a></p>&#x000A;<p data-sourcepos="5:1-5:37" dir="auto"><a href="http://example.com?foo=3#frag" rel="nofollow noreferrer noopener" target="_blank">link</a></p>
+ <p data-sourcepos="1:1-1:17" dir="auto"><a href="#fragment">link</a></p>
+ <p data-sourcepos="3:1-3:35" dir="auto"><a href="http://example.com#fragment" rel="nofollow noreferrer noopener" target="_blank">link</a></p>
+ <p data-sourcepos="5:1-5:37" dir="auto"><a href="http://example.com?foo=3#frag" rel="nofollow noreferrer noopener" target="_blank">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="#fragment">link</a></p>
-06_07__inlines__links__18:
+06_07__inlines__links__018:
canonical: |
<p><a href="foo%5Cbar">link</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><a href="foo%5Cbar">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="foo%5Cbar">link</a></p>
-06_07__inlines__links__19:
+06_07__inlines__links__019:
canonical: |
<p><a href="foo%20b%C3%A4">link</a></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto"><a href="foo%20b%C3%A4">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="foo%20b%C3%A4">link</a></p>
-06_07__inlines__links__20:
+06_07__inlines__links__020:
canonical: |
<p><a href="%22title%22">link</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><a href="%22title%22">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="%22title%22">link</a></p>
-06_07__inlines__links__21:
+06_07__inlines__links__021:
canonical: |
<p><a href="/url" title="title">link</a>
<a href="/url" title="title">link</a>
<a href="/url" title="title">link</a></p>
static: |-
- <p data-sourcepos="1:1-3:20" dir="auto"><a href="/url" title="title">link</a>&#x000A;<a href="/url" title="title">link</a>&#x000A;<a href="/url" title="title">link</a></p>
+ <p data-sourcepos="1:1-3:20" dir="auto"><a href="/url" title="title">link</a>
+ <a href="/url" title="title">link</a>
+ <a href="/url" title="title">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">linklinklink</a></p>
-06_07__inlines__links__22:
+06_07__inlines__links__022:
canonical: |
<p><a href="/url" title="title &quot;&quot;">link</a></p>
static: |-
<p data-sourcepos="1:1-1:29" dir="auto"><a href="/url" title='title ""'>link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title &quot;&quot;">link</a></p>
-06_07__inlines__links__23:
+06_07__inlines__links__023:
canonical: |
<p><a href="/url%C2%A0%22title%22">link</a></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto"><a href="/url%C2%A0%22title%22">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url%C2%A0%22title%22">link</a></p>
-06_07__inlines__links__24:
+06_07__inlines__links__024:
canonical: |
<p>[link](/url &quot;title &quot;and&quot; title&quot;)</p>
static: |-
<p data-sourcepos="1:1-1:32" dir="auto">[link](/url "title "and" title")</p>
wysiwyg: |-
<p>[link](/url "title "and" title")</p>
-06_07__inlines__links__25:
+06_07__inlines__links__025:
canonical: |
<p><a href="/url" title="title &quot;and&quot; title">link</a></p>
static: |-
<p data-sourcepos="1:1-1:32" dir="auto"><a href="/url" title='title "and" title'>link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title &quot;and&quot; title">link</a></p>
-06_07__inlines__links__26:
+06_07__inlines__links__026:
canonical: |
<p><a href="/uri" title="title">link</a></p>
static: |-
<p data-sourcepos="1:1-2:12" dir="auto"><a href="/uri" title="title">link</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri" title="title">link</a></p>
-06_07__inlines__links__27:
+06_07__inlines__links__027:
canonical: |
<p>[link] (/uri)</p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">[link] (/uri)</p>
wysiwyg: |-
<p>[link] (/uri)</p>
-06_07__inlines__links__28:
+06_07__inlines__links__028:
canonical: |
<p><a href="/uri">link [foo [bar]]</a></p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto"><a href="/uri">link [foo [bar]]</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [foo [bar]]</a></p>
-06_07__inlines__links__29:
+06_07__inlines__links__029:
canonical: |
<p>[link] bar](/uri)</p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto">[link] bar](/uri)</p>
wysiwyg: |-
<p>[link] bar](/uri)</p>
-06_07__inlines__links__30:
+06_07__inlines__links__030:
canonical: |
<p>[link <a href="/uri">bar</a></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto">[link <a href="/uri">bar</a></p>
wysiwyg: |-
<p>[link <a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar</a></p>
-06_07__inlines__links__31:
+06_07__inlines__links__031:
canonical: |
<p><a href="/uri">link [bar</a></p>
static: |-
<p data-sourcepos="1:1-1:18" dir="auto"><a href="/uri">link [bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [bar</a></p>
-06_07__inlines__links__32:
+06_07__inlines__links__032:
canonical: |
<p><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
static: |-
<p data-sourcepos="1:1-1:30" dir="auto"><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link </a><em>foo </em><strong>bar</strong><code>#</code></p>
-06_07__inlines__links__33:
+06_07__inlines__links__033:
canonical: |
<p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
static: |-
@@ -4974,49 +6319,49 @@
wysiwyg: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__34:
+06_07__inlines__links__034:
canonical: |
<p>[foo <a href="/uri">bar</a>](/uri)</p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto">[foo <a href="/uri">bar</a>](/uri)</p>
wysiwyg: |-
<p>[foo <a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar</a>](/uri)</p>
-06_07__inlines__links__35:
+06_07__inlines__links__035:
canonical: |
<p>[foo <em>[bar <a href="/uri">baz</a>](/uri)</em>](/uri)</p>
static: |-
<p data-sourcepos="1:1-1:37" dir="auto">[foo <em>[bar <a href="/uri">baz</a>](/uri)</em>](/uri)</p>
wysiwyg: |-
<p>[foo <em>[bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">baz</a>](/uri)](/uri)</p>
-06_07__inlines__links__36:
+06_07__inlines__links__036:
canonical: |
<p><img src="uri3" alt="[foo](uri2)" /></p>
static: |-
<p data-sourcepos="1:1-1:28" dir="auto"><a class="no-attachment-icon" href="uri3" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="[foo](uri2)" decoding="async" class="lazy" data-src="uri3"></a></p>
wysiwyg: |-
<p><img src="uri3" alt="[foo](uri2)"></p>
-06_07__inlines__links__37:
+06_07__inlines__links__037:
canonical: |
<p>*<a href="/uri">foo*</a></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">*<a href="/uri">foo*</a></p>
wysiwyg: |-
<p>*<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo*</a></p>
-06_07__inlines__links__38:
+06_07__inlines__links__038:
canonical: |
<p><a href="baz*">foo *bar</a></p>
static: |-
<p data-sourcepos="1:1-1:16" dir="auto"><a href="baz*">foo *bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="baz*">foo *bar</a></p>
-06_07__inlines__links__39:
+06_07__inlines__links__039:
canonical: |
<p><em>foo [bar</em> baz]</p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><em>foo [bar</em> baz]</p>
wysiwyg: |-
<p><em>foo [bar</em> baz]</p>
-06_07__inlines__links__40:
+06_07__inlines__links__040:
canonical: |
<p>[foo <bar attr="](baz)"></p>
static: |-
@@ -5024,49 +6369,49 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__41:
+06_07__inlines__links__041:
canonical: |
<p>[foo<code>](/uri)</code></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">[foo<code>](/uri)</code></p>
wysiwyg: |-
<p>[foo<code>](/uri)</code></p>
-06_07__inlines__links__42:
+06_07__inlines__links__042:
canonical: |
<p>[foo<a href="http://example.com/?search=%5D(uri)">http://example.com/?search=](uri)</a></p>
static: |-
<p data-sourcepos="1:1-1:39" dir="auto">[foo<a href="http://example.com/?search=%5D(uri)" rel="nofollow noreferrer noopener" target="_blank">http://example.com/?search=](uri)</a></p>
wysiwyg: |-
<p>[foo<a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com/?search=%5D(uri)">http://example.com/?search=](uri)</a></p>
-06_07__inlines__links__43:
+06_07__inlines__links__043:
canonical: |
<p><a href="/url" title="title">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><a href="/url" title="title">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__44:
+06_07__inlines__links__044:
canonical: |
<p><a href="/uri">link [foo [bar]]</a></p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto"><a href="/uri">link [foo [bar]]</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [foo [bar]]</a></p>
-06_07__inlines__links__45:
+06_07__inlines__links__045:
canonical: |
<p><a href="/uri">link [bar</a></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><a href="/uri">link [bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link [bar</a></p>
-06_07__inlines__links__46:
+06_07__inlines__links__046:
canonical: |
<p><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
static: |-
<p data-sourcepos="1:1-1:29" dir="auto"><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">link </a><em>foo </em><strong>bar</strong><code>#</code></p>
-06_07__inlines__links__47:
+06_07__inlines__links__047:
canonical: |
<p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
static: |-
@@ -5074,35 +6419,35 @@
wysiwyg: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__48:
+06_07__inlines__links__048:
canonical: |
<p>[foo <a href="/uri">bar</a>]<a href="/uri">ref</a></p>
static: |-
<p data-sourcepos="1:1-1:22" dir="auto">[foo <a href="/uri">bar</a>]<a href="/uri">ref</a></p>
wysiwyg: |-
<p>[foo <a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar</a>]<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">ref</a></p>
-06_07__inlines__links__49:
+06_07__inlines__links__049:
canonical: |
<p>[foo <em>bar <a href="/uri">baz</a></em>]<a href="/uri">ref</a></p>
static: |-
<p data-sourcepos="1:1-1:27" dir="auto">[foo <em>bar <a href="/uri">baz</a></em>]<a href="/uri">ref</a></p>
wysiwyg: |-
<p>[foo <em>bar </em><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">baz</a>]<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">ref</a></p>
-06_07__inlines__links__50:
+06_07__inlines__links__050:
canonical: |
<p>*<a href="/uri">foo*</a></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">*<a href="/uri">foo*</a></p>
wysiwyg: |-
<p>*<a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo*</a></p>
-06_07__inlines__links__51:
+06_07__inlines__links__051:
canonical: |
<p><a href="/uri">foo *bar</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><a href="/uri">foo *bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo *bar</a></p>
-06_07__inlines__links__52:
+06_07__inlines__links__052:
canonical: |
<p>[foo <bar attr="][ref]"></p>
static: |-
@@ -5110,641 +6455,663 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__53:
+06_07__inlines__links__053:
canonical: |
<p>[foo<code>][ref]</code></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto">[foo<code>][ref]</code></p>
wysiwyg: |-
<p>[foo<code>][ref]</code></p>
-06_07__inlines__links__54:
+06_07__inlines__links__054:
canonical: |
<p>[foo<a href="http://example.com/?search=%5D%5Bref%5D">http://example.com/?search=][ref]</a></p>
static: |-
<p data-sourcepos="1:1-1:39" dir="auto">[foo<a href="http://example.com/?search=%5D%5Bref%5D" rel="nofollow noreferrer noopener" target="_blank">http://example.com/?search=][ref]</a></p>
wysiwyg: |-
<p>[foo<a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com/?search=%5D%5Bref%5D">http://example.com/?search=][ref]</a></p>
-06_07__inlines__links__55:
+06_07__inlines__links__055:
canonical: |
<p><a href="/url" title="title">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><a href="/url" title="title">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__56:
+06_07__inlines__links__056:
canonical: |
<p><a href="/url">Толпой</a> is a Russian word.</p>
static: |-
<p data-sourcepos="1:1-1:47" dir="auto"><a href="/url">Толпой</a> is a Russian word.</p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Толпой</a> is a Russian word.</p>
-06_07__inlines__links__57:
+06_07__inlines__links__057:
canonical: |
<p><a href="/url">Baz</a></p>
static: |-
<p data-sourcepos="4:1-4:14" dir="auto"><a href="/url">Baz</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">Baz</a></p>
-06_07__inlines__links__58:
+06_07__inlines__links__058:
canonical: |
<p>[foo] <a href="/url" title="title">bar</a></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto">[foo] <a href="/url" title="title">bar</a></p>
wysiwyg: |-
<p>[foo] <a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">bar</a></p>
-06_07__inlines__links__59:
+06_07__inlines__links__059:
canonical: |
<p>[foo]
<a href="/url" title="title">bar</a></p>
static: |-
- <p data-sourcepos="1:1-2:5" dir="auto">[foo]&#x000A;<a href="/url" title="title">bar</a></p>
+ <p data-sourcepos="1:1-2:5" dir="auto">[foo]
+ <a href="/url" title="title">bar</a></p>
wysiwyg: |-
<p>[foo]
<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">bar</a></p>
-06_07__inlines__links__60:
+06_07__inlines__links__060:
canonical: |
<p><a href="/url1">bar</a></p>
static: |-
<p data-sourcepos="5:1-5:10" dir="auto"><a href="/url1">bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">bar</a></p>
-06_07__inlines__links__61:
+06_07__inlines__links__061:
canonical: |
<p>[bar][foo!]</p>
static: |-
<p data-sourcepos="1:1-1:32" dir="auto">[bar][foo<span>!</span>]</p>
wysiwyg: |-
<p>[bar][foo!]</p>
-06_07__inlines__links__62:
+06_07__inlines__links__062:
canonical: |
<p>[foo][ref[]</p>
<p>[ref[]: /uri</p>
static: |-
- <p data-sourcepos="1:1-1:11" dir="auto">[foo][ref[]</p>&#x000A;<p data-sourcepos="3:1-3:12" dir="auto">[ref[]: /uri</p>
+ <p data-sourcepos="1:1-1:11" dir="auto">[foo][ref[]</p>
+ <p data-sourcepos="3:1-3:12" dir="auto">[ref[]: /uri</p>
wysiwyg: |-
<p>[foo][ref[]</p>
-06_07__inlines__links__63:
+06_07__inlines__links__063:
canonical: |
<p>[foo][ref[bar]]</p>
<p>[ref[bar]]: /uri</p>
static: |-
- <p data-sourcepos="1:1-1:15" dir="auto">[foo][ref[bar]]</p>&#x000A;<p data-sourcepos="3:1-3:16" dir="auto">[ref[bar]]: /uri</p>
+ <p data-sourcepos="1:1-1:15" dir="auto">[foo][ref[bar]]</p>
+ <p data-sourcepos="3:1-3:16" dir="auto">[ref[bar]]: /uri</p>
wysiwyg: |-
<p>[foo][ref[bar]]</p>
-06_07__inlines__links__64:
+06_07__inlines__links__064:
canonical: |
<p>[[[foo]]]</p>
<p>[[[foo]]]: /url</p>
static: |-
- <p data-sourcepos="1:1-1:9" dir="auto">[[[foo]]]</p>&#x000A;<p data-sourcepos="3:1-3:15" dir="auto">[[[foo]]]: /url</p>
+ <p data-sourcepos="1:1-1:9" dir="auto">[[[foo]]]</p>
+ <p data-sourcepos="3:1-3:15" dir="auto">[[[foo]]]: /url</p>
wysiwyg: |-
<p>[[[foo]]]</p>
-06_07__inlines__links__65:
+06_07__inlines__links__065:
canonical: |
<p><a href="/uri">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto"><a href="/uri">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">foo</a></p>
-06_07__inlines__links__66:
+06_07__inlines__links__066:
canonical: |
<p><a href="/uri">bar\</a></p>
static: |-
<p data-sourcepos="3:1-3:7" dir="auto"><a href="/uri">bar\</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/uri">bar\</a></p>
-06_07__inlines__links__67:
+06_07__inlines__links__067:
canonical: |
<p>[]</p>
<p>[]: /uri</p>
static: |-
- <p data-sourcepos="1:1-1:2" dir="auto">[]</p>&#x000A;<p data-sourcepos="3:1-3:8" dir="auto">[]: /uri</p>
+ <p data-sourcepos="1:1-1:2" dir="auto">[]</p>
+ <p data-sourcepos="3:1-3:8" dir="auto">[]: /uri</p>
wysiwyg: |-
<p>[]</p>
-06_07__inlines__links__68:
+06_07__inlines__links__068:
canonical: |
<p>[
]</p>
<p>[
]: /uri</p>
static: |-
- <p data-sourcepos="1:1-2:2" dir="auto">[&#x000A;]</p>&#x000A;<p data-sourcepos="4:1-5:8" dir="auto">[&#x000A;]: /uri</p>
+ <p data-sourcepos="1:1-2:2" dir="auto">[
+ ]</p>
+ <p data-sourcepos="4:1-5:8" dir="auto">[
+ ]: /uri</p>
wysiwyg: |-
<p>[
]</p>
-06_07__inlines__links__69:
+06_07__inlines__links__069:
canonical: |
<p><a href="/url" title="title">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><a href="/url" title="title">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__70:
+06_07__inlines__links__070:
canonical: |
<p><a href="/url" title="title"><em>foo</em> bar</a></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><a href="/url" title="title"><em>foo</em> bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar</p>
-06_07__inlines__links__71:
+06_07__inlines__links__071:
canonical: |
<p><a href="/url" title="title">Foo</a></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><a href="/url" title="title">Foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">Foo</a></p>
-06_07__inlines__links__72:
+06_07__inlines__links__072:
canonical: |
<p><a href="/url" title="title">foo</a>
[]</p>
static: |-
- <p data-sourcepos="1:1-2:2" dir="auto"><a href="/url" title="title">foo</a>&#x000A;[]</p>
+ <p data-sourcepos="1:1-2:2" dir="auto"><a href="/url" title="title">foo</a>
+ []</p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a>
[]</p>
-06_07__inlines__links__73:
+06_07__inlines__links__073:
canonical: |
<p><a href="/url" title="title">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto"><a href="/url" title="title">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_07__inlines__links__74:
+06_07__inlines__links__074:
canonical: |
<p><a href="/url" title="title"><em>foo</em> bar</a></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><a href="/url" title="title"><em>foo</em> bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar</p>
-06_07__inlines__links__75:
+06_07__inlines__links__075:
canonical: |
<p>[<a href="/url" title="title"><em>foo</em> bar</a>]</p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">[<a href="/url" title="title"><em>foo</em> bar</a>]</p>
wysiwyg: |-
<p>[<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title"><em>foo</em></a> bar]</p>
-06_07__inlines__links__76:
+06_07__inlines__links__076:
canonical: |
<p>[[bar <a href="/url">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto">[[bar <a href="/url">foo</a></p>
wysiwyg: |-
<p>[[bar <a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a></p>
-06_07__inlines__links__77:
+06_07__inlines__links__077:
canonical: |
<p><a href="/url" title="title">Foo</a></p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto"><a href="/url" title="title">Foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">Foo</a></p>
-06_07__inlines__links__78:
+06_07__inlines__links__078:
canonical: |
<p><a href="/url">foo</a> bar</p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto"><a href="/url">foo</a> bar</p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo</a> bar</p>
-06_07__inlines__links__79:
+06_07__inlines__links__079:
canonical: |
<p>[foo]</p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto">[foo]</p>
wysiwyg: |-
<p>[foo]</p>
-06_07__inlines__links__80:
+06_07__inlines__links__080:
canonical: |
<p>*<a href="/url">foo*</a></p>
static: |-
<p data-sourcepos="3:1-3:7" dir="auto">*<a href="/url">foo*</a></p>
wysiwyg: |-
<p>*<a target="_blank" rel="noopener noreferrer nofollow" href="/url">foo*</a></p>
-06_07__inlines__links__81:
+06_07__inlines__links__081:
canonical: |
<p><a href="/url2">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:10" dir="auto"><a href="/url2">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url2">foo</a></p>
-06_07__inlines__links__82:
+06_07__inlines__links__082:
canonical: |
<p><a href="/url1">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><a href="/url1">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">foo</a></p>
-06_07__inlines__links__83:
+06_07__inlines__links__083:
canonical: |
<p><a href="">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto"><a href="">foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="">foo</a></p>
-06_07__inlines__links__84:
+06_07__inlines__links__084:
canonical: |
<p><a href="/url1">foo</a>(not a link)</p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><a href="/url1">foo</a>(not a link)</p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">foo</a>(not a link)</p>
-06_07__inlines__links__85:
+06_07__inlines__links__085:
canonical: |
<p>[foo]<a href="/url">bar</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto">[foo]<a href="/url">bar</a></p>
wysiwyg: |-
<p>[foo]<a target="_blank" rel="noopener noreferrer nofollow" href="/url">bar</a></p>
-06_07__inlines__links__86:
+06_07__inlines__links__086:
canonical: |
<p><a href="/url2">foo</a><a href="/url1">baz</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto"><a href="/url2">foo</a><a href="/url1">baz</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="/url2">foo</a><a target="_blank" rel="noopener noreferrer nofollow" href="/url1">baz</a></p>
-06_07__inlines__links__87:
+06_07__inlines__links__087:
canonical: |
<p>[foo]<a href="/url1">bar</a></p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto">[foo]<a href="/url1">bar</a></p>
wysiwyg: |-
<p>[foo]<a target="_blank" rel="noopener noreferrer nofollow" href="/url1">bar</a></p>
-06_08__inlines__images__01:
+06_08__inlines__images__001:
canonical: |
<p><img src="/url" alt="foo" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="foo" title="title"></p>
-06_08__inlines__images__02:
+06_08__inlines__images__002:
canonical: |
<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="train &amp; tracks" decoding="async" class="lazy" data-src="train.jpg"></a></p>
wysiwyg: |-
<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks"></p>
-06_08__inlines__images__03:
+06_08__inlines__images__003:
canonical: |
<p><img src="/url2" alt="foo bar" /></p>
static: |-
<p data-sourcepos="1:1-1:26" dir="auto"><a class="no-attachment-icon" href="/url2" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" decoding="async" class="lazy" data-src="/url2"></a></p>
wysiwyg: |-
<p><img src="/url2" alt="foo bar"></p>
-06_08__inlines__images__04:
+06_08__inlines__images__004:
canonical: |
<p><img src="/url2" alt="foo bar" /></p>
static: |-
<p data-sourcepos="1:1-1:25" dir="auto"><a class="no-attachment-icon" href="/url2" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" decoding="async" class="lazy" data-src="/url2"></a></p>
wysiwyg: |-
<p><img src="/url2" alt="foo bar"></p>
-06_08__inlines__images__05:
+06_08__inlines__images__005:
canonical: |
<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
static: |-
<p data-sourcepos="1:1-1:14" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="train &amp; tracks" decoding="async" class="lazy" data-src="train.jpg"></a></p>
wysiwyg: |-
<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks"></p>
-06_08__inlines__images__06:
+06_08__inlines__images__006:
canonical: |
<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="train &amp; tracks" decoding="async" class="lazy" data-src="train.jpg"></a></p>
wysiwyg: |-
<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks"></p>
-06_08__inlines__images__07:
+06_08__inlines__images__007:
canonical: |
<p><img src="train.jpg" alt="foo" /></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto"><a class="no-attachment-icon" href="train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="train.jpg"></a></p>
wysiwyg: |-
<p><img src="train.jpg" alt="foo"></p>
-06_08__inlines__images__08:
+06_08__inlines__images__008:
canonical: |
<p>My <img src="/path/to/train.jpg" alt="foo bar" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:45" dir="auto">My <a class="no-attachment-icon" href="/path/to/train.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="title" decoding="async" class="lazy" data-src="/path/to/train.jpg"></a></p>
wysiwyg: |-
<p>My <img src="/path/to/train.jpg" alt="foo bar" title="title"></p>
-06_08__inlines__images__09:
+06_08__inlines__images__009:
canonical: |
<p><img src="url" alt="foo" /></p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto"><a class="no-attachment-icon" href="url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="url"></a></p>
wysiwyg: |-
<p><img src="url" alt="foo"></p>
-06_08__inlines__images__10:
+06_08__inlines__images__010:
canonical: |
<p><img src="/url" alt="" /></p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt=""></p>
-06_08__inlines__images__11:
+06_08__inlines__images__011:
canonical: |
<p><img src="/url" alt="foo" /></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="foo"></p>
-06_08__inlines__images__12:
+06_08__inlines__images__012:
canonical: |
<p><img src="/url" alt="foo" /></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="foo"></p>
-06_08__inlines__images__13:
+06_08__inlines__images__013:
canonical: |
<p><img src="/url" alt="foo" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="foo" title="title"></p>
-06_08__inlines__images__14:
+06_08__inlines__images__014:
canonical: |
<p><img src="/url" alt="foo bar" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:14" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="foo bar" title="title"></p>
-06_08__inlines__images__15:
+06_08__inlines__images__015:
canonical: |
<p><img src="/url" alt="Foo" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:8" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="Foo" title="title"></p>
-06_08__inlines__images__16:
+06_08__inlines__images__016:
canonical: |
<p><img src="/url" alt="foo" title="title" />
[]</p>
static: |-
- <p data-sourcepos="1:1-2:2" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a>&#x000A;[]</p>
+ <p data-sourcepos="1:1-2:2" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a>
+ []</p>
wysiwyg: |-
<p><img src="/url" alt="foo" title="title">
[]</p>
-06_08__inlines__images__17:
+06_08__inlines__images__017:
canonical: |
<p><img src="/url" alt="foo" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="foo" title="title"></p>
-06_08__inlines__images__18:
+06_08__inlines__images__018:
canonical: |
<p><img src="/url" alt="foo bar" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="foo bar" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="foo bar" title="title"></p>
-06_08__inlines__images__19:
+06_08__inlines__images__019:
canonical: |
<p>![[foo]]</p>
<p>[[foo]]: /url &quot;title&quot;</p>
static: |-
- <p data-sourcepos="1:1-1:8" dir="auto">![[foo]]</p>&#x000A;<p data-sourcepos="3:1-3:21" dir="auto">[[foo]]: /url "title"</p>
+ <p data-sourcepos="1:1-1:8" dir="auto">![[foo]]</p>
+ <p data-sourcepos="3:1-3:21" dir="auto">[[foo]]: /url "title"</p>
wysiwyg: |-
<p>![[foo]]</p>
-06_08__inlines__images__20:
+06_08__inlines__images__020:
canonical: |
<p><img src="/url" alt="Foo" title="title" /></p>
static: |-
<p data-sourcepos="1:1-1:6" dir="auto"><a class="no-attachment-icon" href="/url" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Foo" title="title" decoding="async" class="lazy" data-src="/url"></a></p>
wysiwyg: |-
<p><img src="/url" alt="Foo" title="title"></p>
-06_08__inlines__images__21:
+06_08__inlines__images__021:
canonical: |
<p>![foo]</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">![foo]</p>
wysiwyg: |-
<p>![foo]</p>
-06_08__inlines__images__22:
+06_08__inlines__images__022:
canonical: |
<p>!<a href="/url" title="title">foo</a></p>
static: |-
<p data-sourcepos="1:1-1:27" dir="auto"><span>!</span><a href="/url" title="title">foo</a></p>
wysiwyg: |-
<p>!<a target="_blank" rel="noopener noreferrer nofollow" href="/url" title="title">foo</a></p>
-06_09__inlines__autolinks__01:
+06_09__inlines__autolinks__001:
canonical: |
<p><a href="http://foo.bar.baz">http://foo.bar.baz</a></p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><a href="http://foo.bar.baz" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar.baz</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar.baz">http://foo.bar.baz</a></p>
-06_09__inlines__autolinks__02:
+06_09__inlines__autolinks__002:
canonical: |
<p><a href="http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean">http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean</a></p>
static: |-
<p data-sourcepos="1:1-1:47" dir="auto"><a href="http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean">http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean</a></p>
-06_09__inlines__autolinks__03:
+06_09__inlines__autolinks__003:
canonical: |
<p><a href="irc://foo.bar:2233/baz">irc://foo.bar:2233/baz</a></p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto"><a href="irc://foo.bar:2233/baz">irc://foo.bar:2233/baz</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="irc://foo.bar:2233/baz">irc://foo.bar:2233/baz</a></p>
-06_09__inlines__autolinks__04:
+06_09__inlines__autolinks__004:
canonical: |
<p><a href="MAILTO:FOO@BAR.BAZ">MAILTO:FOO@BAR.BAZ</a></p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><a href="mailto:FOO@BAR.BAZ">MAILTO:FOO@BAR.BAZ</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="MAILTO:FOO@BAR.BAZ">MAILTO:FOO@BAR.BAZ</a></p>
-06_09__inlines__autolinks__05:
+06_09__inlines__autolinks__005:
canonical: |
<p><a href="a+b+c:d">a+b+c:d</a></p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto"><a href="a+b+c:d">a+b+c:d</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="a+b+c:d">a+b+c:d</a></p>
-06_09__inlines__autolinks__06:
+06_09__inlines__autolinks__006:
canonical: |
<p><a href="made-up-scheme://foo,bar">made-up-scheme://foo,bar</a></p>
static: |-
<p data-sourcepos="1:1-1:26" dir="auto"><a href="made-up-scheme://foo,bar">made-up-scheme://foo,bar</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="made-up-scheme://foo,bar">made-up-scheme://foo,bar</a></p>
-06_09__inlines__autolinks__07:
+06_09__inlines__autolinks__007:
canonical: |
<p><a href="http://../">http://../</a></p>
static: |-
<p data-sourcepos="1:1-1:12" dir="auto"><a href="http://../" rel="nofollow noreferrer noopener" target="_blank">http://../</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="http://../">http://../</a></p>
-06_09__inlines__autolinks__08:
+06_09__inlines__autolinks__008:
canonical: |
<p><a href="localhost:5001/foo">localhost:5001/foo</a></p>
static: |-
<p data-sourcepos="1:1-1:20" dir="auto"><a href="localhost:5001/foo">localhost:5001/foo</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="localhost:5001/foo">localhost:5001/foo</a></p>
-06_09__inlines__autolinks__09:
+06_09__inlines__autolinks__009:
canonical: |
<p>&lt;http://foo.bar/baz bim&gt;</p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto">&lt;<a href="http://foo.bar/baz" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar/baz</a> bim&gt;</p>
wysiwyg: |-
- <p>&lt;http://foo.bar/baz bim&gt;</p>
-06_09__inlines__autolinks__10:
+ <p>&lt;<a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar/baz">http://foo.bar/baz</a> bim&gt;</p>
+06_09__inlines__autolinks__010:
canonical: |
<p><a href="http://example.com/%5C%5B%5C">http://example.com/\[\</a></p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto"><a href="http://example.com/%5C%5B%5C" rel="nofollow noreferrer noopener" target="_blank">http://example.com/\[\</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com/%5C%5B%5C">http://example.com/\[\</a></p>
-06_09__inlines__autolinks__11:
+06_09__inlines__autolinks__011:
canonical: |
<p><a href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto"><a href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
-06_09__inlines__autolinks__12:
+06_09__inlines__autolinks__012:
canonical: |
<p><a href="mailto:foo+special@Bar.baz-bar0.com">foo+special@Bar.baz-bar0.com</a></p>
static: |-
<p data-sourcepos="1:1-1:30" dir="auto"><a href="mailto:foo+special@Bar.baz-bar0.com">foo+special@Bar.baz-bar0.com</a></p>
wysiwyg: |-
<p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo+special@Bar.baz-bar0.com">foo+special@Bar.baz-bar0.com</a></p>
-06_09__inlines__autolinks__13:
+06_09__inlines__autolinks__013:
canonical: |
<p>&lt;foo+@bar.example.com&gt;</p>
static: |-
<p data-sourcepos="1:1-1:23" dir="auto">&lt;<a href="mailto:foo+@bar.example.com">foo+@bar.example.com</a>&gt;</p>
wysiwyg: |-
- <p>&lt;foo+@bar.example.com&gt;</p>
-06_09__inlines__autolinks__14:
+ Error - check implementation:
+ Cannot read properties of undefined (reading 'end')
+06_09__inlines__autolinks__014:
canonical: |
<p>&lt;&gt;</p>
static: |-
<p data-sourcepos="1:1-1:2" dir="auto">&lt;&gt;</p>
wysiwyg: |-
<p>&lt;&gt;</p>
-06_09__inlines__autolinks__15:
+06_09__inlines__autolinks__015:
canonical: |
<p>&lt; http://foo.bar &gt;</p>
static: |-
<p data-sourcepos="1:1-1:18" dir="auto">&lt; <a href="http://foo.bar" rel="nofollow noreferrer noopener" target="_blank">http://foo.bar</a> &gt;</p>
wysiwyg: |-
- <p>&lt; http://foo.bar &gt;</p>
-06_09__inlines__autolinks__16:
+ <p>&lt; <a target="_blank" rel="noopener noreferrer nofollow" href="http://foo.bar">http://foo.bar</a> &gt;</p>
+06_09__inlines__autolinks__016:
canonical: |
<p>&lt;m:abc&gt;</p>
static: |-
<p data-sourcepos="1:1-1:7" dir="auto">&lt;m:abc&gt;</p>
wysiwyg: |-
<p>&lt;m:abc&gt;</p>
-06_09__inlines__autolinks__17:
+06_09__inlines__autolinks__017:
canonical: |
<p>&lt;foo.bar.baz&gt;</p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">&lt;foo.bar.baz&gt;</p>
wysiwyg: |-
<p>&lt;foo.bar.baz&gt;</p>
-06_09__inlines__autolinks__18:
+06_09__inlines__autolinks__018:
canonical: |
<p>http://example.com</p>
static: |-
<p data-sourcepos="1:1-1:18" dir="auto"><a href="http://example.com" rel="nofollow noreferrer noopener" target="_blank">http://example.com</a></p>
wysiwyg: |-
- <p>http://example.com</p>
-06_09__inlines__autolinks__19:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://example.com">http://example.com</a></p>
+06_09__inlines__autolinks__019:
canonical: |
<p>foo@bar.example.com</p>
static: |-
<p data-sourcepos="1:1-1:19" dir="auto"><a href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
wysiwyg: |-
- <p>foo@bar.example.com</p>
-06_10__inlines__autolinks_extension__01:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
+06_10__inlines__autolinks_extension__001:
canonical: |
<p><a href="http://www.commonmark.org">www.commonmark.org</a></p>
static: |-
<p data-sourcepos="1:1-1:18" dir="auto"><a href="http://www.commonmark.org" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org</a></p>
wysiwyg: |-
- <p>www.commonmark.org</p>
-06_10__inlines__autolinks_extension__02:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org">www.commonmark.org</a></p>
+06_10__inlines__autolinks_extension__002:
canonical: |
<p>Visit <a href="http://www.commonmark.org/help">www.commonmark.org/help</a> for more information.</p>
static: |-
<p data-sourcepos="1:1-1:51" dir="auto">Visit <a href="http://www.commonmark.org/help" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org/help</a> for more information.</p>
wysiwyg: |-
- <p>Visit www.commonmark.org/help for more information.</p>
-06_10__inlines__autolinks_extension__03:
+ <p>Visit <a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org/help">www.commonmark.org/help</a> for more information.</p>
+06_10__inlines__autolinks_extension__003:
canonical: |
<p>Visit <a href="http://www.commonmark.org">www.commonmark.org</a>.</p>
<p>Visit <a href="http://www.commonmark.org/a.b">www.commonmark.org/a.b</a>.</p>
static: |-
- <p data-sourcepos="1:1-1:25" dir="auto">Visit <a href="http://www.commonmark.org" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org</a>.</p>&#x000A;<p data-sourcepos="3:1-3:29" dir="auto">Visit <a href="http://www.commonmark.org/a.b" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org/a.b</a>.</p>
+ <p data-sourcepos="1:1-1:25" dir="auto">Visit <a href="http://www.commonmark.org" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org</a>.</p>
+ <p data-sourcepos="3:1-3:29" dir="auto">Visit <a href="http://www.commonmark.org/a.b" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org/a.b</a>.</p>
wysiwyg: |-
- <p>Visit www.commonmark.org.</p>
-06_10__inlines__autolinks_extension__04:
+ <p>Visit <a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org">www.commonmark.org</a>.</p>
+06_10__inlines__autolinks_extension__004:
canonical: |
<p><a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a></p>
<p><a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a>))</p>
<p>(<a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a>)</p>
<p>(<a href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a></p>
static: |-
- <p data-sourcepos="1:1-1:41" dir="auto"><a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a></p>&#x000A;<p data-sourcepos="3:1-3:43" dir="auto"><a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a>))</p>&#x000A;<p data-sourcepos="5:1-5:43" dir="auto">(<a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a>)</p>&#x000A;<p data-sourcepos="7:1-7:42" dir="auto">(<a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a></p>
+ <p data-sourcepos="1:1-1:41" dir="auto"><a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a></p>
+ <p data-sourcepos="3:1-3:43" dir="auto"><a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a>))</p>
+ <p data-sourcepos="5:1-5:43" dir="auto">(<a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a>)</p>
+ <p data-sourcepos="7:1-7:42" dir="auto">(<a href="http://www.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=Markup+(business)</a></p>
wysiwyg: |-
- <p>www.google.com/search?q=Markup+(business)</p>
-06_10__inlines__autolinks_extension__05:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.google.com/search?q=Markup+(business)">www.google.com/search?q=Markup+(business)</a></p>
+06_10__inlines__autolinks_extension__005:
canonical: |
<p><a href="http://www.google.com/search?q=(business))+ok">www.google.com/search?q=(business))+ok</a></p>
static: |-
<p data-sourcepos="1:1-1:38" dir="auto"><a href="http://www.google.com/search?q=(business))+ok" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=(business))+ok</a></p>
wysiwyg: |-
- <p>www.google.com/search?q=(business))+ok</p>
-06_10__inlines__autolinks_extension__06:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.google.com/search?q=(business))+ok">www.google.com/search?q=(business))+ok</a></p>
+06_10__inlines__autolinks_extension__006:
canonical: |
<p><a href="http://www.google.com/search?q=commonmark&amp;hl=en">www.google.com/search?q=commonmark&amp;hl=en</a></p>
<p><a href="http://www.google.com/search?q=commonmark">www.google.com/search?q=commonmark</a>&amp;hl;</p>
static: |-
- <p data-sourcepos="1:1-1:40" dir="auto"><a href="http://www.google.com/search?q=commonmark&amp;hl=en" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=commonmark&amp;hl=en</a></p>&#x000A;<p data-sourcepos="3:1-3:38" dir="auto"><a href="http://www.google.com/search?q=commonmark" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=commonmark</a>&amp;hl;</p>
+ <p data-sourcepos="1:1-1:40" dir="auto"><a href="http://www.google.com/search?q=commonmark&amp;hl=en" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=commonmark&amp;hl=en</a></p>
+ <p data-sourcepos="3:1-3:38" dir="auto"><a href="http://www.google.com/search?q=commonmark" rel="nofollow noreferrer noopener" target="_blank">www.google.com/search?q=commonmark</a>&amp;hl;</p>
wysiwyg: |-
- <p>www.google.com/search?q=commonmark&amp;hl=en</p>
-06_10__inlines__autolinks_extension__07:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.google.com/search?q=commonmark&amp;hl=en">www.google.com/search?q=commonmark&amp;hl=en</a></p>
+06_10__inlines__autolinks_extension__007:
canonical: |
<p><a href="http://www.commonmark.org/he">www.commonmark.org/he</a>&lt;lp</p>
static: |-
<p data-sourcepos="1:1-1:24" dir="auto"><a href="http://www.commonmark.org/he" rel="nofollow noreferrer noopener" target="_blank">www.commonmark.org/he</a>&lt;lp</p>
wysiwyg: |-
- <p>www.commonmark.org/he&lt;lp</p>
-06_10__inlines__autolinks_extension__08:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.commonmark.org/he">www.commonmark.org/he</a>&lt;lp</p>
+06_10__inlines__autolinks_extension__008:
canonical: |
<p><a href="http://commonmark.org">http://commonmark.org</a></p>
<p>(Visit <a href="https://encrypted.google.com/search?q=Markup+(business)">https://encrypted.google.com/search?q=Markup+(business)</a>)</p>
<p>Anonymous FTP is available at <a href="ftp://foo.bar.baz">ftp://foo.bar.baz</a>.</p>
static: |-
- <p data-sourcepos="1:1-1:21" dir="auto"><a href="http://commonmark.org" rel="nofollow noreferrer noopener" target="_blank">http://commonmark.org</a></p>&#x000A;<p data-sourcepos="3:1-3:63" dir="auto">(Visit <a href="https://encrypted.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">https://encrypted.google.com/search?q=Markup+(business)</a>)</p>&#x000A;<p data-sourcepos="5:1-5:48" dir="auto">Anonymous FTP is available at <a href="ftp://foo.bar.baz/">ftp://foo.bar.baz</a>.</p>
+ <p data-sourcepos="1:1-1:21" dir="auto"><a href="http://commonmark.org" rel="nofollow noreferrer noopener" target="_blank">http://commonmark.org</a></p>
+ <p data-sourcepos="3:1-3:63" dir="auto">(Visit <a href="https://encrypted.google.com/search?q=Markup+(business)" rel="nofollow noreferrer noopener" target="_blank">https://encrypted.google.com/search?q=Markup+(business)</a>)</p>
+ <p data-sourcepos="5:1-5:48" dir="auto">Anonymous FTP is available at <a href="ftp://foo.bar.baz/">ftp://foo.bar.baz</a>.</p>
wysiwyg: |-
- <p>http://commonmark.org</p>
-06_10__inlines__autolinks_extension__09:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="http://commonmark.org">http://commonmark.org</a></p>
+06_10__inlines__autolinks_extension__009:
canonical: |
<p><a href="mailto:foo@bar.baz">foo@bar.baz</a></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"><a href="mailto:foo@bar.baz">foo@bar.baz</a></p>
wysiwyg: |-
- <p>foo@bar.baz</p>
-06_10__inlines__autolinks_extension__10:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:foo@bar.baz">foo@bar.baz</a></p>
+06_10__inlines__autolinks_extension__010:
canonical: |
<p>hello@mail+xyz.example isn't valid, but <a href="mailto:hello+xyz@mail.example">hello+xyz@mail.example</a> is.</p>
static: |-
<p data-sourcepos="1:1-1:66" dir="auto">hello@mail+xyz.example isn't valid, but <a href="mailto:hello+xyz@mail.example">hello+xyz@mail.example</a> is.</p>
wysiwyg: |-
- <p>hello@mail+xyz.example isn't valid, but hello+xyz@mail.example is.</p>
-06_10__inlines__autolinks_extension__11:
+ <p>hello@mail+xyz.example isn't valid, but <a target="_blank" rel="noopener noreferrer nofollow" href="mailto:hello+xyz@mail.example">hello+xyz@mail.example</a> is.</p>
+06_10__inlines__autolinks_extension__011:
canonical: |
<p><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a></p>
<p><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a>.</p>
<p>a.b-c_d@a.b-</p>
<p>a.b-c_d@a.b_</p>
static: |-
- <p data-sourcepos="1:1-1:11" dir="auto"><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a></p>&#x000A;<p data-sourcepos="3:1-3:12" dir="auto"><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a>.</p>&#x000A;<p data-sourcepos="5:1-5:12" dir="auto">a.b-c_d@a.b-</p>&#x000A;<p data-sourcepos="7:1-7:12" dir="auto">a.b-c_d@a.b_</p>
+ <p data-sourcepos="1:1-1:11" dir="auto"><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a></p>
+ <p data-sourcepos="3:1-3:12" dir="auto"><a href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a>.</p>
+ <p data-sourcepos="5:1-5:12" dir="auto">a.b-c_d@a.b-</p>
+ <p data-sourcepos="7:1-7:12" dir="auto">a.b-c_d@a.b_</p>
wysiwyg: |-
- <p>a.b-c_d@a.b</p>
-06_11__inlines__raw_html__01:
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="mailto:a.b-c_d@a.b">a.b-c_d@a.b</a></p>
+06_11__inlines__raw_html__001:
canonical: |
<p><a><bab><c2c></p>
static: |-
@@ -5752,7 +7119,7 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "bab" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__02:
+06_11__inlines__raw_html__002:
canonical: |
<p><a/><b2/></p>
static: |-
@@ -5760,7 +7127,7 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__03:
+06_11__inlines__raw_html__003:
canonical: |
<p><a /><b2
data="foo" ></p>
@@ -5769,7 +7136,7 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__04:
+06_11__inlines__raw_html__004:
canonical: |
<p><a foo="bar" bam = 'baz <em>"</em>'
_boolean zoop:33=zoop:33 /></p>
@@ -5777,7 +7144,7 @@
<p data-sourcepos="1:1-2:27" dir="auto"><a></a></p>
wysiwyg: |-
<p></p>
-06_11__inlines__raw_html__05:
+06_11__inlines__raw_html__005:
canonical: |
<p>Foo <responsive-image src="foo.jpg" /></p>
static: |-
@@ -5785,62 +7152,65 @@
wysiwyg: |-
Error - check implementation:
Hast node of type "responsive-image" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__06:
+06_11__inlines__raw_html__006:
canonical: |
<p>&lt;33&gt; &lt;__&gt;</p>
static: |-
<p data-sourcepos="1:1-1:9" dir="auto">&lt;33&gt; &lt;__&gt;</p>
wysiwyg: |-
<p>&lt;33&gt; &lt;__&gt;</p>
-06_11__inlines__raw_html__07:
+06_11__inlines__raw_html__007:
canonical: |
<p>&lt;a h*#ref=&quot;hi&quot;&gt;</p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto">&lt;a h*#ref="hi"&gt;</p>
wysiwyg: |-
<p>&lt;a h*#ref="hi"&gt;</p>
-06_11__inlines__raw_html__08:
+06_11__inlines__raw_html__008:
canonical: |
<p>&lt;a href=&quot;hi'&gt; &lt;a href=hi'&gt;</p>
static: |-
<p data-sourcepos="1:1-1:26" dir="auto">&lt;a href="hi'&gt; &lt;a href=hi'&gt;</p>
wysiwyg: |-
<p>&lt;a href="hi'&gt; &lt;a href=hi'&gt;</p>
-06_11__inlines__raw_html__09:
+06_11__inlines__raw_html__009:
canonical: |
<p>&lt; a&gt;&lt;
foo&gt;&lt;bar/ &gt;
&lt;foo bar=baz
bim!bop /&gt;</p>
static: |-
- <p data-sourcepos="1:1-4:10" dir="auto">&lt; a&gt;&lt;&#x000A;foo&gt;&lt;bar/ &gt;&#x000A;&lt;foo bar=baz&#x000A;bim!bop /&gt;</p>
+ <p data-sourcepos="1:1-4:10" dir="auto">&lt; a&gt;&lt;
+ foo&gt;&lt;bar/ &gt;
+ &lt;foo bar=baz
+ bim!bop /&gt;</p>
wysiwyg: |-
<p>&lt; a&gt;&lt;
foo&gt;&lt;bar/ &gt;
&lt;foo bar=baz
bim!bop /&gt;</p>
-06_11__inlines__raw_html__10:
+06_11__inlines__raw_html__010:
canonical: |
<p>&lt;a href='bar'title=title&gt;</p>
static: |-
<p data-sourcepos="1:1-1:25" dir="auto">&lt;a href='bar'title=title&gt;</p>
wysiwyg: |-
<p>&lt;a href='bar'title=title&gt;</p>
-06_11__inlines__raw_html__11:
+06_11__inlines__raw_html__011:
canonical: |
<p></a></foo ></p>
static: |-
<p data-sourcepos="1:1-1:11" dir="auto"></p>
wysiwyg: |-
<p></p>
-06_11__inlines__raw_html__12:
+06_11__inlines__raw_html__012:
canonical: |
<p>&lt;/a href=&quot;foo&quot;&gt;</p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto">&lt;/a href="foo"&gt;</p>
wysiwyg: |-
<p>&lt;/a href="foo"&gt;</p>
-06_11__inlines__raw_html__13:
+06_11__inlines__raw_html__013:
canonical: |
<p>foo <!-- this is a
comment - with hyphen --></p>
@@ -5848,162 +7218,173 @@
<p data-sourcepos="1:1-2:25" dir="auto">foo </p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__14:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__014:
canonical: |
<p>foo &lt;!-- not a comment -- two hyphens --&gt;</p>
static: |-
<p data-sourcepos="1:1-1:41" dir="auto">foo &lt;!-- not a comment -- two hyphens --&gt;</p>
wysiwyg: |-
<p>foo &lt;!-- not a comment -- two hyphens --&gt;</p>
-06_11__inlines__raw_html__15:
+06_11__inlines__raw_html__015:
canonical: |
<p>foo &lt;!--&gt; foo --&gt;</p>
<p>foo &lt;!-- foo---&gt;</p>
static: |-
- <p data-sourcepos="1:1-1:17" dir="auto">foo &lt;!--&gt; foo --&gt;</p>&#x000A;<p data-sourcepos="3:1-3:16" dir="auto">foo &lt;!-- foo---&gt;</p>
+ <p data-sourcepos="1:1-1:17" dir="auto">foo &lt;!--&gt; foo --&gt;</p>
+ <p data-sourcepos="3:1-3:16" dir="auto">foo &lt;!-- foo---&gt;</p>
wysiwyg: |-
<p>foo &lt;!--&gt; foo --&gt;</p>
-06_11__inlines__raw_html__16:
+06_11__inlines__raw_html__016:
canonical: |
<p>foo <?php echo $a; ?></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto">foo <?php echo $a; ?></p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__17:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__017:
canonical: |
<p>foo <!ELEMENT br EMPTY></p>
static: |-
- <p data-sourcepos="1:1-1:23" dir="auto">foo </p>
+ <p data-sourcepos="1:1-1:23" dir="auto">foo &lt;!ELEMENT br EMPTY&gt;</p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__18:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__018:
canonical: |
<p>foo <![CDATA[>&<]]></p>
static: |-
- <p data-sourcepos="1:1-1:19" dir="auto">foo &amp;</p>
+ <p data-sourcepos="1:1-1:19" dir="auto">foo &lt;![CDATA[&gt;&amp;&lt;]]&gt;</p>
wysiwyg: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__19:
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__019:
canonical: |
<p>foo <a href="&ouml;"></p>
static: |-
<p data-sourcepos="1:1-1:21" dir="auto">foo <a href="%C3%B6" rel="nofollow noreferrer noopener" target="_blank"></a></p>
wysiwyg: |-
<p>foo </p>
-06_11__inlines__raw_html__20:
+06_11__inlines__raw_html__020:
canonical: |
<p>foo <a href="\*"></p>
static: |-
<p data-sourcepos="1:1-1:17" dir="auto">foo <a href="%5C*" rel="nofollow noreferrer noopener" target="_blank"></a></p>
wysiwyg: |-
<p>foo </p>
-06_11__inlines__raw_html__21:
+06_11__inlines__raw_html__021:
canonical: |
<p>&lt;a href=&quot;&quot;&quot;&gt;</p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">&lt;a href="""&gt;</p>
wysiwyg: |-
<p>&lt;a href="""&gt;</p>
-06_12__inlines__disallowed_raw_html_extension__01:
+06_12__inlines__disallowed_raw_html_extension__001:
canonical: |
<p><strong> &lt;title> &lt;style> <em></p>
<blockquote>
&lt;xmp> is disallowed. &lt;XMP> is also disallowed.
</blockquote>
static: |-
- <p data-sourcepos="1:1-1:29" dir="auto"><strong> &lt;em&gt;&lt;/p&gt;&#x000A;&lt;blockquote&gt;&#x000A; &lt;xmp&gt; is disallowed. &lt;XMP&gt; is also disallowed.&#x000A;&lt;/blockquote&gt;</strong></p>
+ <p data-sourcepos="1:1-1:29" dir="auto"><strong> &lt;em&gt;&lt;/p&gt;
+ &lt;blockquote&gt;
+ &lt;xmp&gt; is disallowed. &lt;XMP&gt; is also disallowed.
+ &lt;/blockquote&gt;</strong></p>
wysiwyg: |-
Error - check implementation:
Hast node of type "title" not supported by this converter. Please, provide an specification.
-06_13__inlines__hard_line_breaks__01:
+06_13__inlines__hard_line_breaks__001:
canonical: |
<p>foo<br />
baz</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>&#x000A;baz</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
+ baz</p>
wysiwyg: |-
<p>foo<br>
baz</p>
-06_13__inlines__hard_line_breaks__02:
+06_13__inlines__hard_line_breaks__002:
canonical: |
<p>foo<br />
baz</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>&#x000A;baz</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
+ baz</p>
wysiwyg: |-
<p>foo<br>
baz</p>
-06_13__inlines__hard_line_breaks__03:
+06_13__inlines__hard_line_breaks__003:
canonical: |
<p>foo<br />
baz</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo<br>&#x000A;baz</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">foo<br>
+ baz</p>
wysiwyg: |-
<p>foo<br>
baz</p>
-06_13__inlines__hard_line_breaks__04:
+06_13__inlines__hard_line_breaks__004:
canonical: |
<p>foo<br />
bar</p>
static: |-
- <p data-sourcepos="1:1-2:8" dir="auto">foo<br>&#x000A;bar</p>
+ <p data-sourcepos="1:1-2:8" dir="auto">foo<br>
+ bar</p>
wysiwyg: |-
<p>foo<br>
bar</p>
-06_13__inlines__hard_line_breaks__05:
+06_13__inlines__hard_line_breaks__005:
canonical: |
<p>foo<br />
bar</p>
static: |-
- <p data-sourcepos="1:1-2:8" dir="auto">foo<br>&#x000A;bar</p>
+ <p data-sourcepos="1:1-2:8" dir="auto">foo<br>
+ bar</p>
wysiwyg: |-
<p>foo<br>
bar</p>
-06_13__inlines__hard_line_breaks__06:
+06_13__inlines__hard_line_breaks__006:
canonical: |
<p><em>foo<br />
bar</em></p>
static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>&#x000A;bar</em></p>
+ <p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>
+ bar</em></p>
wysiwyg: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__07:
+06_13__inlines__hard_line_breaks__007:
canonical: |
<p><em>foo<br />
bar</em></p>
static: |-
- <p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>&#x000A;bar</em></p>
+ <p data-sourcepos="1:1-2:4" dir="auto"><em>foo<br>
+ bar</em></p>
wysiwyg: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__08:
+06_13__inlines__hard_line_breaks__008:
canonical: |
<p><code>code span</code></p>
static: |-
<p data-sourcepos="1:1-2:5" dir="auto"><code>code span</code></p>
wysiwyg: |-
<p><code>code span</code></p>
-06_13__inlines__hard_line_breaks__09:
+06_13__inlines__hard_line_breaks__009:
canonical: |
<p><code>code\ span</code></p>
static: |-
<p data-sourcepos="1:1-2:5" dir="auto"><code>code\ span</code></p>
wysiwyg: |-
<p><code>code\ span</code></p>
-06_13__inlines__hard_line_breaks__10:
+06_13__inlines__hard_line_breaks__010:
canonical: "<p><a href=\"foo \nbar\"></p>\n"
static: |-
<p data-sourcepos="1:1-2:5" dir="auto"><a href="foo%20%20%0Abar" rel="nofollow noreferrer noopener" target="_blank"></a></p>
wysiwyg: |-
<p></p>
-06_13__inlines__hard_line_breaks__11:
+06_13__inlines__hard_line_breaks__011:
canonical: |
<p><a href="foo\
bar"></p>
@@ -6011,87 +7392,88 @@
<p data-sourcepos="1:1-2:5" dir="auto"><a href="foo%5C%0Abar" rel="nofollow noreferrer noopener" target="_blank"></a></p>
wysiwyg: |-
<p></p>
-06_13__inlines__hard_line_breaks__12:
+06_13__inlines__hard_line_breaks__012:
canonical: |
<p>foo\</p>
static: |-
<p data-sourcepos="1:1-1:4" dir="auto">foo\</p>
wysiwyg: |-
<p>foo\</p>
-06_13__inlines__hard_line_breaks__13:
+06_13__inlines__hard_line_breaks__013:
canonical: |
<p>foo</p>
static: |-
<p data-sourcepos="1:1-1:5" dir="auto">foo</p>
wysiwyg: |-
<p>foo</p>
-06_13__inlines__hard_line_breaks__14:
+06_13__inlines__hard_line_breaks__014:
canonical: |
<h3>foo\</h3>
static: |-
- <h3 data-sourcepos="1:1-1:8" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo\</h3>
+ <h3 data-sourcepos="1:1-1:8" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo\</h3>
wysiwyg: |-
<h3>foo\</h3>
-06_13__inlines__hard_line_breaks__15:
+06_13__inlines__hard_line_breaks__015:
canonical: |
<h3>foo</h3>
static: |-
- <h3 data-sourcepos="1:1-1:7" dir="auto">&#x000A;<a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
+ <h3 data-sourcepos="1:1-1:7" dir="auto">
+ <a id="user-content-foo" class="anchor" href="#foo" aria-hidden="true"></a>foo</h3>
wysiwyg: |-
<h3>foo</h3>
-06_14__inlines__soft_line_breaks__01:
+06_14__inlines__soft_line_breaks__001:
canonical: |
<p>foo
baz</p>
static: |-
- <p data-sourcepos="1:1-2:3" dir="auto">foo&#x000A;baz</p>
+ <p data-sourcepos="1:1-2:3" dir="auto">foo
+ baz</p>
wysiwyg: |-
<p>foo
baz</p>
-06_14__inlines__soft_line_breaks__02:
+06_14__inlines__soft_line_breaks__002:
canonical: |
<p>foo
baz</p>
static: |-
- <p data-sourcepos="1:1-2:4" dir="auto">foo&#x000A;baz</p>
+ <p data-sourcepos="1:1-2:4" dir="auto">foo
+ baz</p>
wysiwyg: |-
<p>foo
baz</p>
-06_15__inlines__textual_content__01:
+06_15__inlines__textual_content__001:
canonical: |
<p>hello $.;'there</p>
static: |-
<p data-sourcepos="1:1-1:15" dir="auto">hello $.;'there</p>
wysiwyg: |-
<p>hello $.;'there</p>
-06_15__inlines__textual_content__02:
+06_15__inlines__textual_content__002:
canonical: |
    <p>Foo χρῆν</p>
static: |-
<p data-sourcepos="1:1-1:13" dir="auto">Foo χÏῆν</p>
wysiwyg: |-
    <p>Foo χρῆν</p>
-06_15__inlines__textual_content__03:
+06_15__inlines__textual_content__003:
canonical: |
<p>Multiple spaces</p>
static: |-
<p data-sourcepos="1:1-1:19" dir="auto">Multiple spaces</p>
wysiwyg: |-
<p>Multiple spaces</p>
-07_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01:
- canonical: |
- <p><strong>bold</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
- wysiwyg: |-
- <p><strong>bold</strong></p>
-08_01__second_gitlab_specific_section_with_examples__strong_but_with_html__01:
- canonical: |
- <p><strong>
- bold
- </strong></p>
+07_01__gitlab_specific_markdown__footnotes__001:
+ canonical: ""
static: |-
- <strong>&#x000A;bold&#x000A;</strong>
+ <p data-sourcepos="1:1-1:27" dir="auto">footnote reference tag <sup class="footnote-ref"><a href="#fn-1-2118" id="fnref-1-2118" data-footnote-ref>1</a></sup></p>
+ <section data-footnotes class="footnotes">
+ <ol>
+ <li id="fn-1-2118">
+ <p data-sourcepos="3:7-3:19">footnote text <a href="#fnref-1-2118" data-footnote-backref aria-label="Back to content" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ </li>
+ </ol>
+ </section>
wysiwyg: |-
Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
+ Hast node of type "sup" not supported by this converter. Please, provide an specification.
diff --git a/spec/fixtures/glfm/example_snapshots/markdown.yml b/spec/fixtures/glfm/example_snapshots/markdown.yml
index 8232b158050..d1fd16b10ce 100644
--- a/spec/fixtures/glfm/example_snapshots/markdown.yml
+++ b/spec/fixtures/glfm/example_snapshots/markdown.yml
@@ -1,144 +1,144 @@
---
-02_01__preliminaries__tabs__01: "\tfoo\tbaz\t\tbim\n"
-02_01__preliminaries__tabs__02: " \tfoo\tbaz\t\tbim\n"
-02_01__preliminaries__tabs__03: "    a\ta\n    ὐ\ta\n"
-02_01__preliminaries__tabs__04: " - foo\n\n\tbar\n"
-02_01__preliminaries__tabs__05: "- foo\n\n\t\tbar\n"
-02_01__preliminaries__tabs__06: ">\t\tfoo\n"
-02_01__preliminaries__tabs__07: "-\t\tfoo\n"
-02_01__preliminaries__tabs__08: " foo\n\tbar\n"
-02_01__preliminaries__tabs__09: " - foo\n - bar\n\t - baz\n"
-02_01__preliminaries__tabs__10: "#\tFoo\n"
-02_01__preliminaries__tabs__11: "*\t*\t*\t\n"
-03_01__blocks_and_inlines__precedence__01: |
+02_01__preliminaries__tabs__001: "\tfoo\tbaz\t\tbim\n"
+02_01__preliminaries__tabs__002: " \tfoo\tbaz\t\tbim\n"
+02_01__preliminaries__tabs__003: "    a\ta\n    ὐ\ta\n"
+02_01__preliminaries__tabs__004: " - foo\n\n\tbar\n"
+02_01__preliminaries__tabs__005: "- foo\n\n\t\tbar\n"
+02_01__preliminaries__tabs__006: ">\t\tfoo\n"
+02_01__preliminaries__tabs__007: "-\t\tfoo\n"
+02_01__preliminaries__tabs__008: " foo\n\tbar\n"
+02_01__preliminaries__tabs__009: " - foo\n - bar\n\t - baz\n"
+02_01__preliminaries__tabs__010: "#\tFoo\n"
+02_01__preliminaries__tabs__011: "*\t*\t*\t\n"
+03_01__blocks_and_inlines__precedence__001: |
- `one
- two`
-04_01__leaf_blocks__thematic_breaks__01: |
+04_01__leaf_blocks__thematic_breaks__001: |
***
---
___
-04_01__leaf_blocks__thematic_breaks__02: |
+04_01__leaf_blocks__thematic_breaks__002: |
+++
-04_01__leaf_blocks__thematic_breaks__03: |
+04_01__leaf_blocks__thematic_breaks__003: |
===
-04_01__leaf_blocks__thematic_breaks__04: |
+04_01__leaf_blocks__thematic_breaks__004: |
--
**
__
-04_01__leaf_blocks__thematic_breaks__05: |2
+04_01__leaf_blocks__thematic_breaks__005: |2
***
***
***
-04_01__leaf_blocks__thematic_breaks__06: |2
+04_01__leaf_blocks__thematic_breaks__006: |2
***
-04_01__leaf_blocks__thematic_breaks__07: |
+04_01__leaf_blocks__thematic_breaks__007: |
Foo
***
-04_01__leaf_blocks__thematic_breaks__08: |
+04_01__leaf_blocks__thematic_breaks__008: |
_____________________________________
-04_01__leaf_blocks__thematic_breaks__09: |2
+04_01__leaf_blocks__thematic_breaks__009: |2
- - -
-04_01__leaf_blocks__thematic_breaks__10: |2
+04_01__leaf_blocks__thematic_breaks__010: |2
** * ** * ** * **
-04_01__leaf_blocks__thematic_breaks__11: |
+04_01__leaf_blocks__thematic_breaks__011: |
- - - -
-04_01__leaf_blocks__thematic_breaks__12: "- - - - \n"
-04_01__leaf_blocks__thematic_breaks__13: |
+04_01__leaf_blocks__thematic_breaks__012: "- - - - \n"
+04_01__leaf_blocks__thematic_breaks__013: |
_ _ _ _ a
a------
---a---
-04_01__leaf_blocks__thematic_breaks__14: |2
+04_01__leaf_blocks__thematic_breaks__014: |2
*-*
-04_01__leaf_blocks__thematic_breaks__15: |
+04_01__leaf_blocks__thematic_breaks__015: |
- foo
***
- bar
-04_01__leaf_blocks__thematic_breaks__16: |
+04_01__leaf_blocks__thematic_breaks__016: |
Foo
***
bar
-04_01__leaf_blocks__thematic_breaks__17: |
+04_01__leaf_blocks__thematic_breaks__017: |
Foo
---
bar
-04_01__leaf_blocks__thematic_breaks__18: |
+04_01__leaf_blocks__thematic_breaks__018: |
* Foo
* * *
* Bar
-04_01__leaf_blocks__thematic_breaks__19: |
+04_01__leaf_blocks__thematic_breaks__019: |
- Foo
- * * *
-04_02__leaf_blocks__atx_headings__01: |
+04_02__leaf_blocks__atx_headings__001: |
# foo
## foo
### foo
#### foo
##### foo
###### foo
-04_02__leaf_blocks__atx_headings__02: |
+04_02__leaf_blocks__atx_headings__002: |
####### foo
-04_02__leaf_blocks__atx_headings__03: |
+04_02__leaf_blocks__atx_headings__003: |
#5 bolt
#hashtag
-04_02__leaf_blocks__atx_headings__04: |
+04_02__leaf_blocks__atx_headings__004: |
\## foo
-04_02__leaf_blocks__atx_headings__05: |
+04_02__leaf_blocks__atx_headings__005: |
# foo *bar* \*baz\*
-04_02__leaf_blocks__atx_headings__06: "# foo \n"
-04_02__leaf_blocks__atx_headings__07: |2
+04_02__leaf_blocks__atx_headings__006: "# foo \n"
+04_02__leaf_blocks__atx_headings__007: |2
### foo
## foo
# foo
-04_02__leaf_blocks__atx_headings__08: |2
+04_02__leaf_blocks__atx_headings__008: |2
# foo
-04_02__leaf_blocks__atx_headings__09: |
+04_02__leaf_blocks__atx_headings__009: |
foo
# bar
-04_02__leaf_blocks__atx_headings__10: |
+04_02__leaf_blocks__atx_headings__010: |
## foo ##
### bar ###
-04_02__leaf_blocks__atx_headings__11: |
+04_02__leaf_blocks__atx_headings__011: |
# foo ##################################
##### foo ##
-04_02__leaf_blocks__atx_headings__12: "### foo ### \n"
-04_02__leaf_blocks__atx_headings__13: |
+04_02__leaf_blocks__atx_headings__012: "### foo ### \n"
+04_02__leaf_blocks__atx_headings__013: |
### foo ### b
-04_02__leaf_blocks__atx_headings__14: |
+04_02__leaf_blocks__atx_headings__014: |
# foo#
-04_02__leaf_blocks__atx_headings__15: |
+04_02__leaf_blocks__atx_headings__015: |
### foo \###
## foo #\##
# foo \#
-04_02__leaf_blocks__atx_headings__16: |
+04_02__leaf_blocks__atx_headings__016: |
****
## foo
****
-04_02__leaf_blocks__atx_headings__17: |
+04_02__leaf_blocks__atx_headings__017: |
Foo bar
# baz
Bar foo
-04_02__leaf_blocks__atx_headings__18: "## \n#\n### ###\n"
-04_03__leaf_blocks__setext_headings__01: |
+04_02__leaf_blocks__atx_headings__018: "## \n#\n### ###\n"
+04_03__leaf_blocks__setext_headings__001: |
Foo *bar*
=========
Foo *bar*
---------
-04_03__leaf_blocks__setext_headings__02: |
+04_03__leaf_blocks__setext_headings__002: |
Foo *bar
baz*
====
-04_03__leaf_blocks__setext_headings__03: " Foo *bar\nbaz*\t\n====\n"
-04_03__leaf_blocks__setext_headings__04: |
+04_03__leaf_blocks__setext_headings__003: " Foo *bar\nbaz*\t\n====\n"
+04_03__leaf_blocks__setext_headings__004: |
Foo
-------------------------
Foo
=
-04_03__leaf_blocks__setext_headings__05: |2
+04_03__leaf_blocks__setext_headings__005: |2
Foo
---
@@ -147,27 +147,27 @@
Foo
===
-04_03__leaf_blocks__setext_headings__06: |2
+04_03__leaf_blocks__setext_headings__006: |2
Foo
---
Foo
---
-04_03__leaf_blocks__setext_headings__07: "Foo\n ---- \n"
-04_03__leaf_blocks__setext_headings__08: |
+04_03__leaf_blocks__setext_headings__007: "Foo\n ---- \n"
+04_03__leaf_blocks__setext_headings__008: |
Foo
---
-04_03__leaf_blocks__setext_headings__09: |
+04_03__leaf_blocks__setext_headings__009: |
Foo
= =
Foo
--- -
-04_03__leaf_blocks__setext_headings__10: "Foo \n-----\n"
-04_03__leaf_blocks__setext_headings__11: |
+04_03__leaf_blocks__setext_headings__010: "Foo \n-----\n"
+04_03__leaf_blocks__setext_headings__011: |
Foo\
----
-04_03__leaf_blocks__setext_headings__12: |
+04_03__leaf_blocks__setext_headings__012: |
`Foo
----
`
@@ -175,236 +175,236 @@
<a title="a lot
---
of dashes"/>
-04_03__leaf_blocks__setext_headings__13: |
+04_03__leaf_blocks__setext_headings__013: |
> Foo
---
-04_03__leaf_blocks__setext_headings__14: |
+04_03__leaf_blocks__setext_headings__014: |
> foo
bar
===
-04_03__leaf_blocks__setext_headings__15: |
+04_03__leaf_blocks__setext_headings__015: |
- Foo
---
-04_03__leaf_blocks__setext_headings__16: |
+04_03__leaf_blocks__setext_headings__016: |
Foo
Bar
---
-04_03__leaf_blocks__setext_headings__17: |
+04_03__leaf_blocks__setext_headings__017: |
---
Foo
---
Bar
---
Baz
-04_03__leaf_blocks__setext_headings__18: |2
+04_03__leaf_blocks__setext_headings__018: |2
====
-04_03__leaf_blocks__setext_headings__19: |
+04_03__leaf_blocks__setext_headings__019: |
---
---
-04_03__leaf_blocks__setext_headings__20: |
+04_03__leaf_blocks__setext_headings__020: |
- foo
-----
-04_03__leaf_blocks__setext_headings__21: |2
+04_03__leaf_blocks__setext_headings__021: |2
foo
---
-04_03__leaf_blocks__setext_headings__22: |
+04_03__leaf_blocks__setext_headings__022: |
> foo
-----
-04_03__leaf_blocks__setext_headings__23: |
+04_03__leaf_blocks__setext_headings__023: |
\> foo
------
-04_03__leaf_blocks__setext_headings__24: |
+04_03__leaf_blocks__setext_headings__024: |
Foo
bar
---
baz
-04_03__leaf_blocks__setext_headings__25: |
+04_03__leaf_blocks__setext_headings__025: |
Foo
bar
---
baz
-04_03__leaf_blocks__setext_headings__26: |
+04_03__leaf_blocks__setext_headings__026: |
Foo
bar
* * *
baz
-04_03__leaf_blocks__setext_headings__27: |
+04_03__leaf_blocks__setext_headings__027: |
Foo
bar
\---
baz
-04_04__leaf_blocks__indented_code_blocks__01: |2
+04_04__leaf_blocks__indented_code_blocks__001: |2
a simple
indented code block
-04_04__leaf_blocks__indented_code_blocks__02: |2
+04_04__leaf_blocks__indented_code_blocks__002: |2
- foo
bar
-04_04__leaf_blocks__indented_code_blocks__03: |
+04_04__leaf_blocks__indented_code_blocks__003: |
1. foo
- bar
-04_04__leaf_blocks__indented_code_blocks__04: |2
+04_04__leaf_blocks__indented_code_blocks__004: |2
<a/>
*hi*
- one
-04_04__leaf_blocks__indented_code_blocks__05: " chunk1\n\n chunk2\n \n \n \n
- \ chunk3\n"
-04_04__leaf_blocks__indented_code_blocks__06: " chunk1\n \n chunk2\n"
-04_04__leaf_blocks__indented_code_blocks__07: |+
+04_04__leaf_blocks__indented_code_blocks__005: " chunk1\n\n chunk2\n \n \n
+ \n chunk3\n"
+04_04__leaf_blocks__indented_code_blocks__006: " chunk1\n \n chunk2\n"
+04_04__leaf_blocks__indented_code_blocks__007: |+
Foo
bar
-04_04__leaf_blocks__indented_code_blocks__08: |2
+04_04__leaf_blocks__indented_code_blocks__008: |2
foo
bar
-04_04__leaf_blocks__indented_code_blocks__09: |
+04_04__leaf_blocks__indented_code_blocks__009: |
# Heading
foo
Heading
------
foo
----
-04_04__leaf_blocks__indented_code_blocks__10: |2
+04_04__leaf_blocks__indented_code_blocks__010: |2
foo
bar
-04_04__leaf_blocks__indented_code_blocks__11: "\n \n foo\n \n\n"
-04_04__leaf_blocks__indented_code_blocks__12: " foo \n"
-04_05__leaf_blocks__fenced_code_blocks__01: |
+04_04__leaf_blocks__indented_code_blocks__011: "\n \n foo\n \n\n"
+04_04__leaf_blocks__indented_code_blocks__012: " foo \n"
+04_05__leaf_blocks__fenced_code_blocks__001: |
```
<
>
```
-04_05__leaf_blocks__fenced_code_blocks__02: |
+04_05__leaf_blocks__fenced_code_blocks__002: |
~~~
<
>
~~~
-04_05__leaf_blocks__fenced_code_blocks__03: |
+04_05__leaf_blocks__fenced_code_blocks__003: |
``
foo
``
-04_05__leaf_blocks__fenced_code_blocks__04: |
+04_05__leaf_blocks__fenced_code_blocks__004: |
```
aaa
~~~
```
-04_05__leaf_blocks__fenced_code_blocks__05: |
+04_05__leaf_blocks__fenced_code_blocks__005: |
~~~
aaa
```
~~~
-04_05__leaf_blocks__fenced_code_blocks__06: |
+04_05__leaf_blocks__fenced_code_blocks__006: |
````
aaa
```
``````
-04_05__leaf_blocks__fenced_code_blocks__07: |
+04_05__leaf_blocks__fenced_code_blocks__007: |
~~~~
aaa
~~~
~~~~
-04_05__leaf_blocks__fenced_code_blocks__08: |
+04_05__leaf_blocks__fenced_code_blocks__008: |
```
-04_05__leaf_blocks__fenced_code_blocks__09: |
+04_05__leaf_blocks__fenced_code_blocks__009: |
`````
```
aaa
-04_05__leaf_blocks__fenced_code_blocks__10: |
+04_05__leaf_blocks__fenced_code_blocks__010: |
> ```
> aaa
bbb
-04_05__leaf_blocks__fenced_code_blocks__11: "```\n\n \n```\n"
-04_05__leaf_blocks__fenced_code_blocks__12: |
+04_05__leaf_blocks__fenced_code_blocks__011: "```\n\n \n```\n"
+04_05__leaf_blocks__fenced_code_blocks__012: |
```
```
-04_05__leaf_blocks__fenced_code_blocks__13: |2
+04_05__leaf_blocks__fenced_code_blocks__013: |2
```
aaa
aaa
```
-04_05__leaf_blocks__fenced_code_blocks__14: |2
+04_05__leaf_blocks__fenced_code_blocks__014: |2
```
aaa
aaa
aaa
```
-04_05__leaf_blocks__fenced_code_blocks__15: |2
+04_05__leaf_blocks__fenced_code_blocks__015: |2
```
aaa
aaa
aaa
```
-04_05__leaf_blocks__fenced_code_blocks__16: |2
+04_05__leaf_blocks__fenced_code_blocks__016: |2
```
aaa
```
-04_05__leaf_blocks__fenced_code_blocks__17: |
+04_05__leaf_blocks__fenced_code_blocks__017: |
```
aaa
```
-04_05__leaf_blocks__fenced_code_blocks__18: |2
+04_05__leaf_blocks__fenced_code_blocks__018: |2
```
aaa
```
-04_05__leaf_blocks__fenced_code_blocks__19: |
+04_05__leaf_blocks__fenced_code_blocks__019: |
```
aaa
```
-04_05__leaf_blocks__fenced_code_blocks__20: |
+04_05__leaf_blocks__fenced_code_blocks__020: |
``` ```
aaa
-04_05__leaf_blocks__fenced_code_blocks__21: |
+04_05__leaf_blocks__fenced_code_blocks__021: |
~~~~~~
aaa
~~~ ~~
-04_05__leaf_blocks__fenced_code_blocks__22: |
+04_05__leaf_blocks__fenced_code_blocks__022: |
foo
```
bar
```
baz
-04_05__leaf_blocks__fenced_code_blocks__23: |
+04_05__leaf_blocks__fenced_code_blocks__023: |
foo
---
~~~
bar
~~~
# baz
-04_05__leaf_blocks__fenced_code_blocks__24: |
+04_05__leaf_blocks__fenced_code_blocks__024: |
```ruby
def foo(x)
return 3
end
```
-04_05__leaf_blocks__fenced_code_blocks__25: |
+04_05__leaf_blocks__fenced_code_blocks__025: |
~~~~ ruby startline=3 $%@#$
def foo(x)
return 3
end
~~~~~~~
-04_05__leaf_blocks__fenced_code_blocks__26: |
+04_05__leaf_blocks__fenced_code_blocks__026: |
````;
````
-04_05__leaf_blocks__fenced_code_blocks__27: |
+04_05__leaf_blocks__fenced_code_blocks__027: |
``` aa ```
foo
-04_05__leaf_blocks__fenced_code_blocks__28: |
+04_05__leaf_blocks__fenced_code_blocks__028: |
~~~ aa ``` ~~~
foo
~~~
-04_05__leaf_blocks__fenced_code_blocks__29: |
+04_05__leaf_blocks__fenced_code_blocks__029: |
```
``` aaa
```
-04_06__leaf_blocks__html_blocks__01: |
+04_06__leaf_blocks__html_blocks__001: |
<table><tr><td>
<pre>
**Hello**,
@@ -412,7 +412,7 @@
_world_.
</pre>
</td></tr></table>
-04_06__leaf_blocks__html_blocks__02: |
+04_06__leaf_blocks__html_blocks__002: |
<table>
<tr>
<td>
@@ -422,80 +422,80 @@
</table>
okay.
-04_06__leaf_blocks__html_blocks__03: |2
+04_06__leaf_blocks__html_blocks__003: |2
<div>
*hello*
<foo><a>
-04_06__leaf_blocks__html_blocks__04: |
+04_06__leaf_blocks__html_blocks__004: |
</div>
*foo*
-04_06__leaf_blocks__html_blocks__05: |
+04_06__leaf_blocks__html_blocks__005: |
<DIV CLASS="foo">
*Markdown*
</DIV>
-04_06__leaf_blocks__html_blocks__06: |
+04_06__leaf_blocks__html_blocks__006: |
<div id="foo"
class="bar">
</div>
-04_06__leaf_blocks__html_blocks__07: |
+04_06__leaf_blocks__html_blocks__007: |
<div id="foo" class="bar
baz">
</div>
-04_06__leaf_blocks__html_blocks__08: |
+04_06__leaf_blocks__html_blocks__008: |
<div>
*foo*
*bar*
-04_06__leaf_blocks__html_blocks__09: |
+04_06__leaf_blocks__html_blocks__009: |
<div id="foo"
*hi*
-04_06__leaf_blocks__html_blocks__10: |
+04_06__leaf_blocks__html_blocks__010: |
<div class
foo
-04_06__leaf_blocks__html_blocks__11: |
+04_06__leaf_blocks__html_blocks__011: |
<div *???-&&&-<---
*foo*
-04_06__leaf_blocks__html_blocks__12: |
+04_06__leaf_blocks__html_blocks__012: |
<div><a href="bar">*foo*</a></div>
-04_06__leaf_blocks__html_blocks__13: |
+04_06__leaf_blocks__html_blocks__013: |
<table><tr><td>
foo
</td></tr></table>
-04_06__leaf_blocks__html_blocks__14: |
+04_06__leaf_blocks__html_blocks__014: |
<div></div>
``` c
int x = 33;
```
-04_06__leaf_blocks__html_blocks__15: |
+04_06__leaf_blocks__html_blocks__015: |
<a href="foo">
*bar*
</a>
-04_06__leaf_blocks__html_blocks__16: |
+04_06__leaf_blocks__html_blocks__016: |
<Warning>
*bar*
</Warning>
-04_06__leaf_blocks__html_blocks__17: |
+04_06__leaf_blocks__html_blocks__017: |
<i class="foo">
*bar*
</i>
-04_06__leaf_blocks__html_blocks__18: |
+04_06__leaf_blocks__html_blocks__018: |
</ins>
*bar*
-04_06__leaf_blocks__html_blocks__19: |
+04_06__leaf_blocks__html_blocks__019: |
<del>
*foo*
</del>
-04_06__leaf_blocks__html_blocks__20: |
+04_06__leaf_blocks__html_blocks__020: |
<del>
*foo*
</del>
-04_06__leaf_blocks__html_blocks__21: |
+04_06__leaf_blocks__html_blocks__021: |
<del>*foo*</del>
-04_06__leaf_blocks__html_blocks__22: |
+04_06__leaf_blocks__html_blocks__022: |
<pre language="haskell"><code>
import Text.HTML.TagSoup
@@ -503,14 +503,14 @@
main = print $ parseTags tags
</code></pre>
okay
-04_06__leaf_blocks__html_blocks__23: |
+04_06__leaf_blocks__html_blocks__023: |
<script type="text/javascript">
// JavaScript example
document.getElementById("demo").innerHTML = "Hello JavaScript!";
</script>
okay
-04_06__leaf_blocks__html_blocks__24: |
+04_06__leaf_blocks__html_blocks__024: |
<style
type="text/css">
h1 {color:red;}
@@ -518,45 +518,45 @@
p {color:blue;}
</style>
okay
-04_06__leaf_blocks__html_blocks__25: |
+04_06__leaf_blocks__html_blocks__025: |
<style
type="text/css">
foo
-04_06__leaf_blocks__html_blocks__26: |
+04_06__leaf_blocks__html_blocks__026: |
> <div>
> foo
bar
-04_06__leaf_blocks__html_blocks__27: |
+04_06__leaf_blocks__html_blocks__027: |
- <div>
- foo
-04_06__leaf_blocks__html_blocks__28: |
+04_06__leaf_blocks__html_blocks__028: |
<style>p{color:red;}</style>
*foo*
-04_06__leaf_blocks__html_blocks__29: |
+04_06__leaf_blocks__html_blocks__029: |
<!-- foo -->*bar*
*baz*
-04_06__leaf_blocks__html_blocks__30: |
+04_06__leaf_blocks__html_blocks__030: |
<script>
foo
</script>1. *bar*
-04_06__leaf_blocks__html_blocks__31: |
+04_06__leaf_blocks__html_blocks__031: |
<!-- Foo
bar
baz -->
okay
-04_06__leaf_blocks__html_blocks__32: |
+04_06__leaf_blocks__html_blocks__032: |
<?php
echo '>';
?>
okay
-04_06__leaf_blocks__html_blocks__33: |
+04_06__leaf_blocks__html_blocks__033: |
<!DOCTYPE html>
-04_06__leaf_blocks__html_blocks__34: |
+04_06__leaf_blocks__html_blocks__034: |
<![CDATA[
function matchwo(a,b)
{
@@ -570,39 +570,39 @@
}
]]>
okay
-04_06__leaf_blocks__html_blocks__35: |2
+04_06__leaf_blocks__html_blocks__035: |2
<!-- foo -->
<!-- foo -->
-04_06__leaf_blocks__html_blocks__36: |2
+04_06__leaf_blocks__html_blocks__036: |2
<div>
<div>
-04_06__leaf_blocks__html_blocks__37: |
+04_06__leaf_blocks__html_blocks__037: |
Foo
<div>
bar
</div>
-04_06__leaf_blocks__html_blocks__38: |
+04_06__leaf_blocks__html_blocks__038: |
<div>
bar
</div>
*foo*
-04_06__leaf_blocks__html_blocks__39: |
+04_06__leaf_blocks__html_blocks__039: |
Foo
<a href="bar">
baz
-04_06__leaf_blocks__html_blocks__40: |
+04_06__leaf_blocks__html_blocks__040: |
<div>
*Emphasized* text.
</div>
-04_06__leaf_blocks__html_blocks__41: |
+04_06__leaf_blocks__html_blocks__041: |
<div>
*Emphasized* text.
</div>
-04_06__leaf_blocks__html_blocks__42: |
+04_06__leaf_blocks__html_blocks__042: |
<table>
<tr>
@@ -614,7 +614,7 @@
</tr>
</table>
-04_06__leaf_blocks__html_blocks__43: |
+04_06__leaf_blocks__html_blocks__043: |
<table>
<tr>
@@ -626,23 +626,23 @@
</tr>
</table>
-04_07__leaf_blocks__link_reference_definitions__01: |
+04_07__leaf_blocks__link_reference_definitions__001: |
[foo]: /url "title"
[foo]
-04_07__leaf_blocks__link_reference_definitions__02: " [foo]: \n /url \n 'the
+04_07__leaf_blocks__link_reference_definitions__002: " [foo]: \n /url \n 'the
title' \n\n[foo]\n"
-04_07__leaf_blocks__link_reference_definitions__03: |
+04_07__leaf_blocks__link_reference_definitions__003: |
[Foo*bar\]]:my_(url) 'title (with parens)'
[Foo*bar\]]
-04_07__leaf_blocks__link_reference_definitions__04: |
+04_07__leaf_blocks__link_reference_definitions__004: |
[Foo bar]:
<my url>
'title'
[Foo bar]
-04_07__leaf_blocks__link_reference_definitions__05: |
+04_07__leaf_blocks__link_reference_definitions__005: |
[foo]: /url '
title
line1
@@ -650,91 +650,91 @@
'
[foo]
-04_07__leaf_blocks__link_reference_definitions__06: |
+04_07__leaf_blocks__link_reference_definitions__006: |
[foo]: /url 'title
with blank line'
[foo]
-04_07__leaf_blocks__link_reference_definitions__07: |
+04_07__leaf_blocks__link_reference_definitions__007: |
[foo]:
/url
[foo]
-04_07__leaf_blocks__link_reference_definitions__08: |
+04_07__leaf_blocks__link_reference_definitions__008: |
[foo]:
[foo]
-04_07__leaf_blocks__link_reference_definitions__09: |
+04_07__leaf_blocks__link_reference_definitions__009: |
[foo]: <>
[foo]
-04_07__leaf_blocks__link_reference_definitions__10: |
+04_07__leaf_blocks__link_reference_definitions__010: |
[foo]: <bar>(baz)
[foo]
-04_07__leaf_blocks__link_reference_definitions__11: |
+04_07__leaf_blocks__link_reference_definitions__011: |
[foo]: /url\bar\*baz "foo\"bar\baz"
[foo]
-04_07__leaf_blocks__link_reference_definitions__12: |
+04_07__leaf_blocks__link_reference_definitions__012: |
[foo]
[foo]: url
-04_07__leaf_blocks__link_reference_definitions__13: |
+04_07__leaf_blocks__link_reference_definitions__013: |
[foo]
[foo]: first
[foo]: second
-04_07__leaf_blocks__link_reference_definitions__14: |
+04_07__leaf_blocks__link_reference_definitions__014: |
[FOO]: /url
[Foo]
-04_07__leaf_blocks__link_reference_definitions__15: |
+04_07__leaf_blocks__link_reference_definitions__015: |
[ΑΓΩ]: /φου
[αγω]
-04_07__leaf_blocks__link_reference_definitions__16: |
+04_07__leaf_blocks__link_reference_definitions__016: |
[foo]: /url
-04_07__leaf_blocks__link_reference_definitions__17: |
+04_07__leaf_blocks__link_reference_definitions__017: |
[
foo
]: /url
bar
-04_07__leaf_blocks__link_reference_definitions__18: |
+04_07__leaf_blocks__link_reference_definitions__018: |
[foo]: /url "title" ok
-04_07__leaf_blocks__link_reference_definitions__19: |
+04_07__leaf_blocks__link_reference_definitions__019: |
[foo]: /url
"title" ok
-04_07__leaf_blocks__link_reference_definitions__20: |2
+04_07__leaf_blocks__link_reference_definitions__020: |2
[foo]: /url "title"
[foo]
-04_07__leaf_blocks__link_reference_definitions__21: |
+04_07__leaf_blocks__link_reference_definitions__021: |
```
[foo]: /url
```
[foo]
-04_07__leaf_blocks__link_reference_definitions__22: |
+04_07__leaf_blocks__link_reference_definitions__022: |
Foo
[bar]: /baz
[bar]
-04_07__leaf_blocks__link_reference_definitions__23: |
+04_07__leaf_blocks__link_reference_definitions__023: |
# [Foo]
[foo]: /url
> bar
-04_07__leaf_blocks__link_reference_definitions__24: |
+04_07__leaf_blocks__link_reference_definitions__024: |
[foo]: /url
bar
===
[foo]
-04_07__leaf_blocks__link_reference_definitions__25: |
+04_07__leaf_blocks__link_reference_definitions__025: |
[foo]: /url
===
[foo]
-04_07__leaf_blocks__link_reference_definitions__26: |
+04_07__leaf_blocks__link_reference_definitions__026: |
[foo]: /foo-url "foo"
[bar]: /bar-url
"bar"
@@ -743,211 +743,211 @@
[foo],
[bar],
[baz]
-04_07__leaf_blocks__link_reference_definitions__27: |
+04_07__leaf_blocks__link_reference_definitions__027: |
[foo]
> [foo]: /url
-04_07__leaf_blocks__link_reference_definitions__28: |
+04_07__leaf_blocks__link_reference_definitions__028: |
[foo]: /url
-04_08__leaf_blocks__paragraphs__01: |
+04_08__leaf_blocks__paragraphs__001: |
aaa
bbb
-04_08__leaf_blocks__paragraphs__02: |
+04_08__leaf_blocks__paragraphs__002: |
aaa
bbb
ccc
ddd
-04_08__leaf_blocks__paragraphs__03: |
+04_08__leaf_blocks__paragraphs__003: |
aaa
bbb
-04_08__leaf_blocks__paragraphs__04: |2
+04_08__leaf_blocks__paragraphs__004: |2
aaa
bbb
-04_08__leaf_blocks__paragraphs__05: |
+04_08__leaf_blocks__paragraphs__005: |
aaa
bbb
ccc
-04_08__leaf_blocks__paragraphs__06: |2
+04_08__leaf_blocks__paragraphs__006: |2
aaa
bbb
-04_08__leaf_blocks__paragraphs__07: |2
+04_08__leaf_blocks__paragraphs__007: |2
aaa
bbb
-04_08__leaf_blocks__paragraphs__08: "aaa \nbbb \n"
-04_09__leaf_blocks__blank_lines__01: " \n\naaa\n \n\n# aaa\n\n \n"
-04_10__leaf_blocks__tables_extension__01: |
+04_08__leaf_blocks__paragraphs__008: "aaa \nbbb \n"
+04_09__leaf_blocks__blank_lines__001: " \n\naaa\n \n\n# aaa\n\n \n"
+04_10__leaf_blocks__tables_extension__001: |
| foo | bar |
| --- | --- |
| baz | bim |
-04_10__leaf_blocks__tables_extension__02: |
+04_10__leaf_blocks__tables_extension__002: |
| abc | defghi |
:-: | -----------:
bar | baz
-04_10__leaf_blocks__tables_extension__03: |
+04_10__leaf_blocks__tables_extension__003: |
| f\|oo |
| ------ |
| b `\|` az |
| b **\|** im |
-04_10__leaf_blocks__tables_extension__04: |
+04_10__leaf_blocks__tables_extension__004: |
| abc | def |
| --- | --- |
| bar | baz |
> bar
-04_10__leaf_blocks__tables_extension__05: |
+04_10__leaf_blocks__tables_extension__005: |
| abc | def |
| --- | --- |
| bar | baz |
bar
bar
-04_10__leaf_blocks__tables_extension__06: |
+04_10__leaf_blocks__tables_extension__006: |
| abc | def |
| --- |
| bar |
-04_10__leaf_blocks__tables_extension__07: |
+04_10__leaf_blocks__tables_extension__007: |
| abc | def |
| --- | --- |
| bar |
| bar | baz | boo |
-04_10__leaf_blocks__tables_extension__08: |
+04_10__leaf_blocks__tables_extension__008: |
| abc | def |
| --- | --- |
-05_01__container_blocks__block_quotes__01: |
+05_01__container_blocks__block_quotes__001: |
> # Foo
> bar
> baz
-05_01__container_blocks__block_quotes__02: |
+05_01__container_blocks__block_quotes__002: |
># Foo
>bar
> baz
-05_01__container_blocks__block_quotes__03: |2
+05_01__container_blocks__block_quotes__003: |2
> # Foo
> bar
> baz
-05_01__container_blocks__block_quotes__04: |2
+05_01__container_blocks__block_quotes__004: |2
> # Foo
> bar
> baz
-05_01__container_blocks__block_quotes__05: |
+05_01__container_blocks__block_quotes__005: |
> # Foo
> bar
baz
-05_01__container_blocks__block_quotes__06: |
+05_01__container_blocks__block_quotes__006: |
> bar
baz
> foo
-05_01__container_blocks__block_quotes__07: |
+05_01__container_blocks__block_quotes__007: |
> foo
---
-05_01__container_blocks__block_quotes__08: |
+05_01__container_blocks__block_quotes__008: |
> - foo
- bar
-05_01__container_blocks__block_quotes__09: |
+05_01__container_blocks__block_quotes__009: |
> foo
bar
-05_01__container_blocks__block_quotes__10: |
+05_01__container_blocks__block_quotes__010: |
> ```
foo
```
-05_01__container_blocks__block_quotes__11: |
+05_01__container_blocks__block_quotes__011: |
> foo
- bar
-05_01__container_blocks__block_quotes__12: |
+05_01__container_blocks__block_quotes__012: |
>
-05_01__container_blocks__block_quotes__13: ">\n> \n> \n"
-05_01__container_blocks__block_quotes__14: ">\n> foo\n> \n"
-05_01__container_blocks__block_quotes__15: |
+05_01__container_blocks__block_quotes__013: ">\n> \n> \n"
+05_01__container_blocks__block_quotes__014: ">\n> foo\n> \n"
+05_01__container_blocks__block_quotes__015: |
> foo
> bar
-05_01__container_blocks__block_quotes__16: |
+05_01__container_blocks__block_quotes__016: |
> foo
> bar
-05_01__container_blocks__block_quotes__17: |
+05_01__container_blocks__block_quotes__017: |
> foo
>
> bar
-05_01__container_blocks__block_quotes__18: |
+05_01__container_blocks__block_quotes__018: |
foo
> bar
-05_01__container_blocks__block_quotes__19: |
+05_01__container_blocks__block_quotes__019: |
> aaa
***
> bbb
-05_01__container_blocks__block_quotes__20: |
+05_01__container_blocks__block_quotes__020: |
> bar
baz
-05_01__container_blocks__block_quotes__21: |
+05_01__container_blocks__block_quotes__021: |
> bar
baz
-05_01__container_blocks__block_quotes__22: |
+05_01__container_blocks__block_quotes__022: |
> bar
>
baz
-05_01__container_blocks__block_quotes__23: |
+05_01__container_blocks__block_quotes__023: |
> > > foo
bar
-05_01__container_blocks__block_quotes__24: |
+05_01__container_blocks__block_quotes__024: |
>>> foo
> bar
>>baz
-05_01__container_blocks__block_quotes__25: |
+05_01__container_blocks__block_quotes__025: |
> code
> not code
-05_02__container_blocks__list_items__01: |
+05_02__container_blocks__list_items__001: |
A paragraph
with two lines.
indented code
> A block quote.
-05_02__container_blocks__list_items__02: |
+05_02__container_blocks__list_items__002: |
1. A paragraph
with two lines.
indented code
> A block quote.
-05_02__container_blocks__list_items__03: |
+05_02__container_blocks__list_items__003: |
- one
two
-05_02__container_blocks__list_items__04: |
+05_02__container_blocks__list_items__004: |
- one
two
-05_02__container_blocks__list_items__05: |2
+05_02__container_blocks__list_items__005: |2
- one
two
-05_02__container_blocks__list_items__06: |2
+05_02__container_blocks__list_items__006: |2
- one
two
-05_02__container_blocks__list_items__07: |2
+05_02__container_blocks__list_items__007: |2
> > 1. one
>>
>> two
-05_02__container_blocks__list_items__08: |
+05_02__container_blocks__list_items__008: |
>>- one
>>
> > two
-05_02__container_blocks__list_items__09: |
+05_02__container_blocks__list_items__009: |
-one
2.two
-05_02__container_blocks__list_items__10: |
+05_02__container_blocks__list_items__010: |
- foo
bar
-05_02__container_blocks__list_items__11: |
+05_02__container_blocks__list_items__011: |
1. foo
```
@@ -957,62 +957,62 @@
baz
> bam
-05_02__container_blocks__list_items__12: |
+05_02__container_blocks__list_items__012: |
- Foo
bar
baz
-05_02__container_blocks__list_items__13: |
+05_02__container_blocks__list_items__013: |
123456789. ok
-05_02__container_blocks__list_items__14: |
+05_02__container_blocks__list_items__014: |
1234567890. not ok
-05_02__container_blocks__list_items__15: |
+05_02__container_blocks__list_items__015: |
0. ok
-05_02__container_blocks__list_items__16: |
+05_02__container_blocks__list_items__016: |
003. ok
-05_02__container_blocks__list_items__17: |
+05_02__container_blocks__list_items__017: |
-1. not ok
-05_02__container_blocks__list_items__18: |
+05_02__container_blocks__list_items__018: |
- foo
bar
-05_02__container_blocks__list_items__19: |2
+05_02__container_blocks__list_items__019: |2
10. foo
bar
-05_02__container_blocks__list_items__20: |2
+05_02__container_blocks__list_items__020: |2
indented code
paragraph
more code
-05_02__container_blocks__list_items__21: |
+05_02__container_blocks__list_items__021: |
1. indented code
paragraph
more code
-05_02__container_blocks__list_items__22: |
+05_02__container_blocks__list_items__022: |
1. indented code
paragraph
more code
-05_02__container_blocks__list_items__23: |2
+05_02__container_blocks__list_items__023: |2
foo
bar
-05_02__container_blocks__list_items__24: |
+05_02__container_blocks__list_items__024: |
- foo
bar
-05_02__container_blocks__list_items__25: |
+05_02__container_blocks__list_items__025: |
- foo
bar
-05_02__container_blocks__list_items__26: |
+05_02__container_blocks__list_items__026: |
-
foo
-
@@ -1021,98 +1021,98 @@
```
-
baz
-05_02__container_blocks__list_items__27: "- \n foo\n"
-05_02__container_blocks__list_items__28: |
+05_02__container_blocks__list_items__027: "- \n foo\n"
+05_02__container_blocks__list_items__028: |
-
foo
-05_02__container_blocks__list_items__29: |
+05_02__container_blocks__list_items__029: |
- foo
-
- bar
-05_02__container_blocks__list_items__30: "- foo\n- \n- bar\n"
-05_02__container_blocks__list_items__31: |
+05_02__container_blocks__list_items__030: "- foo\n- \n- bar\n"
+05_02__container_blocks__list_items__031: |
1. foo
2.
3. bar
-05_02__container_blocks__list_items__32: |
+05_02__container_blocks__list_items__032: |
*
-05_02__container_blocks__list_items__33: |
+05_02__container_blocks__list_items__033: |
foo
*
foo
1.
-05_02__container_blocks__list_items__34: |2
+05_02__container_blocks__list_items__034: |2
1. A paragraph
with two lines.
indented code
> A block quote.
-05_02__container_blocks__list_items__35: |2
+05_02__container_blocks__list_items__035: |2
1. A paragraph
with two lines.
indented code
> A block quote.
-05_02__container_blocks__list_items__36: |2
+05_02__container_blocks__list_items__036: |2
1. A paragraph
with two lines.
indented code
> A block quote.
-05_02__container_blocks__list_items__37: |2
+05_02__container_blocks__list_items__037: |2
1. A paragraph
with two lines.
indented code
> A block quote.
-05_02__container_blocks__list_items__38: |2
+05_02__container_blocks__list_items__038: |2
1. A paragraph
with two lines.
indented code
> A block quote.
-05_02__container_blocks__list_items__39: |2
+05_02__container_blocks__list_items__039: |2
1. A paragraph
with two lines.
-05_02__container_blocks__list_items__40: |
+05_02__container_blocks__list_items__040: |
> 1. > Blockquote
continued here.
-05_02__container_blocks__list_items__41: |
+05_02__container_blocks__list_items__041: |
> 1. > Blockquote
> continued here.
-05_02__container_blocks__list_items__42: |
+05_02__container_blocks__list_items__042: |
- foo
- bar
- baz
- boo
-05_02__container_blocks__list_items__43: |
+05_02__container_blocks__list_items__043: |
- foo
- bar
- baz
- boo
-05_02__container_blocks__list_items__44: |
+05_02__container_blocks__list_items__044: |
10) foo
- bar
-05_02__container_blocks__list_items__45: |
+05_02__container_blocks__list_items__045: |
10) foo
- bar
-05_02__container_blocks__list_items__46: |
+05_02__container_blocks__list_items__046: |
- - foo
-05_02__container_blocks__list_items__47: |
+05_02__container_blocks__list_items__047: |
1. - 2. foo
-05_02__container_blocks__list_items__48: |
+05_02__container_blocks__list_items__048: |
- # Foo
- Bar
---
baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__49: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049: |
- [ ] foo
- [x] bar
- [x] foo
@@ -1122,35 +1122,35 @@
- foo
- bar
+ baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__50: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050: |
1. foo
2. bar
3) baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__51: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051: |
Foo
- bar
- baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__52: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052: |
The number of windows in my house is
14. The number of doors is 6.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__53: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053: |
The number of windows in my house is
1. The number of doors is 6.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__54: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054: |
- foo
- bar
- baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__55: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055: |
- foo
- bar
- baz
bim
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__56: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056: |
- foo
- bar
@@ -1158,7 +1158,7 @@
- baz
- bim
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__57: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057: |
- foo
notcode
@@ -1168,7 +1168,7 @@
<!-- -->
code
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__58: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058: |
- a
- b
- c
@@ -1176,47 +1176,47 @@
- e
- f
- g
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__59: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059: |
1. a
2. b
3. c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__60: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060: |
- a
- b
- c
- d
- e
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__61: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061: |
1. a
2. b
3. c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__62: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062: |
- a
- b
- c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__63: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063: |
* a
*
* c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__64: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064: |
- a
- b
c
- d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__65: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065: |
- a
- b
[ref]: /url
- d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__66: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066: |
- a
- ```
b
@@ -1224,41 +1224,41 @@
```
- c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__67: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067: |
- a
- b
c
- d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__68: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068: |
* a
> b
>
* c
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__69: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069: |
- a
> b
```
c
```
- d
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__70: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070: |
- a
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__71: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071: |
- a
- b
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__72: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072: |
1. ```
foo
```
bar
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__73: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073: |
* foo
* bar
baz
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__74: |
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074: |
- a
- b
- c
@@ -1266,12 +1266,12 @@
- d
- e
- f
-06_01__inlines__01: |
+06_01__inlines__001: |
`hi`lo`
-06_02__inlines__backslash_escapes__01: |
+06_02__inlines__backslash_escapes__001: |
\!\"\#\$\%\&\'\(\)\*\+\,\-\.\/\:\;\<\=\>\?\@\[\\\]\^\_\`\{\|\}\~
-06_02__inlines__backslash_escapes__02: "\\\t\\A\\a\\ \\3\\φ\\«\n"
-06_02__inlines__backslash_escapes__03: |
+06_02__inlines__backslash_escapes__002: "\\\t\\A\\a\\ \\3\\φ\\«\n"
+06_02__inlines__backslash_escapes__003: |
\*not emphasized*
\<br/> not a tag
\[not a link](/foo)
@@ -1281,324 +1281,324 @@
\# not a heading
\[foo]: /url "not a reference"
\&ouml; not a character entity
-06_02__inlines__backslash_escapes__04: |
+06_02__inlines__backslash_escapes__004: |
\\*emphasis*
-06_02__inlines__backslash_escapes__05: |
+06_02__inlines__backslash_escapes__005: |
foo\
bar
-06_02__inlines__backslash_escapes__06: |
+06_02__inlines__backslash_escapes__006: |
`` \[\` ``
-06_02__inlines__backslash_escapes__07: |2
+06_02__inlines__backslash_escapes__007: |2
\[\]
-06_02__inlines__backslash_escapes__08: |
+06_02__inlines__backslash_escapes__008: |
~~~
\[\]
~~~
-06_02__inlines__backslash_escapes__09: |
+06_02__inlines__backslash_escapes__009: |
<http://example.com?find=\*>
-06_02__inlines__backslash_escapes__10: |
+06_02__inlines__backslash_escapes__010: |
<a href="/bar\/)">
-06_02__inlines__backslash_escapes__11: |
+06_02__inlines__backslash_escapes__011: |
[foo](/bar\* "ti\*tle")
-06_02__inlines__backslash_escapes__12: |
+06_02__inlines__backslash_escapes__012: |
[foo]
[foo]: /bar\* "ti\*tle"
-06_02__inlines__backslash_escapes__13: |
+06_02__inlines__backslash_escapes__013: |
``` foo\+bar
foo
```
-06_03__inlines__entity_and_numeric_character_references__01: |
+06_03__inlines__entity_and_numeric_character_references__001: |
&nbsp; &amp; &copy; &AElig; &Dcaron;
&frac34; &HilbertSpace; &DifferentialD;
&ClockwiseContourIntegral; &ngE;
-06_03__inlines__entity_and_numeric_character_references__02: |
+06_03__inlines__entity_and_numeric_character_references__002: |
&#35; &#1234; &#992; &#0;
-06_03__inlines__entity_and_numeric_character_references__03: |
+06_03__inlines__entity_and_numeric_character_references__003: |
&#X22; &#XD06; &#xcab;
-06_03__inlines__entity_and_numeric_character_references__04: |
+06_03__inlines__entity_and_numeric_character_references__004: |
&nbsp &x; &#; &#x;
&#987654321;
&#abcdef0;
&ThisIsNotDefined; &hi?;
-06_03__inlines__entity_and_numeric_character_references__05: |
+06_03__inlines__entity_and_numeric_character_references__005: |
&copy
-06_03__inlines__entity_and_numeric_character_references__06: |
+06_03__inlines__entity_and_numeric_character_references__006: |
&MadeUpEntity;
-06_03__inlines__entity_and_numeric_character_references__07: |
+06_03__inlines__entity_and_numeric_character_references__007: |
<a href="&ouml;&ouml;.html">
-06_03__inlines__entity_and_numeric_character_references__08: |
+06_03__inlines__entity_and_numeric_character_references__008: |
[foo](/f&ouml;&ouml; "f&ouml;&ouml;")
-06_03__inlines__entity_and_numeric_character_references__09: |
+06_03__inlines__entity_and_numeric_character_references__009: |
[foo]
[foo]: /f&ouml;&ouml; "f&ouml;&ouml;"
-06_03__inlines__entity_and_numeric_character_references__10: |
+06_03__inlines__entity_and_numeric_character_references__010: |
``` f&ouml;&ouml;
foo
```
-06_03__inlines__entity_and_numeric_character_references__11: |
+06_03__inlines__entity_and_numeric_character_references__011: |
`f&ouml;&ouml;`
-06_03__inlines__entity_and_numeric_character_references__12: |2
+06_03__inlines__entity_and_numeric_character_references__012: |2
f&ouml;f&ouml;
-06_03__inlines__entity_and_numeric_character_references__13: |
+06_03__inlines__entity_and_numeric_character_references__013: |
&#42;foo&#42;
*foo*
-06_03__inlines__entity_and_numeric_character_references__14: |
+06_03__inlines__entity_and_numeric_character_references__014: |
&#42; foo
* foo
-06_03__inlines__entity_and_numeric_character_references__15: |
+06_03__inlines__entity_and_numeric_character_references__015: |
foo&#10;&#10;bar
-06_03__inlines__entity_and_numeric_character_references__16: |
+06_03__inlines__entity_and_numeric_character_references__016: |
&#9;foo
-06_03__inlines__entity_and_numeric_character_references__17: |
+06_03__inlines__entity_and_numeric_character_references__017: |
[a](url &quot;tit&quot;)
-06_04__inlines__code_spans__01: |
+06_04__inlines__code_spans__001: |
`foo`
-06_04__inlines__code_spans__02: |
+06_04__inlines__code_spans__002: |
`` foo ` bar ``
-06_04__inlines__code_spans__03: |
+06_04__inlines__code_spans__003: |
` `` `
-06_04__inlines__code_spans__04: |
+06_04__inlines__code_spans__004: |
` `` `
-06_04__inlines__code_spans__05: |
+06_04__inlines__code_spans__005: |
` a`
-06_04__inlines__code_spans__06: |
+06_04__inlines__code_spans__006: |
` b `
-06_04__inlines__code_spans__07: |
+06_04__inlines__code_spans__007: |
` `
` `
-06_04__inlines__code_spans__08: "``\nfoo\nbar \nbaz\n``\n"
-06_04__inlines__code_spans__09: "``\nfoo \n``\n"
-06_04__inlines__code_spans__10: "`foo bar \nbaz`\n"
-06_04__inlines__code_spans__11: |
+06_04__inlines__code_spans__008: "``\nfoo\nbar \nbaz\n``\n"
+06_04__inlines__code_spans__009: "``\nfoo \n``\n"
+06_04__inlines__code_spans__010: "`foo bar \nbaz`\n"
+06_04__inlines__code_spans__011: |
`foo\`bar`
-06_04__inlines__code_spans__12: |
+06_04__inlines__code_spans__012: |
``foo`bar``
-06_04__inlines__code_spans__13: |
+06_04__inlines__code_spans__013: |
` foo `` bar `
-06_04__inlines__code_spans__14: |
+06_04__inlines__code_spans__014: |
*foo`*`
-06_04__inlines__code_spans__15: |
+06_04__inlines__code_spans__015: |
[not a `link](/foo`)
-06_04__inlines__code_spans__16: |
+06_04__inlines__code_spans__016: |
`<a href="`">`
-06_04__inlines__code_spans__17: |
+06_04__inlines__code_spans__017: |
<a href="`">`
-06_04__inlines__code_spans__18: |
+06_04__inlines__code_spans__018: |
`<http://foo.bar.`baz>`
-06_04__inlines__code_spans__19: |
+06_04__inlines__code_spans__019: |
<http://foo.bar.`baz>`
-06_04__inlines__code_spans__20: |
+06_04__inlines__code_spans__020: |
```foo``
-06_04__inlines__code_spans__21: |
+06_04__inlines__code_spans__021: |
`foo
-06_04__inlines__code_spans__22: |
+06_04__inlines__code_spans__022: |
`foo``bar``
-06_05__inlines__emphasis_and_strong_emphasis__01: |
+06_05__inlines__emphasis_and_strong_emphasis__001: |
*foo bar*
-06_05__inlines__emphasis_and_strong_emphasis__02: |
+06_05__inlines__emphasis_and_strong_emphasis__002: |
a * foo bar*
-06_05__inlines__emphasis_and_strong_emphasis__03: |
+06_05__inlines__emphasis_and_strong_emphasis__003: |
a*"foo"*
-06_05__inlines__emphasis_and_strong_emphasis__04: |
+06_05__inlines__emphasis_and_strong_emphasis__004: |
* a *
-06_05__inlines__emphasis_and_strong_emphasis__05: |
+06_05__inlines__emphasis_and_strong_emphasis__005: |
foo*bar*
-06_05__inlines__emphasis_and_strong_emphasis__06: |
+06_05__inlines__emphasis_and_strong_emphasis__006: |
5*6*78
-06_05__inlines__emphasis_and_strong_emphasis__07: |
+06_05__inlines__emphasis_and_strong_emphasis__007: |
_foo bar_
-06_05__inlines__emphasis_and_strong_emphasis__08: |
+06_05__inlines__emphasis_and_strong_emphasis__008: |
_ foo bar_
-06_05__inlines__emphasis_and_strong_emphasis__09: |
+06_05__inlines__emphasis_and_strong_emphasis__009: |
a_"foo"_
-06_05__inlines__emphasis_and_strong_emphasis__10: |
+06_05__inlines__emphasis_and_strong_emphasis__010: |
foo_bar_
-06_05__inlines__emphasis_and_strong_emphasis__11: |
+06_05__inlines__emphasis_and_strong_emphasis__011: |
5_6_78
-06_05__inlines__emphasis_and_strong_emphasis__12: |
+06_05__inlines__emphasis_and_strong_emphasis__012: |
  пристаням_стремятся_
-06_05__inlines__emphasis_and_strong_emphasis__13: |
+06_05__inlines__emphasis_and_strong_emphasis__013: |
aa_"bb"_cc
-06_05__inlines__emphasis_and_strong_emphasis__14: |
+06_05__inlines__emphasis_and_strong_emphasis__014: |
foo-_(bar)_
-06_05__inlines__emphasis_and_strong_emphasis__15: |
+06_05__inlines__emphasis_and_strong_emphasis__015: |
_foo*
-06_05__inlines__emphasis_and_strong_emphasis__16: |
+06_05__inlines__emphasis_and_strong_emphasis__016: |
*foo bar *
-06_05__inlines__emphasis_and_strong_emphasis__17: |
+06_05__inlines__emphasis_and_strong_emphasis__017: |
*foo bar
*
-06_05__inlines__emphasis_and_strong_emphasis__18: |
+06_05__inlines__emphasis_and_strong_emphasis__018: |
*(*foo)
-06_05__inlines__emphasis_and_strong_emphasis__19: |
+06_05__inlines__emphasis_and_strong_emphasis__019: |
*(*foo*)*
-06_05__inlines__emphasis_and_strong_emphasis__20: |
+06_05__inlines__emphasis_and_strong_emphasis__020: |
*foo*bar
-06_05__inlines__emphasis_and_strong_emphasis__21: |
+06_05__inlines__emphasis_and_strong_emphasis__021: |
_foo bar _
-06_05__inlines__emphasis_and_strong_emphasis__22: |
+06_05__inlines__emphasis_and_strong_emphasis__022: |
_(_foo)
-06_05__inlines__emphasis_and_strong_emphasis__23: |
+06_05__inlines__emphasis_and_strong_emphasis__023: |
_(_foo_)_
-06_05__inlines__emphasis_and_strong_emphasis__24: |
+06_05__inlines__emphasis_and_strong_emphasis__024: |
_foo_bar
-06_05__inlines__emphasis_and_strong_emphasis__25: |
+06_05__inlines__emphasis_and_strong_emphasis__025: |
  _пристаням_стремятся
-06_05__inlines__emphasis_and_strong_emphasis__26: |
+06_05__inlines__emphasis_and_strong_emphasis__026: |
_foo_bar_baz_
-06_05__inlines__emphasis_and_strong_emphasis__27: |
+06_05__inlines__emphasis_and_strong_emphasis__027: |
_(bar)_.
-06_05__inlines__emphasis_and_strong_emphasis__28: |
+06_05__inlines__emphasis_and_strong_emphasis__028: |
**foo bar**
-06_05__inlines__emphasis_and_strong_emphasis__29: |
+06_05__inlines__emphasis_and_strong_emphasis__029: |
** foo bar**
-06_05__inlines__emphasis_and_strong_emphasis__30: |
+06_05__inlines__emphasis_and_strong_emphasis__030: |
a**"foo"**
-06_05__inlines__emphasis_and_strong_emphasis__31: |
+06_05__inlines__emphasis_and_strong_emphasis__031: |
foo**bar**
-06_05__inlines__emphasis_and_strong_emphasis__32: |
+06_05__inlines__emphasis_and_strong_emphasis__032: |
__foo bar__
-06_05__inlines__emphasis_and_strong_emphasis__33: |
+06_05__inlines__emphasis_and_strong_emphasis__033: |
__ foo bar__
-06_05__inlines__emphasis_and_strong_emphasis__34: |
+06_05__inlines__emphasis_and_strong_emphasis__034: |
__
foo bar__
-06_05__inlines__emphasis_and_strong_emphasis__35: |
+06_05__inlines__emphasis_and_strong_emphasis__035: |
a__"foo"__
-06_05__inlines__emphasis_and_strong_emphasis__36: |
+06_05__inlines__emphasis_and_strong_emphasis__036: |
foo__bar__
-06_05__inlines__emphasis_and_strong_emphasis__37: |
+06_05__inlines__emphasis_and_strong_emphasis__037: |
5__6__78
-06_05__inlines__emphasis_and_strong_emphasis__38: |
+06_05__inlines__emphasis_and_strong_emphasis__038: |
  пристаням__стремятся__
-06_05__inlines__emphasis_and_strong_emphasis__39: |
+06_05__inlines__emphasis_and_strong_emphasis__039: |
__foo, __bar__, baz__
-06_05__inlines__emphasis_and_strong_emphasis__40: |
+06_05__inlines__emphasis_and_strong_emphasis__040: |
foo-__(bar)__
-06_05__inlines__emphasis_and_strong_emphasis__41: |
+06_05__inlines__emphasis_and_strong_emphasis__041: |
**foo bar **
-06_05__inlines__emphasis_and_strong_emphasis__42: |
+06_05__inlines__emphasis_and_strong_emphasis__042: |
**(**foo)
-06_05__inlines__emphasis_and_strong_emphasis__43: |
+06_05__inlines__emphasis_and_strong_emphasis__043: |
*(**foo**)*
-06_05__inlines__emphasis_and_strong_emphasis__44: |
+06_05__inlines__emphasis_and_strong_emphasis__044: |
**Gomphocarpus (*Gomphocarpus physocarpus*, syn.
*Asclepias physocarpa*)**
-06_05__inlines__emphasis_and_strong_emphasis__45: |
+06_05__inlines__emphasis_and_strong_emphasis__045: |
**foo "*bar*" foo**
-06_05__inlines__emphasis_and_strong_emphasis__46: |
+06_05__inlines__emphasis_and_strong_emphasis__046: |
**foo**bar
-06_05__inlines__emphasis_and_strong_emphasis__47: |
+06_05__inlines__emphasis_and_strong_emphasis__047: |
__foo bar __
-06_05__inlines__emphasis_and_strong_emphasis__48: |
+06_05__inlines__emphasis_and_strong_emphasis__048: |
__(__foo)
-06_05__inlines__emphasis_and_strong_emphasis__49: |
+06_05__inlines__emphasis_and_strong_emphasis__049: |
_(__foo__)_
-06_05__inlines__emphasis_and_strong_emphasis__50: |
+06_05__inlines__emphasis_and_strong_emphasis__050: |
__foo__bar
-06_05__inlines__emphasis_and_strong_emphasis__51: |
+06_05__inlines__emphasis_and_strong_emphasis__051: |
  __пристаням__стремятся
-06_05__inlines__emphasis_and_strong_emphasis__52: |
+06_05__inlines__emphasis_and_strong_emphasis__052: |
__foo__bar__baz__
-06_05__inlines__emphasis_and_strong_emphasis__53: |
+06_05__inlines__emphasis_and_strong_emphasis__053: |
__(bar)__.
-06_05__inlines__emphasis_and_strong_emphasis__54: |
+06_05__inlines__emphasis_and_strong_emphasis__054: |
*foo [bar](/url)*
-06_05__inlines__emphasis_and_strong_emphasis__55: |
+06_05__inlines__emphasis_and_strong_emphasis__055: |
*foo
bar*
-06_05__inlines__emphasis_and_strong_emphasis__56: |
+06_05__inlines__emphasis_and_strong_emphasis__056: |
_foo __bar__ baz_
-06_05__inlines__emphasis_and_strong_emphasis__57: |
+06_05__inlines__emphasis_and_strong_emphasis__057: |
_foo _bar_ baz_
-06_05__inlines__emphasis_and_strong_emphasis__58: |
+06_05__inlines__emphasis_and_strong_emphasis__058: |
__foo_ bar_
-06_05__inlines__emphasis_and_strong_emphasis__59: |
+06_05__inlines__emphasis_and_strong_emphasis__059: |
*foo *bar**
-06_05__inlines__emphasis_and_strong_emphasis__60: |
+06_05__inlines__emphasis_and_strong_emphasis__060: |
*foo **bar** baz*
-06_05__inlines__emphasis_and_strong_emphasis__61: |
+06_05__inlines__emphasis_and_strong_emphasis__061: |
*foo**bar**baz*
-06_05__inlines__emphasis_and_strong_emphasis__62: |
+06_05__inlines__emphasis_and_strong_emphasis__062: |
*foo**bar*
-06_05__inlines__emphasis_and_strong_emphasis__63: |
+06_05__inlines__emphasis_and_strong_emphasis__063: |
***foo** bar*
-06_05__inlines__emphasis_and_strong_emphasis__64: |
+06_05__inlines__emphasis_and_strong_emphasis__064: |
*foo **bar***
-06_05__inlines__emphasis_and_strong_emphasis__65: |
+06_05__inlines__emphasis_and_strong_emphasis__065: |
*foo**bar***
-06_05__inlines__emphasis_and_strong_emphasis__66: |
+06_05__inlines__emphasis_and_strong_emphasis__066: |
foo***bar***baz
-06_05__inlines__emphasis_and_strong_emphasis__67: |
+06_05__inlines__emphasis_and_strong_emphasis__067: |
foo******bar*********baz
-06_05__inlines__emphasis_and_strong_emphasis__68: |
+06_05__inlines__emphasis_and_strong_emphasis__068: |
*foo **bar *baz* bim** bop*
-06_05__inlines__emphasis_and_strong_emphasis__69: |
+06_05__inlines__emphasis_and_strong_emphasis__069: |
*foo [*bar*](/url)*
-06_05__inlines__emphasis_and_strong_emphasis__70: |
+06_05__inlines__emphasis_and_strong_emphasis__070: |
** is not an empty emphasis
-06_05__inlines__emphasis_and_strong_emphasis__71: |
+06_05__inlines__emphasis_and_strong_emphasis__071: |
**** is not an empty strong emphasis
-06_05__inlines__emphasis_and_strong_emphasis__72: |
+06_05__inlines__emphasis_and_strong_emphasis__072: |
**foo [bar](/url)**
-06_05__inlines__emphasis_and_strong_emphasis__73: |
+06_05__inlines__emphasis_and_strong_emphasis__073: |
**foo
bar**
-06_05__inlines__emphasis_and_strong_emphasis__74: |
+06_05__inlines__emphasis_and_strong_emphasis__074: |
__foo _bar_ baz__
-06_05__inlines__emphasis_and_strong_emphasis__75: |
+06_05__inlines__emphasis_and_strong_emphasis__075: |
__foo __bar__ baz__
-06_05__inlines__emphasis_and_strong_emphasis__76: |
+06_05__inlines__emphasis_and_strong_emphasis__076: |
____foo__ bar__
-06_05__inlines__emphasis_and_strong_emphasis__77: |
+06_05__inlines__emphasis_and_strong_emphasis__077: |
**foo **bar****
-06_05__inlines__emphasis_and_strong_emphasis__78: |
+06_05__inlines__emphasis_and_strong_emphasis__078: |
**foo *bar* baz**
-06_05__inlines__emphasis_and_strong_emphasis__79: |
+06_05__inlines__emphasis_and_strong_emphasis__079: |
**foo*bar*baz**
-06_05__inlines__emphasis_and_strong_emphasis__80: |
+06_05__inlines__emphasis_and_strong_emphasis__080: |
***foo* bar**
-06_05__inlines__emphasis_and_strong_emphasis__81: |
+06_05__inlines__emphasis_and_strong_emphasis__081: |
**foo *bar***
-06_05__inlines__emphasis_and_strong_emphasis__82: |
+06_05__inlines__emphasis_and_strong_emphasis__082: |
**foo *bar **baz**
bim* bop**
-06_05__inlines__emphasis_and_strong_emphasis__83: |
+06_05__inlines__emphasis_and_strong_emphasis__083: |
**foo [*bar*](/url)**
-06_05__inlines__emphasis_and_strong_emphasis__84: |
+06_05__inlines__emphasis_and_strong_emphasis__084: |
__ is not an empty emphasis
-06_05__inlines__emphasis_and_strong_emphasis__85: |
+06_05__inlines__emphasis_and_strong_emphasis__085: |
____ is not an empty strong emphasis
-06_05__inlines__emphasis_and_strong_emphasis__86: |
+06_05__inlines__emphasis_and_strong_emphasis__086: |
foo ***
-06_05__inlines__emphasis_and_strong_emphasis__87: |
+06_05__inlines__emphasis_and_strong_emphasis__087: |
foo *\**
-06_05__inlines__emphasis_and_strong_emphasis__88: |
+06_05__inlines__emphasis_and_strong_emphasis__088: |
foo *_*
-06_05__inlines__emphasis_and_strong_emphasis__89: |
+06_05__inlines__emphasis_and_strong_emphasis__089: |
foo *****
-06_05__inlines__emphasis_and_strong_emphasis__90: |
+06_05__inlines__emphasis_and_strong_emphasis__090: |
foo **\***
-06_05__inlines__emphasis_and_strong_emphasis__91: |
+06_05__inlines__emphasis_and_strong_emphasis__091: |
foo **_**
-06_05__inlines__emphasis_and_strong_emphasis__92: |
+06_05__inlines__emphasis_and_strong_emphasis__092: |
**foo*
-06_05__inlines__emphasis_and_strong_emphasis__93: |
+06_05__inlines__emphasis_and_strong_emphasis__093: |
*foo**
-06_05__inlines__emphasis_and_strong_emphasis__94: |
+06_05__inlines__emphasis_and_strong_emphasis__094: |
***foo**
-06_05__inlines__emphasis_and_strong_emphasis__95: |
+06_05__inlines__emphasis_and_strong_emphasis__095: |
****foo*
-06_05__inlines__emphasis_and_strong_emphasis__96: |
+06_05__inlines__emphasis_and_strong_emphasis__096: |
**foo***
-06_05__inlines__emphasis_and_strong_emphasis__97: |
+06_05__inlines__emphasis_and_strong_emphasis__097: |
*foo****
-06_05__inlines__emphasis_and_strong_emphasis__98: |
+06_05__inlines__emphasis_and_strong_emphasis__098: |
foo ___
-06_05__inlines__emphasis_and_strong_emphasis__99: |
+06_05__inlines__emphasis_and_strong_emphasis__099: |
foo _\__
06_05__inlines__emphasis_and_strong_emphasis__100: |
foo _*_
@@ -1664,411 +1664,411 @@
**a<http://foo.bar/?q=**>
06_05__inlines__emphasis_and_strong_emphasis__131: |
__a<http://foo.bar/?q=__>
-06_06__inlines__strikethrough_extension__01: |
+06_06__inlines__strikethrough_extension__001: |
~~Hi~~ Hello, world!
-06_06__inlines__strikethrough_extension__02: |
+06_06__inlines__strikethrough_extension__002: |
This ~~has a
new paragraph~~.
-06_07__inlines__links__01: |
+06_07__inlines__links__001: |
[link](/uri "title")
-06_07__inlines__links__02: |
+06_07__inlines__links__002: |
[link](/uri)
-06_07__inlines__links__03: |
+06_07__inlines__links__003: |
[link]()
-06_07__inlines__links__04: |
+06_07__inlines__links__004: |
[link](<>)
-06_07__inlines__links__05: |
+06_07__inlines__links__005: |
[link](/my uri)
-06_07__inlines__links__06: |
+06_07__inlines__links__006: |
[link](</my uri>)
-06_07__inlines__links__07: |
+06_07__inlines__links__007: |
[link](foo
bar)
-06_07__inlines__links__08: |
+06_07__inlines__links__008: |
[link](<foo
bar>)
-06_07__inlines__links__09: |
+06_07__inlines__links__009: |
[a](<b)c>)
-06_07__inlines__links__10: |
+06_07__inlines__links__010: |
[link](<foo\>)
-06_07__inlines__links__11: |
+06_07__inlines__links__011: |
[a](<b)c
[a](<b)c>
[a](<b>c)
-06_07__inlines__links__12: |
+06_07__inlines__links__012: |
[link](\(foo\))
-06_07__inlines__links__13: |
+06_07__inlines__links__013: |
[link](foo(and(bar)))
-06_07__inlines__links__14: |
+06_07__inlines__links__014: |
[link](foo\(and\(bar\))
-06_07__inlines__links__15: |
+06_07__inlines__links__015: |
[link](<foo(and(bar)>)
-06_07__inlines__links__16: |
+06_07__inlines__links__016: |
[link](foo\)\:)
-06_07__inlines__links__17: |
+06_07__inlines__links__017: |
[link](#fragment)
[link](http://example.com#fragment)
[link](http://example.com?foo=3#frag)
-06_07__inlines__links__18: |
+06_07__inlines__links__018: |
[link](foo\bar)
-06_07__inlines__links__19: |
+06_07__inlines__links__019: |
[link](foo%20b&auml;)
-06_07__inlines__links__20: |
+06_07__inlines__links__020: |
[link]("title")
-06_07__inlines__links__21: |
+06_07__inlines__links__021: |
[link](/url "title")
[link](/url 'title')
[link](/url (title))
-06_07__inlines__links__22: |
+06_07__inlines__links__022: |
[link](/url "title \"&quot;")
-06_07__inlines__links__23: |
+06_07__inlines__links__023: |
[link](/url "title")
-06_07__inlines__links__24: |
+06_07__inlines__links__024: |
[link](/url "title "and" title")
-06_07__inlines__links__25: |
+06_07__inlines__links__025: |
[link](/url 'title "and" title')
-06_07__inlines__links__26: |
+06_07__inlines__links__026: |
[link]( /uri
"title" )
-06_07__inlines__links__27: |
+06_07__inlines__links__027: |
[link] (/uri)
-06_07__inlines__links__28: |
+06_07__inlines__links__028: |
[link [foo [bar]]](/uri)
-06_07__inlines__links__29: |
+06_07__inlines__links__029: |
[link] bar](/uri)
-06_07__inlines__links__30: |
+06_07__inlines__links__030: |
[link [bar](/uri)
-06_07__inlines__links__31: |
+06_07__inlines__links__031: |
[link \[bar](/uri)
-06_07__inlines__links__32: |
+06_07__inlines__links__032: |
[link *foo **bar** `#`*](/uri)
-06_07__inlines__links__33: |
+06_07__inlines__links__033: |
[![moon](moon.jpg)](/uri)
-06_07__inlines__links__34: |
+06_07__inlines__links__034: |
[foo [bar](/uri)](/uri)
-06_07__inlines__links__35: |
+06_07__inlines__links__035: |
[foo *[bar [baz](/uri)](/uri)*](/uri)
-06_07__inlines__links__36: |
+06_07__inlines__links__036: |
![[[foo](uri1)](uri2)](uri3)
-06_07__inlines__links__37: |
+06_07__inlines__links__037: |
*[foo*](/uri)
-06_07__inlines__links__38: |
+06_07__inlines__links__038: |
[foo *bar](baz*)
-06_07__inlines__links__39: |
+06_07__inlines__links__039: |
*foo [bar* baz]
-06_07__inlines__links__40: |
+06_07__inlines__links__040: |
[foo <bar attr="](baz)">
-06_07__inlines__links__41: |
+06_07__inlines__links__041: |
[foo`](/uri)`
-06_07__inlines__links__42: |
+06_07__inlines__links__042: |
[foo<http://example.com/?search=](uri)>
-06_07__inlines__links__43: |
+06_07__inlines__links__043: |
[foo][bar]
[bar]: /url "title"
-06_07__inlines__links__44: |
+06_07__inlines__links__044: |
[link [foo [bar]]][ref]
[ref]: /uri
-06_07__inlines__links__45: |
+06_07__inlines__links__045: |
[link \[bar][ref]
[ref]: /uri
-06_07__inlines__links__46: |
+06_07__inlines__links__046: |
[link *foo **bar** `#`*][ref]
[ref]: /uri
-06_07__inlines__links__47: |
+06_07__inlines__links__047: |
[![moon](moon.jpg)][ref]
[ref]: /uri
-06_07__inlines__links__48: |
+06_07__inlines__links__048: |
[foo [bar](/uri)][ref]
[ref]: /uri
-06_07__inlines__links__49: |
+06_07__inlines__links__049: |
[foo *bar [baz][ref]*][ref]
[ref]: /uri
-06_07__inlines__links__50: |
+06_07__inlines__links__050: |
*[foo*][ref]
[ref]: /uri
-06_07__inlines__links__51: |
+06_07__inlines__links__051: |
[foo *bar][ref]
[ref]: /uri
-06_07__inlines__links__52: |
+06_07__inlines__links__052: |
[foo <bar attr="][ref]">
[ref]: /uri
-06_07__inlines__links__53: |
+06_07__inlines__links__053: |
[foo`][ref]`
[ref]: /uri
-06_07__inlines__links__54: |
+06_07__inlines__links__054: |
[foo<http://example.com/?search=][ref]>
[ref]: /uri
-06_07__inlines__links__55: |
+06_07__inlines__links__055: |
[foo][BaR]
[bar]: /url "title"
-06_07__inlines__links__56: |
+06_07__inlines__links__056: |
[Толпой][Толпой] is a Russian word.
[ТОЛПОЙ]: /url
-06_07__inlines__links__57: |
+06_07__inlines__links__057: |
[Foo
bar]: /url
[Baz][Foo bar]
-06_07__inlines__links__58: |
+06_07__inlines__links__058: |
[foo] [bar]
[bar]: /url "title"
-06_07__inlines__links__59: |
+06_07__inlines__links__059: |
[foo]
[bar]
[bar]: /url "title"
-06_07__inlines__links__60: |
+06_07__inlines__links__060: |
[foo]: /url1
[foo]: /url2
[bar][foo]
-06_07__inlines__links__61: |
+06_07__inlines__links__061: |
[bar][foo\!]
[foo!]: /url
-06_07__inlines__links__62: |
+06_07__inlines__links__062: |
[foo][ref[]
[ref[]: /uri
-06_07__inlines__links__63: |
+06_07__inlines__links__063: |
[foo][ref[bar]]
[ref[bar]]: /uri
-06_07__inlines__links__64: |
+06_07__inlines__links__064: |
[[[foo]]]
[[[foo]]]: /url
-06_07__inlines__links__65: |
+06_07__inlines__links__065: |
[foo][ref\[]
[ref\[]: /uri
-06_07__inlines__links__66: |
+06_07__inlines__links__066: |
[bar\\]: /uri
[bar\\]
-06_07__inlines__links__67: |
+06_07__inlines__links__067: |
[]
[]: /uri
-06_07__inlines__links__68: |
+06_07__inlines__links__068: |
[
]
[
]: /uri
-06_07__inlines__links__69: |
+06_07__inlines__links__069: |
[foo][]
[foo]: /url "title"
-06_07__inlines__links__70: |
+06_07__inlines__links__070: |
[*foo* bar][]
[*foo* bar]: /url "title"
-06_07__inlines__links__71: |
+06_07__inlines__links__071: |
[Foo][]
[foo]: /url "title"
-06_07__inlines__links__72: "[foo] \n[]\n\n[foo]: /url \"title\"\n"
-06_07__inlines__links__73: |
+06_07__inlines__links__072: "[foo] \n[]\n\n[foo]: /url \"title\"\n"
+06_07__inlines__links__073: |
[foo]
[foo]: /url "title"
-06_07__inlines__links__74: |
+06_07__inlines__links__074: |
[*foo* bar]
[*foo* bar]: /url "title"
-06_07__inlines__links__75: |
+06_07__inlines__links__075: |
[[*foo* bar]]
[*foo* bar]: /url "title"
-06_07__inlines__links__76: |
+06_07__inlines__links__076: |
[[bar [foo]
[foo]: /url
-06_07__inlines__links__77: |
+06_07__inlines__links__077: |
[Foo]
[foo]: /url "title"
-06_07__inlines__links__78: |
+06_07__inlines__links__078: |
[foo] bar
[foo]: /url
-06_07__inlines__links__79: |
+06_07__inlines__links__079: |
\[foo]
[foo]: /url "title"
-06_07__inlines__links__80: |
+06_07__inlines__links__080: |
[foo*]: /url
*[foo*]
-06_07__inlines__links__81: |
+06_07__inlines__links__081: |
[foo][bar]
[foo]: /url1
[bar]: /url2
-06_07__inlines__links__82: |
+06_07__inlines__links__082: |
[foo][]
[foo]: /url1
-06_07__inlines__links__83: |
+06_07__inlines__links__083: |
[foo]()
[foo]: /url1
-06_07__inlines__links__84: |
+06_07__inlines__links__084: |
[foo](not a link)
[foo]: /url1
-06_07__inlines__links__85: |
+06_07__inlines__links__085: |
[foo][bar][baz]
[baz]: /url
-06_07__inlines__links__86: |
+06_07__inlines__links__086: |
[foo][bar][baz]
[baz]: /url1
[bar]: /url2
-06_07__inlines__links__87: |
+06_07__inlines__links__087: |
[foo][bar][baz]
[baz]: /url1
[foo]: /url2
-06_08__inlines__images__01: |
+06_08__inlines__images__001: |
![foo](/url "title")
-06_08__inlines__images__02: |
+06_08__inlines__images__002: |
![foo *bar*]
[foo *bar*]: train.jpg "train & tracks"
-06_08__inlines__images__03: |
+06_08__inlines__images__003: |
![foo ![bar](/url)](/url2)
-06_08__inlines__images__04: |
+06_08__inlines__images__004: |
![foo [bar](/url)](/url2)
-06_08__inlines__images__05: |
+06_08__inlines__images__005: |
![foo *bar*][]
[foo *bar*]: train.jpg "train & tracks"
-06_08__inlines__images__06: |
+06_08__inlines__images__006: |
![foo *bar*][foobar]
[FOOBAR]: train.jpg "train & tracks"
-06_08__inlines__images__07: |
+06_08__inlines__images__007: |
![foo](train.jpg)
-06_08__inlines__images__08: |
+06_08__inlines__images__008: |
My ![foo bar](/path/to/train.jpg "title" )
-06_08__inlines__images__09: |
+06_08__inlines__images__009: |
![foo](<url>)
-06_08__inlines__images__10: |
+06_08__inlines__images__010: |
![](/url)
-06_08__inlines__images__11: |
+06_08__inlines__images__011: |
![foo][bar]
[bar]: /url
-06_08__inlines__images__12: |
+06_08__inlines__images__012: |
![foo][bar]
[BAR]: /url
-06_08__inlines__images__13: |
+06_08__inlines__images__013: |
![foo][]
[foo]: /url "title"
-06_08__inlines__images__14: |
+06_08__inlines__images__014: |
![*foo* bar][]
[*foo* bar]: /url "title"
-06_08__inlines__images__15: |
+06_08__inlines__images__015: |
![Foo][]
[foo]: /url "title"
-06_08__inlines__images__16: "![foo] \n[]\n\n[foo]: /url \"title\"\n"
-06_08__inlines__images__17: |
+06_08__inlines__images__016: "![foo] \n[]\n\n[foo]: /url \"title\"\n"
+06_08__inlines__images__017: |
![foo]
[foo]: /url "title"
-06_08__inlines__images__18: |
+06_08__inlines__images__018: |
![*foo* bar]
[*foo* bar]: /url "title"
-06_08__inlines__images__19: |
+06_08__inlines__images__019: |
![[foo]]
[[foo]]: /url "title"
-06_08__inlines__images__20: |
+06_08__inlines__images__020: |
![Foo]
[foo]: /url "title"
-06_08__inlines__images__21: |
+06_08__inlines__images__021: |
!\[foo]
[foo]: /url "title"
-06_08__inlines__images__22: |
+06_08__inlines__images__022: |
\![foo]
[foo]: /url "title"
-06_09__inlines__autolinks__01: |
+06_09__inlines__autolinks__001: |
<http://foo.bar.baz>
-06_09__inlines__autolinks__02: |
+06_09__inlines__autolinks__002: |
<http://foo.bar.baz/test?q=hello&id=22&boolean>
-06_09__inlines__autolinks__03: |
+06_09__inlines__autolinks__003: |
<irc://foo.bar:2233/baz>
-06_09__inlines__autolinks__04: |
+06_09__inlines__autolinks__004: |
<MAILTO:FOO@BAR.BAZ>
-06_09__inlines__autolinks__05: |
+06_09__inlines__autolinks__005: |
<a+b+c:d>
-06_09__inlines__autolinks__06: |
+06_09__inlines__autolinks__006: |
<made-up-scheme://foo,bar>
-06_09__inlines__autolinks__07: |
+06_09__inlines__autolinks__007: |
<http://../>
-06_09__inlines__autolinks__08: |
+06_09__inlines__autolinks__008: |
<localhost:5001/foo>
-06_09__inlines__autolinks__09: |
+06_09__inlines__autolinks__009: |
<http://foo.bar/baz bim>
-06_09__inlines__autolinks__10: |
+06_09__inlines__autolinks__010: |
<http://example.com/\[\>
-06_09__inlines__autolinks__11: |
+06_09__inlines__autolinks__011: |
<foo@bar.example.com>
-06_09__inlines__autolinks__12: |
+06_09__inlines__autolinks__012: |
<foo+special@Bar.baz-bar0.com>
-06_09__inlines__autolinks__13: |
+06_09__inlines__autolinks__013: |
<foo\+@bar.example.com>
-06_09__inlines__autolinks__14: |
+06_09__inlines__autolinks__014: |
<>
-06_09__inlines__autolinks__15: |
+06_09__inlines__autolinks__015: |
< http://foo.bar >
-06_09__inlines__autolinks__16: |
+06_09__inlines__autolinks__016: |
<m:abc>
-06_09__inlines__autolinks__17: |
+06_09__inlines__autolinks__017: |
<foo.bar.baz>
-06_09__inlines__autolinks__18: |
+06_09__inlines__autolinks__018: |
http://example.com
-06_09__inlines__autolinks__19: |
+06_09__inlines__autolinks__019: |
foo@bar.example.com
-06_10__inlines__autolinks_extension__01: |
+06_10__inlines__autolinks_extension__001: |
www.commonmark.org
-06_10__inlines__autolinks_extension__02: |
+06_10__inlines__autolinks_extension__002: |
Visit www.commonmark.org/help for more information.
-06_10__inlines__autolinks_extension__03: |
+06_10__inlines__autolinks_extension__003: |
Visit www.commonmark.org.
Visit www.commonmark.org/a.b.
-06_10__inlines__autolinks_extension__04: |
+06_10__inlines__autolinks_extension__004: |
www.google.com/search?q=Markup+(business)
www.google.com/search?q=Markup+(business)))
@@ -2076,25 +2076,25 @@
(www.google.com/search?q=Markup+(business))
(www.google.com/search?q=Markup+(business)
-06_10__inlines__autolinks_extension__05: |
+06_10__inlines__autolinks_extension__005: |
www.google.com/search?q=(business))+ok
-06_10__inlines__autolinks_extension__06: |
+06_10__inlines__autolinks_extension__006: |
www.google.com/search?q=commonmark&hl=en
www.google.com/search?q=commonmark&hl;
-06_10__inlines__autolinks_extension__07: |
+06_10__inlines__autolinks_extension__007: |
www.commonmark.org/he<lp
-06_10__inlines__autolinks_extension__08: |
+06_10__inlines__autolinks_extension__008: |
http://commonmark.org
(Visit https://encrypted.google.com/search?q=Markup+(business))
Anonymous FTP is available at ftp://foo.bar.baz.
-06_10__inlines__autolinks_extension__09: |
+06_10__inlines__autolinks_extension__009: |
foo@bar.baz
-06_10__inlines__autolinks_extension__10: |
+06_10__inlines__autolinks_extension__010: |
hello@mail+xyz.example isn't valid, but hello+xyz@mail.example is.
-06_10__inlines__autolinks_extension__11: |
+06_10__inlines__autolinks_extension__011: |
a.b-c_d@a.b
a.b-c_d@a.b.
@@ -2102,102 +2102,100 @@
a.b-c_d@a.b-
a.b-c_d@a.b_
-06_11__inlines__raw_html__01: |
+06_11__inlines__raw_html__001: |
<a><bab><c2c>
-06_11__inlines__raw_html__02: |
+06_11__inlines__raw_html__002: |
<a/><b2/>
-06_11__inlines__raw_html__03: |
+06_11__inlines__raw_html__003: |
<a /><b2
data="foo" >
-06_11__inlines__raw_html__04: |
+06_11__inlines__raw_html__004: |
<a foo="bar" bam = 'baz <em>"</em>'
_boolean zoop:33=zoop:33 />
-06_11__inlines__raw_html__05: |
+06_11__inlines__raw_html__005: |
Foo <responsive-image src="foo.jpg" />
-06_11__inlines__raw_html__06: |
+06_11__inlines__raw_html__006: |
<33> <__>
-06_11__inlines__raw_html__07: |
+06_11__inlines__raw_html__007: |
<a h*#ref="hi">
-06_11__inlines__raw_html__08: |
+06_11__inlines__raw_html__008: |
<a href="hi'> <a href=hi'>
-06_11__inlines__raw_html__09: |
+06_11__inlines__raw_html__009: |
< a><
foo><bar/ >
<foo bar=baz
bim!bop />
-06_11__inlines__raw_html__10: |
+06_11__inlines__raw_html__010: |
<a href='bar'title=title>
-06_11__inlines__raw_html__11: |
+06_11__inlines__raw_html__011: |
</a></foo >
-06_11__inlines__raw_html__12: |
+06_11__inlines__raw_html__012: |
</a href="foo">
-06_11__inlines__raw_html__13: |
+06_11__inlines__raw_html__013: |
foo <!-- this is a
comment - with hyphen -->
-06_11__inlines__raw_html__14: |
+06_11__inlines__raw_html__014: |
foo <!-- not a comment -- two hyphens -->
-06_11__inlines__raw_html__15: |
+06_11__inlines__raw_html__015: |
foo <!--> foo -->
foo <!-- foo--->
-06_11__inlines__raw_html__16: |
+06_11__inlines__raw_html__016: |
foo <?php echo $a; ?>
-06_11__inlines__raw_html__17: |
+06_11__inlines__raw_html__017: |
foo <!ELEMENT br EMPTY>
-06_11__inlines__raw_html__18: |
+06_11__inlines__raw_html__018: |
foo <![CDATA[>&<]]>
-06_11__inlines__raw_html__19: |
+06_11__inlines__raw_html__019: |
foo <a href="&ouml;">
-06_11__inlines__raw_html__20: |
+06_11__inlines__raw_html__020: |
foo <a href="\*">
-06_11__inlines__raw_html__21: |
+06_11__inlines__raw_html__021: |
<a href="\"">
-06_12__inlines__disallowed_raw_html_extension__01: |
+06_12__inlines__disallowed_raw_html_extension__001: |
<strong> <title> <style> <em>
<blockquote>
<xmp> is disallowed. <XMP> is also disallowed.
</blockquote>
-06_13__inlines__hard_line_breaks__01: "foo \nbaz\n"
-06_13__inlines__hard_line_breaks__02: |
+06_13__inlines__hard_line_breaks__001: "foo \nbaz\n"
+06_13__inlines__hard_line_breaks__002: |
foo\
baz
-06_13__inlines__hard_line_breaks__03: "foo \nbaz\n"
-06_13__inlines__hard_line_breaks__04: "foo \n bar\n"
-06_13__inlines__hard_line_breaks__05: |
+06_13__inlines__hard_line_breaks__003: "foo \nbaz\n"
+06_13__inlines__hard_line_breaks__004: "foo \n bar\n"
+06_13__inlines__hard_line_breaks__005: |
foo\
bar
-06_13__inlines__hard_line_breaks__06: "*foo \nbar*\n"
-06_13__inlines__hard_line_breaks__07: |
+06_13__inlines__hard_line_breaks__006: "*foo \nbar*\n"
+06_13__inlines__hard_line_breaks__007: |
*foo\
bar*
-06_13__inlines__hard_line_breaks__08: "`code \nspan`\n"
-06_13__inlines__hard_line_breaks__09: |
+06_13__inlines__hard_line_breaks__008: "`code \nspan`\n"
+06_13__inlines__hard_line_breaks__009: |
`code\
span`
-06_13__inlines__hard_line_breaks__10: "<a href=\"foo \nbar\">\n"
-06_13__inlines__hard_line_breaks__11: |
+06_13__inlines__hard_line_breaks__010: "<a href=\"foo \nbar\">\n"
+06_13__inlines__hard_line_breaks__011: |
<a href="foo\
bar">
-06_13__inlines__hard_line_breaks__12: |
+06_13__inlines__hard_line_breaks__012: |
foo\
-06_13__inlines__hard_line_breaks__13: "foo \n"
-06_13__inlines__hard_line_breaks__14: |
+06_13__inlines__hard_line_breaks__013: "foo \n"
+06_13__inlines__hard_line_breaks__014: |
### foo\
-06_13__inlines__hard_line_breaks__15: "### foo \n"
-06_14__inlines__soft_line_breaks__01: |
+06_13__inlines__hard_line_breaks__015: "### foo \n"
+06_14__inlines__soft_line_breaks__001: |
foo
baz
-06_14__inlines__soft_line_breaks__02: "foo \n baz\n"
-06_15__inlines__textual_content__01: |
+06_14__inlines__soft_line_breaks__002: "foo \n baz\n"
+06_15__inlines__textual_content__001: |
hello $.;'there
-06_15__inlines__textual_content__02: |
+06_15__inlines__textual_content__002: |
Foo χρῆν
-06_15__inlines__textual_content__03: |
+06_15__inlines__textual_content__003: |
Multiple spaces
-07_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01: |
- **bold**
-08_01__second_gitlab_specific_section_with_examples__strong_but_with_html__01: |
- <strong>
- bold
- </strong>
+07_01__gitlab_specific_markdown__footnotes__001: |
+ footnote reference tag [^1]
+
+ [^1]: footnote text
diff --git a/spec/fixtures/glfm/example_snapshots/prosemirror_json.yml b/spec/fixtures/glfm/example_snapshots/prosemirror_json.yml
index 07d0235d22a..04196c26af0 100644
--- a/spec/fixtures/glfm/example_snapshots/prosemirror_json.yml
+++ b/spec/fixtures/glfm/example_snapshots/prosemirror_json.yml
@@ -1,5 +1,5 @@
---
-02_01__preliminaries__tabs__01: |-
+02_01__preliminaries__tabs__001: |-
{
"type": "doc",
"content": [
@@ -18,7 +18,7 @@
}
]
}
-02_01__preliminaries__tabs__02: |-
+02_01__preliminaries__tabs__002: |-
{
"type": "doc",
"content": [
@@ -37,7 +37,7 @@
}
]
}
-02_01__preliminaries__tabs__03: |-
+02_01__preliminaries__tabs__003: |-
{
"type": "doc",
"content": [
@@ -56,7 +56,7 @@
}
]
}
-02_01__preliminaries__tabs__04: |-
+02_01__preliminaries__tabs__004: |-
{
"type": "doc",
"content": [
@@ -93,7 +93,7 @@
}
]
}
-02_01__preliminaries__tabs__05: |-
+02_01__preliminaries__tabs__005: |-
{
"type": "doc",
"content": [
@@ -134,7 +134,7 @@
}
]
}
-02_01__preliminaries__tabs__06: |-
+02_01__preliminaries__tabs__006: |-
{
"type": "doc",
"content": [
@@ -161,7 +161,7 @@
}
]
}
-02_01__preliminaries__tabs__07: |-
+02_01__preliminaries__tabs__007: |-
{
"type": "doc",
"content": [
@@ -196,7 +196,7 @@
}
]
}
-02_01__preliminaries__tabs__08: |-
+02_01__preliminaries__tabs__008: |-
{
"type": "doc",
"content": [
@@ -215,7 +215,7 @@
}
]
}
-02_01__preliminaries__tabs__09: |-
+02_01__preliminaries__tabs__009: |-
{
"type": "doc",
"content": [
@@ -233,7 +233,7 @@
"content": [
{
"type": "text",
- "text": "foo\n"
+ "text": "foo"
}
]
},
@@ -251,7 +251,7 @@
"content": [
{
"type": "text",
- "text": "bar\n"
+ "text": "bar"
}
]
},
@@ -287,7 +287,7 @@
}
]
}
-02_01__preliminaries__tabs__10: |-
+02_01__preliminaries__tabs__010: |-
{
"type": "doc",
"content": [
@@ -305,7 +305,7 @@
}
]
}
-02_01__preliminaries__tabs__11: |-
+02_01__preliminaries__tabs__011: |-
{
"type": "doc",
"content": [
@@ -314,7 +314,7 @@
}
]
}
-03_01__blocks_and_inlines__precedence__01: |-
+03_01__blocks_and_inlines__precedence__001: |-
{
"type": "doc",
"content": [
@@ -356,7 +356,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__01: |-
+04_01__leaf_blocks__thematic_breaks__001: |-
{
"type": "doc",
"content": [
@@ -371,7 +371,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__02: |-
+04_01__leaf_blocks__thematic_breaks__002: |-
{
"type": "doc",
"content": [
@@ -386,7 +386,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__03: |-
+04_01__leaf_blocks__thematic_breaks__003: |-
{
"type": "doc",
"content": [
@@ -401,7 +401,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__04: |-
+04_01__leaf_blocks__thematic_breaks__004: |-
{
"type": "doc",
"content": [
@@ -416,7 +416,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__05: |-
+04_01__leaf_blocks__thematic_breaks__005: |-
{
"type": "doc",
"content": [
@@ -431,7 +431,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__06: |-
+04_01__leaf_blocks__thematic_breaks__006: |-
{
"type": "doc",
"content": [
@@ -450,7 +450,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__07: |-
+04_01__leaf_blocks__thematic_breaks__007: |-
{
"type": "doc",
"content": [
@@ -465,7 +465,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__08: |-
+04_01__leaf_blocks__thematic_breaks__008: |-
{
"type": "doc",
"content": [
@@ -474,7 +474,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__09: |-
+04_01__leaf_blocks__thematic_breaks__009: |-
{
"type": "doc",
"content": [
@@ -483,7 +483,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__10: |-
+04_01__leaf_blocks__thematic_breaks__010: |-
{
"type": "doc",
"content": [
@@ -492,7 +492,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__11: |-
+04_01__leaf_blocks__thematic_breaks__011: |-
{
"type": "doc",
"content": [
@@ -501,7 +501,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__12: |-
+04_01__leaf_blocks__thematic_breaks__012: |-
{
"type": "doc",
"content": [
@@ -510,7 +510,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__13: |-
+04_01__leaf_blocks__thematic_breaks__013: |-
{
"type": "doc",
"content": [
@@ -543,7 +543,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__14: |-
+04_01__leaf_blocks__thematic_breaks__014: |-
{
"type": "doc",
"content": [
@@ -563,7 +563,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__15: |-
+04_01__leaf_blocks__thematic_breaks__015: |-
{
"type": "doc",
"content": [
@@ -616,7 +616,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__16: |-
+04_01__leaf_blocks__thematic_breaks__016: |-
{
"type": "doc",
"content": [
@@ -643,7 +643,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__17: |-
+04_01__leaf_blocks__thematic_breaks__017: |-
{
"type": "doc",
"content": [
@@ -670,7 +670,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__18: |-
+04_01__leaf_blocks__thematic_breaks__018: |-
{
"type": "doc",
"content": [
@@ -723,7 +723,7 @@
}
]
}
-04_01__leaf_blocks__thematic_breaks__19: |-
+04_01__leaf_blocks__thematic_breaks__019: |-
{
"type": "doc",
"content": [
@@ -762,7 +762,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__01: |-
+04_02__leaf_blocks__atx_headings__001: |-
{
"type": "doc",
"content": [
@@ -840,7 +840,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__02: |-
+04_02__leaf_blocks__atx_headings__002: |-
{
"type": "doc",
"content": [
@@ -855,7 +855,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__03: |-
+04_02__leaf_blocks__atx_headings__003: |-
{
"type": "doc",
"content": [
@@ -879,7 +879,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__04: |-
+04_02__leaf_blocks__atx_headings__004: |-
{
"type": "doc",
"content": [
@@ -894,7 +894,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__05: |-
+04_02__leaf_blocks__atx_headings__005: |-
{
"type": "doc",
"content": [
@@ -925,7 +925,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__06: |-
+04_02__leaf_blocks__atx_headings__006: |-
{
"type": "doc",
"content": [
@@ -943,7 +943,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__07: |-
+04_02__leaf_blocks__atx_headings__007: |-
{
"type": "doc",
"content": [
@@ -985,7 +985,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__08: |-
+04_02__leaf_blocks__atx_headings__008: |-
{
"type": "doc",
"content": [
@@ -1004,7 +1004,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__09: |-
+04_02__leaf_blocks__atx_headings__009: |-
{
"type": "doc",
"content": [
@@ -1019,7 +1019,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__10: |-
+04_02__leaf_blocks__atx_headings__010: |-
{
"type": "doc",
"content": [
@@ -1049,7 +1049,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__11: |-
+04_02__leaf_blocks__atx_headings__011: |-
{
"type": "doc",
"content": [
@@ -1079,7 +1079,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__12: |-
+04_02__leaf_blocks__atx_headings__012: |-
{
"type": "doc",
"content": [
@@ -1097,7 +1097,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__13: |-
+04_02__leaf_blocks__atx_headings__013: |-
{
"type": "doc",
"content": [
@@ -1115,7 +1115,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__14: |-
+04_02__leaf_blocks__atx_headings__014: |-
{
"type": "doc",
"content": [
@@ -1133,7 +1133,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__15: |-
+04_02__leaf_blocks__atx_headings__015: |-
{
"type": "doc",
"content": [
@@ -1175,7 +1175,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__16: |-
+04_02__leaf_blocks__atx_headings__016: |-
{
"type": "doc",
"content": [
@@ -1199,7 +1199,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__17: |-
+04_02__leaf_blocks__atx_headings__017: |-
{
"type": "doc",
"content": [
@@ -1235,7 +1235,7 @@
}
]
}
-04_02__leaf_blocks__atx_headings__18: |-
+04_02__leaf_blocks__atx_headings__018: |-
{
"type": "doc",
"content": [
@@ -1259,7 +1259,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__01: |-
+04_03__leaf_blocks__setext_headings__001: |-
{
"type": "doc",
"content": [
@@ -1307,7 +1307,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__02: |-
+04_03__leaf_blocks__setext_headings__002: |-
{
"type": "doc",
"content": [
@@ -1334,7 +1334,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__03: |-
+04_03__leaf_blocks__setext_headings__003: |-
{
"type": "doc",
"content": [
@@ -1361,7 +1361,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__04: |-
+04_03__leaf_blocks__setext_headings__004: |-
{
"type": "doc",
"content": [
@@ -1391,7 +1391,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__05: |-
+04_03__leaf_blocks__setext_headings__005: |-
{
"type": "doc",
"content": [
@@ -1433,7 +1433,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__06: |-
+04_03__leaf_blocks__setext_headings__006: |-
{
"type": "doc",
"content": [
@@ -1455,7 +1455,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__07: |-
+04_03__leaf_blocks__setext_headings__007: |-
{
"type": "doc",
"content": [
@@ -1473,7 +1473,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__08: |-
+04_03__leaf_blocks__setext_headings__008: |-
{
"type": "doc",
"content": [
@@ -1488,7 +1488,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__09: |-
+04_03__leaf_blocks__setext_headings__009: |-
{
"type": "doc",
"content": [
@@ -1515,7 +1515,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__10: |-
+04_03__leaf_blocks__setext_headings__010: |-
{
"type": "doc",
"content": [
@@ -1533,7 +1533,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__11: |-
+04_03__leaf_blocks__setext_headings__011: |-
{
"type": "doc",
"content": [
@@ -1551,7 +1551,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__12: |-
+04_03__leaf_blocks__setext_headings__012: |-
{
"type": "doc",
"content": [
@@ -1599,7 +1599,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__13: |-
+04_03__leaf_blocks__setext_headings__013: |-
{
"type": "doc",
"content": [
@@ -1625,7 +1625,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__14: |-
+04_03__leaf_blocks__setext_headings__014: |-
{
"type": "doc",
"content": [
@@ -1648,7 +1648,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__15: |-
+04_03__leaf_blocks__setext_headings__015: |-
{
"type": "doc",
"content": [
@@ -1679,7 +1679,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__16: |-
+04_03__leaf_blocks__setext_headings__016: |-
{
"type": "doc",
"content": [
@@ -1697,7 +1697,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__17: |-
+04_03__leaf_blocks__setext_headings__017: |-
{
"type": "doc",
"content": [
@@ -1739,7 +1739,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__18: |-
+04_03__leaf_blocks__setext_headings__018: |-
{
"type": "doc",
"content": [
@@ -1754,7 +1754,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__19: |-
+04_03__leaf_blocks__setext_headings__019: |-
{
"type": "doc",
"content": [
@@ -1766,7 +1766,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__20: |-
+04_03__leaf_blocks__setext_headings__020: |-
{
"type": "doc",
"content": [
@@ -1797,7 +1797,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__21: |-
+04_03__leaf_blocks__setext_headings__021: |-
{
"type": "doc",
"content": [
@@ -1819,7 +1819,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__22: |-
+04_03__leaf_blocks__setext_headings__022: |-
{
"type": "doc",
"content": [
@@ -1845,7 +1845,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__23: |-
+04_03__leaf_blocks__setext_headings__023: |-
{
"type": "doc",
"content": [
@@ -1863,7 +1863,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__24: |-
+04_03__leaf_blocks__setext_headings__024: |-
{
"type": "doc",
"content": [
@@ -1899,7 +1899,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__25: |-
+04_03__leaf_blocks__setext_headings__025: |-
{
"type": "doc",
"content": [
@@ -1926,7 +1926,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__26: |-
+04_03__leaf_blocks__setext_headings__026: |-
{
"type": "doc",
"content": [
@@ -1953,7 +1953,7 @@
}
]
}
-04_03__leaf_blocks__setext_headings__27: |-
+04_03__leaf_blocks__setext_headings__027: |-
{
"type": "doc",
"content": [
@@ -1968,7 +1968,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__01: |-
+04_04__leaf_blocks__indented_code_blocks__001: |-
{
"type": "doc",
"content": [
@@ -1987,7 +1987,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__02: |-
+04_04__leaf_blocks__indented_code_blocks__002: |-
{
"type": "doc",
"content": [
@@ -2024,7 +2024,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__03: |-
+04_04__leaf_blocks__indented_code_blocks__003: |-
{
"type": "doc",
"content": [
@@ -2075,7 +2075,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__04: |-
+04_04__leaf_blocks__indented_code_blocks__004: |-
{
"type": "doc",
"content": [
@@ -2094,7 +2094,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__05: |-
+04_04__leaf_blocks__indented_code_blocks__005: |-
{
"type": "doc",
"content": [
@@ -2113,7 +2113,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__06: |-
+04_04__leaf_blocks__indented_code_blocks__006: |-
{
"type": "doc",
"content": [
@@ -2132,7 +2132,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__07: |-
+04_04__leaf_blocks__indented_code_blocks__007: |-
{
"type": "doc",
"content": [
@@ -2147,7 +2147,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__08: |-
+04_04__leaf_blocks__indented_code_blocks__008: |-
{
"type": "doc",
"content": [
@@ -2175,7 +2175,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__09: |-
+04_04__leaf_blocks__indented_code_blocks__009: |-
{
"type": "doc",
"content": [
@@ -2234,7 +2234,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__10: |-
+04_04__leaf_blocks__indented_code_blocks__010: |-
{
"type": "doc",
"content": [
@@ -2253,7 +2253,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__11: |-
+04_04__leaf_blocks__indented_code_blocks__011: |-
{
"type": "doc",
"content": [
@@ -2272,7 +2272,7 @@
}
]
}
-04_04__leaf_blocks__indented_code_blocks__12: |-
+04_04__leaf_blocks__indented_code_blocks__012: |-
{
"type": "doc",
"content": [
@@ -2291,7 +2291,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__01: |-
+04_05__leaf_blocks__fenced_code_blocks__001: |-
{
"type": "doc",
"content": [
@@ -2310,7 +2310,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__02: |-
+04_05__leaf_blocks__fenced_code_blocks__002: |-
{
"type": "doc",
"content": [
@@ -2329,7 +2329,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__03: |-
+04_05__leaf_blocks__fenced_code_blocks__003: |-
{
"type": "doc",
"content": [
@@ -2349,7 +2349,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__04: |-
+04_05__leaf_blocks__fenced_code_blocks__004: |-
{
"type": "doc",
"content": [
@@ -2368,7 +2368,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__05: |-
+04_05__leaf_blocks__fenced_code_blocks__005: |-
{
"type": "doc",
"content": [
@@ -2387,7 +2387,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__06: |-
+04_05__leaf_blocks__fenced_code_blocks__006: |-
{
"type": "doc",
"content": [
@@ -2406,7 +2406,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__07: |-
+04_05__leaf_blocks__fenced_code_blocks__007: |-
{
"type": "doc",
"content": [
@@ -2425,7 +2425,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__08: |-
+04_05__leaf_blocks__fenced_code_blocks__008: |-
{
"type": "doc",
"content": [
@@ -2438,7 +2438,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__09: |-
+04_05__leaf_blocks__fenced_code_blocks__009: |-
{
"type": "doc",
"content": [
@@ -2457,7 +2457,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__10: |-
+04_05__leaf_blocks__fenced_code_blocks__010: |-
{
"type": "doc",
"content": [
@@ -2493,7 +2493,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__11: |-
+04_05__leaf_blocks__fenced_code_blocks__011: |-
{
"type": "doc",
"content": [
@@ -2512,7 +2512,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__12: |-
+04_05__leaf_blocks__fenced_code_blocks__012: |-
{
"type": "doc",
"content": [
@@ -2525,7 +2525,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__13: |-
+04_05__leaf_blocks__fenced_code_blocks__013: |-
{
"type": "doc",
"content": [
@@ -2544,7 +2544,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__14: |-
+04_05__leaf_blocks__fenced_code_blocks__014: |-
{
"type": "doc",
"content": [
@@ -2563,7 +2563,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__15: |-
+04_05__leaf_blocks__fenced_code_blocks__015: |-
{
"type": "doc",
"content": [
@@ -2582,7 +2582,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__16: |-
+04_05__leaf_blocks__fenced_code_blocks__016: |-
{
"type": "doc",
"content": [
@@ -2601,7 +2601,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__17: |-
+04_05__leaf_blocks__fenced_code_blocks__017: |-
{
"type": "doc",
"content": [
@@ -2620,7 +2620,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__18: |-
+04_05__leaf_blocks__fenced_code_blocks__018: |-
{
"type": "doc",
"content": [
@@ -2639,7 +2639,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__19: |-
+04_05__leaf_blocks__fenced_code_blocks__019: |-
{
"type": "doc",
"content": [
@@ -2658,7 +2658,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__20: |-
+04_05__leaf_blocks__fenced_code_blocks__020: |-
{
"type": "doc",
"content": [
@@ -2678,7 +2678,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__21: |-
+04_05__leaf_blocks__fenced_code_blocks__021: |-
{
"type": "doc",
"content": [
@@ -2697,7 +2697,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__22: |-
+04_05__leaf_blocks__fenced_code_blocks__022: |-
{
"type": "doc",
"content": [
@@ -2734,7 +2734,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__23: |-
+04_05__leaf_blocks__fenced_code_blocks__023: |-
{
"type": "doc",
"content": [
@@ -2777,7 +2777,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__24: |-
+04_05__leaf_blocks__fenced_code_blocks__024: |-
{
"type": "doc",
"content": [
@@ -2796,7 +2796,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__25: |-
+04_05__leaf_blocks__fenced_code_blocks__025: |-
{
"type": "doc",
"content": [
@@ -2815,7 +2815,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__26: |-
+04_05__leaf_blocks__fenced_code_blocks__026: |-
{
"type": "doc",
"content": [
@@ -2828,7 +2828,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__27: |-
+04_05__leaf_blocks__fenced_code_blocks__027: |-
{
"type": "doc",
"content": [
@@ -2852,7 +2852,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__28: |-
+04_05__leaf_blocks__fenced_code_blocks__028: |-
{
"type": "doc",
"content": [
@@ -2871,7 +2871,7 @@
}
]
}
-04_05__leaf_blocks__fenced_code_blocks__29: |-
+04_05__leaf_blocks__fenced_code_blocks__029: |-
{
"type": "doc",
"content": [
@@ -2890,31 +2890,43 @@
}
]
}
-04_06__leaf_blocks__html_blocks__01: |-
+04_06__leaf_blocks__html_blocks__001: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__02: |-
+04_06__leaf_blocks__html_blocks__002: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__03: |-
+04_06__leaf_blocks__html_blocks__003: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__04: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__05: |-
+04_06__leaf_blocks__html_blocks__004: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "text": "\n*foo*"
+ }
+ ]
+ }
+ ]
+ }
+04_06__leaf_blocks__html_blocks__005: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__06: |-
+04_06__leaf_blocks__html_blocks__006: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__07: |-
+04_06__leaf_blocks__html_blocks__007: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__08: |-
+04_06__leaf_blocks__html_blocks__008: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__09: |-
+04_06__leaf_blocks__html_blocks__009: |-
{
"type": "doc",
"content": [
@@ -2923,7 +2935,7 @@
}
]
}
-04_06__leaf_blocks__html_blocks__10: |-
+04_06__leaf_blocks__html_blocks__010: |-
{
"type": "doc",
"content": [
@@ -2932,7 +2944,7 @@
}
]
}
-04_06__leaf_blocks__html_blocks__11: |-
+04_06__leaf_blocks__html_blocks__011: |-
{
"type": "doc",
"content": [
@@ -2941,37 +2953,127 @@
}
]
}
-04_06__leaf_blocks__html_blocks__12: |-
+04_06__leaf_blocks__html_blocks__012: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__13: |-
+04_06__leaf_blocks__html_blocks__013: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__14: |-
+04_06__leaf_blocks__html_blocks__014: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__15: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__16: |-
+04_06__leaf_blocks__html_blocks__015: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "foo",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "\n*bar*\n"
+ }
+ ]
+ }
+ ]
+ }
+04_06__leaf_blocks__html_blocks__016: |-
Error - check implementation:
Hast node of type "warning" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__17: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__18: |-
- Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
-04_06__leaf_blocks__html_blocks__19: |-
- Error - check implementation:
- Hast node of type "del" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__20: |-
- Error - check implementation:
- Hast node of type "del" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__21: |-
+04_06__leaf_blocks__html_blocks__017: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ }
+ ],
+ "text": "\n*bar*\n"
+ }
+ ]
+ }
+ ]
+ }
+04_06__leaf_blocks__html_blocks__018: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "text": "\n*bar*"
+ }
+ ]
+ }
+ ]
+ }
+04_06__leaf_blocks__html_blocks__019: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "strike"
+ }
+ ],
+ "text": "\n*foo*\n"
+ }
+ ]
+ }
+ ]
+ }
+04_06__leaf_blocks__html_blocks__020: |-
Error - check implementation:
- Hast node of type "del" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__22: |-
+ Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
+04_06__leaf_blocks__html_blocks__021: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "italic"
+ },
+ {
+ "type": "strike"
+ }
+ ],
+ "text": "foo"
+ }
+ ]
+ }
+ ]
+ }
+04_06__leaf_blocks__html_blocks__022: |-
{
"type": "doc",
"content": [
@@ -2999,37 +3101,37 @@
}
]
}
-04_06__leaf_blocks__html_blocks__23: |-
+04_06__leaf_blocks__html_blocks__023: |-
Error - check implementation:
Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__24: |-
+04_06__leaf_blocks__html_blocks__024: |-
Error - check implementation:
Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__25: |-
+04_06__leaf_blocks__html_blocks__025: |-
Error - check implementation:
Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__26: |-
+04_06__leaf_blocks__html_blocks__026: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__27: |-
+04_06__leaf_blocks__html_blocks__027: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__28: |-
+04_06__leaf_blocks__html_blocks__028: |-
Error - check implementation:
Hast node of type "style" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__29: |-
+04_06__leaf_blocks__html_blocks__029: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__30: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__030: |-
Error - check implementation:
Hast node of type "script" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__31: |-
+04_06__leaf_blocks__html_blocks__031: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__32: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__032: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__33: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__033: |-
{
"type": "doc",
"content": [
@@ -3038,22 +3140,22 @@
}
]
}
-04_06__leaf_blocks__html_blocks__34: |-
+04_06__leaf_blocks__html_blocks__034: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__35: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__035: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__36: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+04_06__leaf_blocks__html_blocks__036: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__37: |-
+04_06__leaf_blocks__html_blocks__037: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__38: |-
+04_06__leaf_blocks__html_blocks__038: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__39: |-
+04_06__leaf_blocks__html_blocks__039: |-
{
"type": "doc",
"content": [
@@ -3072,6 +3174,7 @@
"attrs": {
"href": "bar",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3083,19 +3186,19 @@
}
]
}
-04_06__leaf_blocks__html_blocks__40: |-
+04_06__leaf_blocks__html_blocks__040: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__41: |-
+04_06__leaf_blocks__html_blocks__041: |-
Error - check implementation:
Hast node of type "div" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__42: |-
+04_06__leaf_blocks__html_blocks__042: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_06__leaf_blocks__html_blocks__43: |-
+04_06__leaf_blocks__html_blocks__043: |-
Error - check implementation:
Hast node of type "table" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__01: |-
+04_07__leaf_blocks__link_reference_definitions__001: |-
{
"type": "doc",
"content": [
@@ -3110,6 +3213,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -3121,7 +3225,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__02: |-
+04_07__leaf_blocks__link_reference_definitions__002: |-
{
"type": "doc",
"content": [
@@ -3136,6 +3240,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "the title",
"canonicalSrc": null
}
@@ -3147,7 +3252,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__03: |-
+04_07__leaf_blocks__link_reference_definitions__003: |-
{
"type": "doc",
"content": [
@@ -3162,6 +3267,7 @@
"attrs": {
"href": "my_(url)",
"target": "_blank",
+ "class": null,
"title": "title (with parens)",
"canonicalSrc": null
}
@@ -3173,7 +3279,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__04: |-
+04_07__leaf_blocks__link_reference_definitions__004: |-
{
"type": "doc",
"content": [
@@ -3188,6 +3294,7 @@
"attrs": {
"href": "my%20url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -3199,7 +3306,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__05: |-
+04_07__leaf_blocks__link_reference_definitions__005: |-
{
"type": "doc",
"content": [
@@ -3214,6 +3321,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "\ntitle\nline1\nline2\n",
"canonicalSrc": null
}
@@ -3225,7 +3333,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__06: |-
+04_07__leaf_blocks__link_reference_definitions__006: |-
{
"type": "doc",
"content": [
@@ -3258,7 +3366,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__07: |-
+04_07__leaf_blocks__link_reference_definitions__007: |-
{
"type": "doc",
"content": [
@@ -3273,6 +3381,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3284,7 +3393,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__08: |-
+04_07__leaf_blocks__link_reference_definitions__008: |-
{
"type": "doc",
"content": [
@@ -3308,7 +3417,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__09: |-
+04_07__leaf_blocks__link_reference_definitions__009: |-
{
"type": "doc",
"content": [
@@ -3323,6 +3432,7 @@
"attrs": {
"href": "",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3334,10 +3444,10 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__10: |-
+04_07__leaf_blocks__link_reference_definitions__010: |-
Error - check implementation:
Hast node of type "bar" not supported by this converter. Please, provide an specification.
-04_07__leaf_blocks__link_reference_definitions__11: |-
+04_07__leaf_blocks__link_reference_definitions__011: |-
{
"type": "doc",
"content": [
@@ -3352,6 +3462,7 @@
"attrs": {
"href": "/url%5Cbar*baz",
"target": "_blank",
+ "class": null,
"title": "foo\"bar\\baz",
"canonicalSrc": null
}
@@ -3363,7 +3474,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__12: |-
+04_07__leaf_blocks__link_reference_definitions__012: |-
{
"type": "doc",
"content": [
@@ -3378,6 +3489,7 @@
"attrs": {
"href": "url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3389,7 +3501,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__13: |-
+04_07__leaf_blocks__link_reference_definitions__013: |-
{
"type": "doc",
"content": [
@@ -3404,6 +3516,7 @@
"attrs": {
"href": "first",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3415,7 +3528,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__14: |-
+04_07__leaf_blocks__link_reference_definitions__014: |-
{
"type": "doc",
"content": [
@@ -3430,6 +3543,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3441,7 +3555,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__15: |-
+04_07__leaf_blocks__link_reference_definitions__015: |-
{
"type": "doc",
"content": [
@@ -3456,6 +3570,7 @@
"attrs": {
"href": "/%CF%86%CE%BF%CF%85",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3467,7 +3582,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__16: |-
+04_07__leaf_blocks__link_reference_definitions__016: |-
{
"type": "doc",
"content": [
@@ -3476,7 +3591,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__17: |-
+04_07__leaf_blocks__link_reference_definitions__017: |-
{
"type": "doc",
"content": [
@@ -3491,7 +3606,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__18: |-
+04_07__leaf_blocks__link_reference_definitions__018: |-
{
"type": "doc",
"content": [
@@ -3506,7 +3621,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__19: |-
+04_07__leaf_blocks__link_reference_definitions__019: |-
{
"type": "doc",
"content": [
@@ -3521,7 +3636,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__20: |-
+04_07__leaf_blocks__link_reference_definitions__020: |-
{
"type": "doc",
"content": [
@@ -3549,7 +3664,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__21: |-
+04_07__leaf_blocks__link_reference_definitions__021: |-
{
"type": "doc",
"content": [
@@ -3577,7 +3692,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__22: |-
+04_07__leaf_blocks__link_reference_definitions__022: |-
{
"type": "doc",
"content": [
@@ -3601,7 +3716,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__23: |-
+04_07__leaf_blocks__link_reference_definitions__023: |-
{
"type": "doc",
"content": [
@@ -3619,6 +3734,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3647,7 +3763,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__24: |-
+04_07__leaf_blocks__link_reference_definitions__024: |-
{
"type": "doc",
"content": [
@@ -3674,6 +3790,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3685,7 +3802,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__25: |-
+04_07__leaf_blocks__link_reference_definitions__025: |-
{
"type": "doc",
"content": [
@@ -3704,6 +3821,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3715,7 +3833,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__26: |-
+04_07__leaf_blocks__link_reference_definitions__026: |-
{
"type": "doc",
"content": [
@@ -3730,6 +3848,7 @@
"attrs": {
"href": "/foo-url",
"target": "_blank",
+ "class": null,
"title": "foo",
"canonicalSrc": null
}
@@ -3749,6 +3868,7 @@
"attrs": {
"href": "/bar-url",
"target": "_blank",
+ "class": null,
"title": "bar",
"canonicalSrc": null
}
@@ -3768,6 +3888,7 @@
"attrs": {
"href": "/baz-url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3779,7 +3900,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__27: |-
+04_07__leaf_blocks__link_reference_definitions__027: |-
{
"type": "doc",
"content": [
@@ -3794,6 +3915,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -3816,7 +3938,7 @@
}
]
}
-04_07__leaf_blocks__link_reference_definitions__28: |-
+04_07__leaf_blocks__link_reference_definitions__028: |-
{
"type": "doc",
"content": [
@@ -3825,7 +3947,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__01: |-
+04_08__leaf_blocks__paragraphs__001: |-
{
"type": "doc",
"content": [
@@ -3849,7 +3971,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__02: |-
+04_08__leaf_blocks__paragraphs__002: |-
{
"type": "doc",
"content": [
@@ -3873,7 +3995,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__03: |-
+04_08__leaf_blocks__paragraphs__003: |-
{
"type": "doc",
"content": [
@@ -3897,7 +4019,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__04: |-
+04_08__leaf_blocks__paragraphs__004: |-
{
"type": "doc",
"content": [
@@ -3912,7 +4034,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__05: |-
+04_08__leaf_blocks__paragraphs__005: |-
{
"type": "doc",
"content": [
@@ -3927,7 +4049,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__06: |-
+04_08__leaf_blocks__paragraphs__006: |-
{
"type": "doc",
"content": [
@@ -3942,7 +4064,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__07: |-
+04_08__leaf_blocks__paragraphs__007: |-
{
"type": "doc",
"content": [
@@ -3970,7 +4092,7 @@
}
]
}
-04_08__leaf_blocks__paragraphs__08: |-
+04_08__leaf_blocks__paragraphs__008: |-
{
"type": "doc",
"content": [
@@ -3992,7 +4114,7 @@
}
]
}
-04_09__leaf_blocks__blank_lines__01: |-
+04_09__leaf_blocks__blank_lines__001: |-
{
"type": "doc",
"content": [
@@ -4019,134 +4141,22 @@
}
]
}
-04_10__leaf_blocks__tables_extension__01: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| foo | bar |\n| --- | --- |\n| baz | bim |"
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__02: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| abc | defghi |\n:-: | -----------:\nbar | baz"
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__03: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| f|oo |\n| ------ |\n| b "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "code"
- }
- ],
- "text": "\\|"
- },
- {
- "type": "text",
- "text": " az |\n| b "
- },
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "|"
- },
- {
- "type": "text",
- "text": " im |"
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__04: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| abc | def |\n| --- | --- |\n| bar | baz |"
- }
- ]
- },
- {
- "type": "blockquote",
- "attrs": {
- "multiline": false
- },
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__05: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| abc | def |\n| --- | --- |\n| bar | baz |\nbar"
- }
- ]
- },
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__06: |-
+04_10__leaf_blocks__tables_extension__001: |-
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__002: |-
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__003: |-
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__004: |-
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__005: |-
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__006: |-
{
"type": "doc",
"content": [
@@ -4161,37 +4171,13 @@
}
]
}
-04_10__leaf_blocks__tables_extension__07: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| abc | def |\n| --- | --- |\n| bar |\n| bar | baz | boo |"
- }
- ]
- }
- ]
- }
-04_10__leaf_blocks__tables_extension__08: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "| abc | def |\n| --- | --- |"
- }
- ]
- }
- ]
- }
-05_01__container_blocks__block_quotes__01: |-
+04_10__leaf_blocks__tables_extension__007: |-
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+04_10__leaf_blocks__tables_extension__008: |-
+ Error - check implementation:
+ Hast node of type "table" not supported by this converter. Please, provide an specification.
+05_01__container_blocks__block_quotes__001: |-
{
"type": "doc",
"content": [
@@ -4226,7 +4212,7 @@
}
]
}
-05_01__container_blocks__block_quotes__02: |-
+05_01__container_blocks__block_quotes__002: |-
{
"type": "doc",
"content": [
@@ -4261,7 +4247,7 @@
}
]
}
-05_01__container_blocks__block_quotes__03: |-
+05_01__container_blocks__block_quotes__003: |-
{
"type": "doc",
"content": [
@@ -4296,7 +4282,7 @@
}
]
}
-05_01__container_blocks__block_quotes__04: |-
+05_01__container_blocks__block_quotes__004: |-
{
"type": "doc",
"content": [
@@ -4315,7 +4301,7 @@
}
]
}
-05_01__container_blocks__block_quotes__05: |-
+05_01__container_blocks__block_quotes__005: |-
{
"type": "doc",
"content": [
@@ -4350,7 +4336,7 @@
}
]
}
-05_01__container_blocks__block_quotes__06: |-
+05_01__container_blocks__block_quotes__006: |-
{
"type": "doc",
"content": [
@@ -4373,7 +4359,7 @@
}
]
}
-05_01__container_blocks__block_quotes__07: |-
+05_01__container_blocks__block_quotes__007: |-
{
"type": "doc",
"content": [
@@ -4399,7 +4385,7 @@
}
]
}
-05_01__container_blocks__block_quotes__08: |-
+05_01__container_blocks__block_quotes__008: |-
{
"type": "doc",
"content": [
@@ -4457,7 +4443,7 @@
}
]
}
-05_01__container_blocks__block_quotes__09: |-
+05_01__container_blocks__block_quotes__009: |-
{
"type": "doc",
"content": [
@@ -4497,7 +4483,7 @@
}
]
}
-05_01__container_blocks__block_quotes__10: |-
+05_01__container_blocks__block_quotes__010: |-
{
"type": "doc",
"content": [
@@ -4534,7 +4520,7 @@
}
]
}
-05_01__container_blocks__block_quotes__11: |-
+05_01__container_blocks__block_quotes__011: |-
{
"type": "doc",
"content": [
@@ -4557,7 +4543,7 @@
}
]
}
-05_01__container_blocks__block_quotes__12: |-
+05_01__container_blocks__block_quotes__012: |-
{
"type": "doc",
"content": [
@@ -4574,7 +4560,7 @@
}
]
}
-05_01__container_blocks__block_quotes__13: |-
+05_01__container_blocks__block_quotes__013: |-
{
"type": "doc",
"content": [
@@ -4591,7 +4577,7 @@
}
]
}
-05_01__container_blocks__block_quotes__14: |-
+05_01__container_blocks__block_quotes__014: |-
{
"type": "doc",
"content": [
@@ -4614,7 +4600,7 @@
}
]
}
-05_01__container_blocks__block_quotes__15: |-
+05_01__container_blocks__block_quotes__015: |-
{
"type": "doc",
"content": [
@@ -4654,7 +4640,7 @@
}
]
}
-05_01__container_blocks__block_quotes__16: |-
+05_01__container_blocks__block_quotes__016: |-
{
"type": "doc",
"content": [
@@ -4677,7 +4663,7 @@
}
]
}
-05_01__container_blocks__block_quotes__17: |-
+05_01__container_blocks__block_quotes__017: |-
{
"type": "doc",
"content": [
@@ -4709,7 +4695,7 @@
}
]
}
-05_01__container_blocks__block_quotes__18: |-
+05_01__container_blocks__block_quotes__018: |-
{
"type": "doc",
"content": [
@@ -4741,7 +4727,7 @@
}
]
}
-05_01__container_blocks__block_quotes__19: |-
+05_01__container_blocks__block_quotes__019: |-
{
"type": "doc",
"content": [
@@ -4784,7 +4770,7 @@
}
]
}
-05_01__container_blocks__block_quotes__20: |-
+05_01__container_blocks__block_quotes__020: |-
{
"type": "doc",
"content": [
@@ -4807,7 +4793,7 @@
}
]
}
-05_01__container_blocks__block_quotes__21: |-
+05_01__container_blocks__block_quotes__021: |-
{
"type": "doc",
"content": [
@@ -4839,7 +4825,7 @@
}
]
}
-05_01__container_blocks__block_quotes__22: |-
+05_01__container_blocks__block_quotes__022: |-
{
"type": "doc",
"content": [
@@ -4871,7 +4857,7 @@
}
]
}
-05_01__container_blocks__block_quotes__23: |-
+05_01__container_blocks__block_quotes__023: |-
{
"type": "doc",
"content": [
@@ -4910,7 +4896,7 @@
}
]
}
-05_01__container_blocks__block_quotes__24: |-
+05_01__container_blocks__block_quotes__024: |-
{
"type": "doc",
"content": [
@@ -4949,7 +4935,7 @@
}
]
}
-05_01__container_blocks__block_quotes__25: |-
+05_01__container_blocks__block_quotes__025: |-
{
"type": "doc",
"content": [
@@ -4993,7 +4979,7 @@
}
]
}
-05_02__container_blocks__list_items__01: |-
+05_02__container_blocks__list_items__001: |-
{
"type": "doc",
"content": [
@@ -5038,7 +5024,7 @@
}
]
}
-05_02__container_blocks__list_items__02: |-
+05_02__container_blocks__list_items__002: |-
{
"type": "doc",
"content": [
@@ -5097,7 +5083,7 @@
}
]
}
-05_02__container_blocks__list_items__03: |-
+05_02__container_blocks__list_items__003: |-
{
"type": "doc",
"content": [
@@ -5134,7 +5120,7 @@
}
]
}
-05_02__container_blocks__list_items__04: |-
+05_02__container_blocks__list_items__004: |-
{
"type": "doc",
"content": [
@@ -5171,7 +5157,7 @@
}
]
}
-05_02__container_blocks__list_items__05: |-
+05_02__container_blocks__list_items__005: |-
{
"type": "doc",
"content": [
@@ -5212,7 +5198,7 @@
}
]
}
-05_02__container_blocks__list_items__06: |-
+05_02__container_blocks__list_items__006: |-
{
"type": "doc",
"content": [
@@ -5249,7 +5235,7 @@
}
]
}
-05_02__container_blocks__list_items__07: |-
+05_02__container_blocks__list_items__007: |-
{
"type": "doc",
"content": [
@@ -5303,7 +5289,7 @@
}
]
}
-05_02__container_blocks__list_items__08: |-
+05_02__container_blocks__list_items__008: |-
{
"type": "doc",
"content": [
@@ -5356,7 +5342,7 @@
}
]
}
-05_02__container_blocks__list_items__09: |-
+05_02__container_blocks__list_items__009: |-
{
"type": "doc",
"content": [
@@ -5380,7 +5366,7 @@
}
]
}
-05_02__container_blocks__list_items__10: |-
+05_02__container_blocks__list_items__010: |-
{
"type": "doc",
"content": [
@@ -5417,7 +5403,7 @@
}
]
}
-05_02__container_blocks__list_items__11: |-
+05_02__container_blocks__list_items__011: |-
{
"type": "doc",
"content": [
@@ -5485,7 +5471,7 @@
}
]
}
-05_02__container_blocks__list_items__12: |-
+05_02__container_blocks__list_items__012: |-
{
"type": "doc",
"content": [
@@ -5526,7 +5512,7 @@
}
]
}
-05_02__container_blocks__list_items__13: |-
+05_02__container_blocks__list_items__013: |-
{
"type": "doc",
"content": [
@@ -5555,7 +5541,7 @@
}
]
}
-05_02__container_blocks__list_items__14: |-
+05_02__container_blocks__list_items__014: |-
{
"type": "doc",
"content": [
@@ -5570,7 +5556,7 @@
}
]
}
-05_02__container_blocks__list_items__15: |-
+05_02__container_blocks__list_items__015: |-
{
"type": "doc",
"content": [
@@ -5599,7 +5585,7 @@
}
]
}
-05_02__container_blocks__list_items__16: |-
+05_02__container_blocks__list_items__016: |-
{
"type": "doc",
"content": [
@@ -5628,7 +5614,7 @@
}
]
}
-05_02__container_blocks__list_items__17: |-
+05_02__container_blocks__list_items__017: |-
{
"type": "doc",
"content": [
@@ -5643,7 +5629,7 @@
}
]
}
-05_02__container_blocks__list_items__18: |-
+05_02__container_blocks__list_items__018: |-
{
"type": "doc",
"content": [
@@ -5684,7 +5670,7 @@
}
]
}
-05_02__container_blocks__list_items__19: |-
+05_02__container_blocks__list_items__019: |-
{
"type": "doc",
"content": [
@@ -5726,7 +5712,7 @@
}
]
}
-05_02__container_blocks__list_items__20: |-
+05_02__container_blocks__list_items__020: |-
{
"type": "doc",
"content": [
@@ -5767,7 +5753,7 @@
}
]
}
-05_02__container_blocks__list_items__21: |-
+05_02__container_blocks__list_items__021: |-
{
"type": "doc",
"content": [
@@ -5825,7 +5811,7 @@
}
]
}
-05_02__container_blocks__list_items__22: |-
+05_02__container_blocks__list_items__022: |-
{
"type": "doc",
"content": [
@@ -5883,7 +5869,7 @@
}
]
}
-05_02__container_blocks__list_items__23: |-
+05_02__container_blocks__list_items__023: |-
{
"type": "doc",
"content": [
@@ -5907,7 +5893,7 @@
}
]
}
-05_02__container_blocks__list_items__24: |-
+05_02__container_blocks__list_items__024: |-
{
"type": "doc",
"content": [
@@ -5944,7 +5930,7 @@
}
]
}
-05_02__container_blocks__list_items__25: |-
+05_02__container_blocks__list_items__025: |-
{
"type": "doc",
"content": [
@@ -5981,7 +5967,7 @@
}
]
}
-05_02__container_blocks__list_items__26: |-
+05_02__container_blocks__list_items__026: |-
{
"type": "doc",
"content": [
@@ -6051,7 +6037,7 @@
}
]
}
-05_02__container_blocks__list_items__27: |-
+05_02__container_blocks__list_items__027: |-
{
"type": "doc",
"content": [
@@ -6079,7 +6065,7 @@
}
]
}
-05_02__container_blocks__list_items__28: |-
+05_02__container_blocks__list_items__028: |-
{
"type": "doc",
"content": [
@@ -6110,7 +6096,7 @@
}
]
}
-05_02__container_blocks__list_items__29: |-
+05_02__container_blocks__list_items__029: |-
{
"type": "doc",
"content": [
@@ -6160,7 +6146,7 @@
}
]
}
-05_02__container_blocks__list_items__30: |-
+05_02__container_blocks__list_items__030: |-
{
"type": "doc",
"content": [
@@ -6210,7 +6196,7 @@
}
]
}
-05_02__container_blocks__list_items__31: |-
+05_02__container_blocks__list_items__031: |-
{
"type": "doc",
"content": [
@@ -6261,7 +6247,7 @@
}
]
}
-05_02__container_blocks__list_items__32: |-
+05_02__container_blocks__list_items__032: |-
{
"type": "doc",
"content": [
@@ -6283,7 +6269,7 @@
}
]
}
-05_02__container_blocks__list_items__33: |-
+05_02__container_blocks__list_items__033: |-
{
"type": "doc",
"content": [
@@ -6307,7 +6293,7 @@
}
]
}
-05_02__container_blocks__list_items__34: |-
+05_02__container_blocks__list_items__034: |-
{
"type": "doc",
"content": [
@@ -6366,7 +6352,7 @@
}
]
}
-05_02__container_blocks__list_items__35: |-
+05_02__container_blocks__list_items__035: |-
{
"type": "doc",
"content": [
@@ -6425,7 +6411,7 @@
}
]
}
-05_02__container_blocks__list_items__36: |-
+05_02__container_blocks__list_items__036: |-
{
"type": "doc",
"content": [
@@ -6484,7 +6470,7 @@
}
]
}
-05_02__container_blocks__list_items__37: |-
+05_02__container_blocks__list_items__037: |-
{
"type": "doc",
"content": [
@@ -6503,7 +6489,7 @@
}
]
}
-05_02__container_blocks__list_items__38: |-
+05_02__container_blocks__list_items__038: |-
{
"type": "doc",
"content": [
@@ -6562,7 +6548,7 @@
}
]
}
-05_02__container_blocks__list_items__39: |-
+05_02__container_blocks__list_items__039: |-
{
"type": "doc",
"content": [
@@ -6591,7 +6577,7 @@
}
]
}
-05_02__container_blocks__list_items__40: |-
+05_02__container_blocks__list_items__040: |-
{
"type": "doc",
"content": [
@@ -6639,7 +6625,7 @@
}
]
}
-05_02__container_blocks__list_items__41: |-
+05_02__container_blocks__list_items__041: |-
{
"type": "doc",
"content": [
@@ -6687,7 +6673,7 @@
}
]
}
-05_02__container_blocks__list_items__42: |-
+05_02__container_blocks__list_items__042: |-
{
"type": "doc",
"content": [
@@ -6705,7 +6691,7 @@
"content": [
{
"type": "text",
- "text": "foo\n"
+ "text": "foo"
}
]
},
@@ -6723,7 +6709,7 @@
"content": [
{
"type": "text",
- "text": "bar\n"
+ "text": "bar"
}
]
},
@@ -6741,7 +6727,7 @@
"content": [
{
"type": "text",
- "text": "baz\n"
+ "text": "baz"
}
]
},
@@ -6781,7 +6767,7 @@
}
]
}
-05_02__container_blocks__list_items__43: |-
+05_02__container_blocks__list_items__043: |-
{
"type": "doc",
"content": [
@@ -6851,7 +6837,7 @@
}
]
}
-05_02__container_blocks__list_items__44: |-
+05_02__container_blocks__list_items__044: |-
{
"type": "doc",
"content": [
@@ -6870,7 +6856,7 @@
"content": [
{
"type": "text",
- "text": "foo\n"
+ "text": "foo"
}
]
},
@@ -6902,7 +6888,7 @@
}
]
}
-05_02__container_blocks__list_items__45: |-
+05_02__container_blocks__list_items__045: |-
{
"type": "doc",
"content": [
@@ -6953,7 +6939,7 @@
}
]
}
-05_02__container_blocks__list_items__46: |-
+05_02__container_blocks__list_items__046: |-
{
"type": "doc",
"content": [
@@ -6997,7 +6983,7 @@
}
]
}
-05_02__container_blocks__list_items__47: |-
+05_02__container_blocks__list_items__047: |-
{
"type": "doc",
"content": [
@@ -7059,7 +7045,7 @@
}
]
}
-05_02__container_blocks__list_items__48: |-
+05_02__container_blocks__list_items__048: |-
{
"type": "doc",
"content": [
@@ -7113,7 +7099,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__49: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__049: |-
{
"type": "doc",
"content": [
@@ -7131,134 +7117,6 @@
"content": [
{
"type": "text",
- "text": "[ ] foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[x] bar"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[x] foo\n"
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[ ] bar"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[x] baz"
- }
- ]
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "[ ] bim"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "foo"
- }
- ]
- }
- ]
- },
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "bar"
- }
- ]
- }
- ]
- }
- ]
- },
- {
- "type": "bulletList",
- "attrs": {
- "bullet": "*"
- },
- "content": [
- {
- "type": "listItem",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
"text": "baz"
}
]
@@ -7269,7 +7127,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__50: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__050: |-
{
"type": "doc",
"content": [
@@ -7335,7 +7193,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__51: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__051: |-
{
"type": "doc",
"content": [
@@ -7386,7 +7244,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__52: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__052: |-
{
"type": "doc",
"content": [
@@ -7401,7 +7259,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__53: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__053: |-
{
"type": "doc",
"content": [
@@ -7439,7 +7297,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__54: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__054: |-
{
"type": "doc",
"content": [
@@ -7495,7 +7353,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__55: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__055: |-
{
"type": "doc",
"content": [
@@ -7513,7 +7371,7 @@
"content": [
{
"type": "text",
- "text": "foo\n"
+ "text": "foo"
}
]
},
@@ -7531,7 +7389,7 @@
"content": [
{
"type": "text",
- "text": "bar\n"
+ "text": "bar"
}
]
},
@@ -7576,13 +7434,13 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__56: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__056: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__57: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__057: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__58: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__058: |-
{
"type": "doc",
"content": [
@@ -7694,7 +7552,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__59: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__059: |-
{
"type": "doc",
"content": [
@@ -7751,7 +7609,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__60: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__060: |-
{
"type": "doc",
"content": [
@@ -7821,7 +7679,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__61: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__061: |-
{
"type": "doc",
"content": [
@@ -7877,7 +7735,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__62: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__062: |-
{
"type": "doc",
"content": [
@@ -7933,7 +7791,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__63: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__063: |-
{
"type": "doc",
"content": [
@@ -7983,7 +7841,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__64: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__064: |-
{
"type": "doc",
"content": [
@@ -8048,7 +7906,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__65: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__065: |-
{
"type": "doc",
"content": [
@@ -8104,7 +7962,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__66: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__066: |-
{
"type": "doc",
"content": [
@@ -8167,7 +8025,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__67: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__067: |-
{
"type": "doc",
"content": [
@@ -8185,7 +8043,7 @@
"content": [
{
"type": "text",
- "text": "a\n"
+ "text": "a"
}
]
},
@@ -8240,7 +8098,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__68: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__068: |-
{
"type": "doc",
"content": [
@@ -8258,7 +8116,7 @@
"content": [
{
"type": "text",
- "text": "a\n"
+ "text": "a"
}
]
},
@@ -8299,7 +8157,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__69: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__069: |-
{
"type": "doc",
"content": [
@@ -8317,7 +8175,7 @@
"content": [
{
"type": "text",
- "text": "a\n"
+ "text": "a"
}
]
},
@@ -8371,7 +8229,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__70: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__070: |-
{
"type": "doc",
"content": [
@@ -8399,7 +8257,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__71: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__071: |-
{
"type": "doc",
"content": [
@@ -8417,7 +8275,7 @@
"content": [
{
"type": "text",
- "text": "a\n"
+ "text": "a"
}
]
},
@@ -8449,7 +8307,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__72: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__072: |-
{
"type": "doc",
"content": [
@@ -8494,7 +8352,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__73: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__073: |-
{
"type": "doc",
"content": [
@@ -8553,7 +8411,7 @@
}
]
}
-05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__74: |-
+05_02__container_blocks__list_items__motivation__task_list_items_extension__lists__074: |-
{
"type": "doc",
"content": [
@@ -8667,7 +8525,7 @@
}
]
}
-06_01__inlines__01: |-
+06_01__inlines__001: |-
{
"type": "doc",
"content": [
@@ -8691,7 +8549,7 @@
}
]
}
-06_02__inlines__backslash_escapes__01: |-
+06_02__inlines__backslash_escapes__001: |-
{
"type": "doc",
"content": [
@@ -8706,7 +8564,7 @@
}
]
}
-06_02__inlines__backslash_escapes__02: |-
+06_02__inlines__backslash_escapes__002: |-
{
"type": "doc",
"content": [
@@ -8721,7 +8579,7 @@
}
]
}
-06_02__inlines__backslash_escapes__03: |-
+06_02__inlines__backslash_escapes__003: |-
{
"type": "doc",
"content": [
@@ -8736,7 +8594,7 @@
}
]
}
-06_02__inlines__backslash_escapes__04: |-
+06_02__inlines__backslash_escapes__004: |-
{
"type": "doc",
"content": [
@@ -8760,7 +8618,7 @@
}
]
}
-06_02__inlines__backslash_escapes__05: |-
+06_02__inlines__backslash_escapes__005: |-
{
"type": "doc",
"content": [
@@ -8782,7 +8640,7 @@
}
]
}
-06_02__inlines__backslash_escapes__06: |-
+06_02__inlines__backslash_escapes__006: |-
{
"type": "doc",
"content": [
@@ -8802,7 +8660,7 @@
}
]
}
-06_02__inlines__backslash_escapes__07: |-
+06_02__inlines__backslash_escapes__007: |-
{
"type": "doc",
"content": [
@@ -8821,7 +8679,7 @@
}
]
}
-06_02__inlines__backslash_escapes__08: |-
+06_02__inlines__backslash_escapes__008: |-
{
"type": "doc",
"content": [
@@ -8840,7 +8698,7 @@
}
]
}
-06_02__inlines__backslash_escapes__09: |-
+06_02__inlines__backslash_escapes__009: |-
{
"type": "doc",
"content": [
@@ -8855,6 +8713,7 @@
"attrs": {
"href": "http://example.com?find=%5C*",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -8866,7 +8725,7 @@
}
]
}
-06_02__inlines__backslash_escapes__10: |-
+06_02__inlines__backslash_escapes__010: |-
{
"type": "doc",
"content": [
@@ -8875,7 +8734,7 @@
}
]
}
-06_02__inlines__backslash_escapes__11: |-
+06_02__inlines__backslash_escapes__011: |-
{
"type": "doc",
"content": [
@@ -8890,6 +8749,7 @@
"attrs": {
"href": "/bar*",
"target": "_blank",
+ "class": null,
"title": "ti*tle",
"canonicalSrc": null
}
@@ -8901,7 +8761,7 @@
}
]
}
-06_02__inlines__backslash_escapes__12: |-
+06_02__inlines__backslash_escapes__012: |-
{
"type": "doc",
"content": [
@@ -8916,6 +8776,7 @@
"attrs": {
"href": "/bar*",
"target": "_blank",
+ "class": null,
"title": "ti*tle",
"canonicalSrc": null
}
@@ -8927,7 +8788,7 @@
}
]
}
-06_02__inlines__backslash_escapes__13: |-
+06_02__inlines__backslash_escapes__013: |-
{
"type": "doc",
"content": [
@@ -8946,7 +8807,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__01: |-
+06_03__inlines__entity_and_numeric_character_references__001: |-
{
"type": "doc",
"content": [
@@ -8961,7 +8822,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__02: |-
+06_03__inlines__entity_and_numeric_character_references__002: |-
{
"type": "doc",
"content": [
@@ -8976,7 +8837,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__03: |-
+06_03__inlines__entity_and_numeric_character_references__003: |-
{
"type": "doc",
"content": [
@@ -8991,7 +8852,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__04: |-
+06_03__inlines__entity_and_numeric_character_references__004: |-
{
"type": "doc",
"content": [
@@ -9006,7 +8867,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__05: |-
+06_03__inlines__entity_and_numeric_character_references__005: |-
{
"type": "doc",
"content": [
@@ -9021,7 +8882,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__06: |-
+06_03__inlines__entity_and_numeric_character_references__006: |-
{
"type": "doc",
"content": [
@@ -9036,7 +8897,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__07: |-
+06_03__inlines__entity_and_numeric_character_references__007: |-
{
"type": "doc",
"content": [
@@ -9045,7 +8906,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__08: |-
+06_03__inlines__entity_and_numeric_character_references__008: |-
{
"type": "doc",
"content": [
@@ -9060,6 +8921,7 @@
"attrs": {
"href": "/f%C3%B6%C3%B6",
"target": "_blank",
+ "class": null,
"title": "föö",
"canonicalSrc": null
}
@@ -9071,7 +8933,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__09: |-
+06_03__inlines__entity_and_numeric_character_references__009: |-
{
"type": "doc",
"content": [
@@ -9086,6 +8948,7 @@
"attrs": {
"href": "/f%C3%B6%C3%B6",
"target": "_blank",
+ "class": null,
"title": "föö",
"canonicalSrc": null
}
@@ -9097,7 +8960,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__10: |-
+06_03__inlines__entity_and_numeric_character_references__010: |-
{
"type": "doc",
"content": [
@@ -9116,7 +8979,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__11: |-
+06_03__inlines__entity_and_numeric_character_references__011: |-
{
"type": "doc",
"content": [
@@ -9136,7 +8999,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__12: |-
+06_03__inlines__entity_and_numeric_character_references__012: |-
{
"type": "doc",
"content": [
@@ -9155,7 +9018,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__13: |-
+06_03__inlines__entity_and_numeric_character_references__013: |-
{
"type": "doc",
"content": [
@@ -9179,7 +9042,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__14: |-
+06_03__inlines__entity_and_numeric_character_references__014: |-
{
"type": "doc",
"content": [
@@ -9216,7 +9079,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__15: |-
+06_03__inlines__entity_and_numeric_character_references__015: |-
{
"type": "doc",
"content": [
@@ -9231,7 +9094,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__16: |-
+06_03__inlines__entity_and_numeric_character_references__016: |-
{
"type": "doc",
"content": [
@@ -9246,7 +9109,7 @@
}
]
}
-06_03__inlines__entity_and_numeric_character_references__17: |-
+06_03__inlines__entity_and_numeric_character_references__017: |-
{
"type": "doc",
"content": [
@@ -9261,7 +9124,7 @@
}
]
}
-06_04__inlines__code_spans__01: |-
+06_04__inlines__code_spans__001: |-
{
"type": "doc",
"content": [
@@ -9281,7 +9144,7 @@
}
]
}
-06_04__inlines__code_spans__02: |-
+06_04__inlines__code_spans__002: |-
{
"type": "doc",
"content": [
@@ -9301,7 +9164,7 @@
}
]
}
-06_04__inlines__code_spans__03: |-
+06_04__inlines__code_spans__003: |-
{
"type": "doc",
"content": [
@@ -9321,7 +9184,7 @@
}
]
}
-06_04__inlines__code_spans__04: |-
+06_04__inlines__code_spans__004: |-
{
"type": "doc",
"content": [
@@ -9341,7 +9204,7 @@
}
]
}
-06_04__inlines__code_spans__05: |-
+06_04__inlines__code_spans__005: |-
{
"type": "doc",
"content": [
@@ -9361,7 +9224,7 @@
}
]
}
-06_04__inlines__code_spans__06: |-
+06_04__inlines__code_spans__006: |-
{
"type": "doc",
"content": [
@@ -9381,7 +9244,7 @@
}
]
}
-06_04__inlines__code_spans__07: |-
+06_04__inlines__code_spans__007: |-
{
"type": "doc",
"content": [
@@ -9390,7 +9253,7 @@
}
]
}
-06_04__inlines__code_spans__08: |-
+06_04__inlines__code_spans__008: |-
{
"type": "doc",
"content": [
@@ -9410,7 +9273,7 @@
}
]
}
-06_04__inlines__code_spans__09: |-
+06_04__inlines__code_spans__009: |-
{
"type": "doc",
"content": [
@@ -9430,7 +9293,7 @@
}
]
}
-06_04__inlines__code_spans__10: |-
+06_04__inlines__code_spans__010: |-
{
"type": "doc",
"content": [
@@ -9450,7 +9313,7 @@
}
]
}
-06_04__inlines__code_spans__11: |-
+06_04__inlines__code_spans__011: |-
{
"type": "doc",
"content": [
@@ -9474,7 +9337,7 @@
}
]
}
-06_04__inlines__code_spans__12: |-
+06_04__inlines__code_spans__012: |-
{
"type": "doc",
"content": [
@@ -9494,7 +9357,7 @@
}
]
}
-06_04__inlines__code_spans__13: |-
+06_04__inlines__code_spans__013: |-
{
"type": "doc",
"content": [
@@ -9514,7 +9377,7 @@
}
]
}
-06_04__inlines__code_spans__14: |-
+06_04__inlines__code_spans__014: |-
{
"type": "doc",
"content": [
@@ -9538,7 +9401,7 @@
}
]
}
-06_04__inlines__code_spans__15: |-
+06_04__inlines__code_spans__015: |-
{
"type": "doc",
"content": [
@@ -9566,7 +9429,7 @@
}
]
}
-06_04__inlines__code_spans__16: |-
+06_04__inlines__code_spans__016: |-
{
"type": "doc",
"content": [
@@ -9590,7 +9453,7 @@
}
]
}
-06_04__inlines__code_spans__17: |-
+06_04__inlines__code_spans__017: |-
{
"type": "doc",
"content": [
@@ -9605,6 +9468,7 @@
"attrs": {
"href": "`",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -9616,7 +9480,7 @@
}
]
}
-06_04__inlines__code_spans__18: |-
+06_04__inlines__code_spans__018: |-
{
"type": "doc",
"content": [
@@ -9640,7 +9504,7 @@
}
]
}
-06_04__inlines__code_spans__19: |-
+06_04__inlines__code_spans__019: |-
{
"type": "doc",
"content": [
@@ -9655,6 +9519,7 @@
"attrs": {
"href": "http://foo.bar.%60baz",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -9670,7 +9535,7 @@
}
]
}
-06_04__inlines__code_spans__20: |-
+06_04__inlines__code_spans__020: |-
{
"type": "doc",
"content": [
@@ -9685,7 +9550,7 @@
}
]
}
-06_04__inlines__code_spans__21: |-
+06_04__inlines__code_spans__021: |-
{
"type": "doc",
"content": [
@@ -9700,7 +9565,7 @@
}
]
}
-06_04__inlines__code_spans__22: |-
+06_04__inlines__code_spans__022: |-
{
"type": "doc",
"content": [
@@ -9724,7 +9589,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__01: |-
+06_05__inlines__emphasis_and_strong_emphasis__001: |-
{
"type": "doc",
"content": [
@@ -9744,7 +9609,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__02: |-
+06_05__inlines__emphasis_and_strong_emphasis__002: |-
{
"type": "doc",
"content": [
@@ -9759,7 +9624,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__03: |-
+06_05__inlines__emphasis_and_strong_emphasis__003: |-
{
"type": "doc",
"content": [
@@ -9774,7 +9639,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__04: |-
+06_05__inlines__emphasis_and_strong_emphasis__004: |-
{
"type": "doc",
"content": [
@@ -9789,7 +9654,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__05: |-
+06_05__inlines__emphasis_and_strong_emphasis__005: |-
{
"type": "doc",
"content": [
@@ -9813,7 +9678,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__06: |-
+06_05__inlines__emphasis_and_strong_emphasis__006: |-
{
"type": "doc",
"content": [
@@ -9841,7 +9706,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__07: |-
+06_05__inlines__emphasis_and_strong_emphasis__007: |-
{
"type": "doc",
"content": [
@@ -9861,7 +9726,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__08: |-
+06_05__inlines__emphasis_and_strong_emphasis__008: |-
{
"type": "doc",
"content": [
@@ -9876,7 +9741,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__09: |-
+06_05__inlines__emphasis_and_strong_emphasis__009: |-
{
"type": "doc",
"content": [
@@ -9891,7 +9756,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__10: |-
+06_05__inlines__emphasis_and_strong_emphasis__010: |-
{
"type": "doc",
"content": [
@@ -9906,7 +9771,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__11: |-
+06_05__inlines__emphasis_and_strong_emphasis__011: |-
{
"type": "doc",
"content": [
@@ -9921,7 +9786,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__12: |-
+06_05__inlines__emphasis_and_strong_emphasis__012: |-
{
"type": "doc",
"content": [
@@ -9936,7 +9801,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__13: |-
+06_05__inlines__emphasis_and_strong_emphasis__013: |-
{
"type": "doc",
"content": [
@@ -9951,7 +9816,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__14: |-
+06_05__inlines__emphasis_and_strong_emphasis__014: |-
{
"type": "doc",
"content": [
@@ -9975,7 +9840,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__15: |-
+06_05__inlines__emphasis_and_strong_emphasis__015: |-
{
"type": "doc",
"content": [
@@ -9990,7 +9855,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__16: |-
+06_05__inlines__emphasis_and_strong_emphasis__016: |-
{
"type": "doc",
"content": [
@@ -10005,7 +9870,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__17: |-
+06_05__inlines__emphasis_and_strong_emphasis__017: |-
{
"type": "doc",
"content": [
@@ -10020,7 +9885,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__18: |-
+06_05__inlines__emphasis_and_strong_emphasis__018: |-
{
"type": "doc",
"content": [
@@ -10035,7 +9900,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__19: |-
+06_05__inlines__emphasis_and_strong_emphasis__019: |-
{
"type": "doc",
"content": [
@@ -10059,7 +9924,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__20: |-
+06_05__inlines__emphasis_and_strong_emphasis__020: |-
{
"type": "doc",
"content": [
@@ -10083,7 +9948,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__21: |-
+06_05__inlines__emphasis_and_strong_emphasis__021: |-
{
"type": "doc",
"content": [
@@ -10098,7 +9963,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__22: |-
+06_05__inlines__emphasis_and_strong_emphasis__022: |-
{
"type": "doc",
"content": [
@@ -10113,7 +9978,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__23: |-
+06_05__inlines__emphasis_and_strong_emphasis__023: |-
{
"type": "doc",
"content": [
@@ -10137,7 +10002,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__24: |-
+06_05__inlines__emphasis_and_strong_emphasis__024: |-
{
"type": "doc",
"content": [
@@ -10152,7 +10017,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__25: |-
+06_05__inlines__emphasis_and_strong_emphasis__025: |-
{
"type": "doc",
"content": [
@@ -10167,7 +10032,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__26: |-
+06_05__inlines__emphasis_and_strong_emphasis__026: |-
{
"type": "doc",
"content": [
@@ -10187,7 +10052,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__27: |-
+06_05__inlines__emphasis_and_strong_emphasis__027: |-
{
"type": "doc",
"content": [
@@ -10211,7 +10076,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__28: |-
+06_05__inlines__emphasis_and_strong_emphasis__028: |-
{
"type": "doc",
"content": [
@@ -10231,7 +10096,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__29: |-
+06_05__inlines__emphasis_and_strong_emphasis__029: |-
{
"type": "doc",
"content": [
@@ -10246,7 +10111,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__30: |-
+06_05__inlines__emphasis_and_strong_emphasis__030: |-
{
"type": "doc",
"content": [
@@ -10261,7 +10126,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__31: |-
+06_05__inlines__emphasis_and_strong_emphasis__031: |-
{
"type": "doc",
"content": [
@@ -10285,7 +10150,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__32: |-
+06_05__inlines__emphasis_and_strong_emphasis__032: |-
{
"type": "doc",
"content": [
@@ -10305,7 +10170,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__33: |-
+06_05__inlines__emphasis_and_strong_emphasis__033: |-
{
"type": "doc",
"content": [
@@ -10320,7 +10185,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__34: |-
+06_05__inlines__emphasis_and_strong_emphasis__034: |-
{
"type": "doc",
"content": [
@@ -10335,7 +10200,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__35: |-
+06_05__inlines__emphasis_and_strong_emphasis__035: |-
{
"type": "doc",
"content": [
@@ -10350,7 +10215,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__36: |-
+06_05__inlines__emphasis_and_strong_emphasis__036: |-
{
"type": "doc",
"content": [
@@ -10365,7 +10230,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__37: |-
+06_05__inlines__emphasis_and_strong_emphasis__037: |-
{
"type": "doc",
"content": [
@@ -10380,7 +10245,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__38: |-
+06_05__inlines__emphasis_and_strong_emphasis__038: |-
{
"type": "doc",
"content": [
@@ -10395,7 +10260,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__39: |-
+06_05__inlines__emphasis_and_strong_emphasis__039: |-
{
"type": "doc",
"content": [
@@ -10419,7 +10284,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__40: |-
+06_05__inlines__emphasis_and_strong_emphasis__040: |-
{
"type": "doc",
"content": [
@@ -10443,7 +10308,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__41: |-
+06_05__inlines__emphasis_and_strong_emphasis__041: |-
{
"type": "doc",
"content": [
@@ -10458,7 +10323,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__42: |-
+06_05__inlines__emphasis_and_strong_emphasis__042: |-
{
"type": "doc",
"content": [
@@ -10473,7 +10338,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__43: |-
+06_05__inlines__emphasis_and_strong_emphasis__043: |-
{
"type": "doc",
"content": [
@@ -10506,7 +10371,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__44: |-
+06_05__inlines__emphasis_and_strong_emphasis__044: |-
{
"type": "doc",
"content": [
@@ -10552,7 +10417,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__45: |-
+06_05__inlines__emphasis_and_strong_emphasis__045: |-
{
"type": "doc",
"content": [
@@ -10585,7 +10450,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__46: |-
+06_05__inlines__emphasis_and_strong_emphasis__046: |-
{
"type": "doc",
"content": [
@@ -10609,7 +10474,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__47: |-
+06_05__inlines__emphasis_and_strong_emphasis__047: |-
{
"type": "doc",
"content": [
@@ -10624,7 +10489,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__48: |-
+06_05__inlines__emphasis_and_strong_emphasis__048: |-
{
"type": "doc",
"content": [
@@ -10639,7 +10504,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__49: |-
+06_05__inlines__emphasis_and_strong_emphasis__049: |-
{
"type": "doc",
"content": [
@@ -10672,7 +10537,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__50: |-
+06_05__inlines__emphasis_and_strong_emphasis__050: |-
{
"type": "doc",
"content": [
@@ -10687,7 +10552,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__51: |-
+06_05__inlines__emphasis_and_strong_emphasis__051: |-
{
"type": "doc",
"content": [
@@ -10702,7 +10567,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__52: |-
+06_05__inlines__emphasis_and_strong_emphasis__052: |-
{
"type": "doc",
"content": [
@@ -10722,7 +10587,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__53: |-
+06_05__inlines__emphasis_and_strong_emphasis__053: |-
{
"type": "doc",
"content": [
@@ -10746,7 +10611,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__54: |-
+06_05__inlines__emphasis_and_strong_emphasis__054: |-
{
"type": "doc",
"content": [
@@ -10770,6 +10635,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -10781,7 +10647,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__55: |-
+06_05__inlines__emphasis_and_strong_emphasis__055: |-
{
"type": "doc",
"content": [
@@ -10801,7 +10667,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__56: |-
+06_05__inlines__emphasis_and_strong_emphasis__056: |-
{
"type": "doc",
"content": [
@@ -10834,7 +10700,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__57: |-
+06_05__inlines__emphasis_and_strong_emphasis__057: |-
{
"type": "doc",
"content": [
@@ -10858,7 +10724,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__58: |-
+06_05__inlines__emphasis_and_strong_emphasis__058: |-
{
"type": "doc",
"content": [
@@ -10882,7 +10748,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__59: |-
+06_05__inlines__emphasis_and_strong_emphasis__059: |-
{
"type": "doc",
"content": [
@@ -10902,7 +10768,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__60: |-
+06_05__inlines__emphasis_and_strong_emphasis__060: |-
{
"type": "doc",
"content": [
@@ -10935,7 +10801,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__61: |-
+06_05__inlines__emphasis_and_strong_emphasis__061: |-
{
"type": "doc",
"content": [
@@ -10968,7 +10834,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__62: |-
+06_05__inlines__emphasis_and_strong_emphasis__062: |-
{
"type": "doc",
"content": [
@@ -10988,7 +10854,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__63: |-
+06_05__inlines__emphasis_and_strong_emphasis__063: |-
{
"type": "doc",
"content": [
@@ -11015,7 +10881,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__64: |-
+06_05__inlines__emphasis_and_strong_emphasis__064: |-
{
"type": "doc",
"content": [
@@ -11044,7 +10910,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__65: |-
+06_05__inlines__emphasis_and_strong_emphasis__065: |-
{
"type": "doc",
"content": [
@@ -11073,7 +10939,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__66: |-
+06_05__inlines__emphasis_and_strong_emphasis__066: |-
{
"type": "doc",
"content": [
@@ -11104,7 +10970,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__67: |-
+06_05__inlines__emphasis_and_strong_emphasis__067: |-
{
"type": "doc",
"content": [
@@ -11132,7 +10998,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__68: |-
+06_05__inlines__emphasis_and_strong_emphasis__068: |-
{
"type": "doc",
"content": [
@@ -11174,7 +11040,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__69: |-
+06_05__inlines__emphasis_and_strong_emphasis__069: |-
{
"type": "doc",
"content": [
@@ -11198,6 +11064,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -11212,7 +11079,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__70: |-
+06_05__inlines__emphasis_and_strong_emphasis__070: |-
{
"type": "doc",
"content": [
@@ -11227,7 +11094,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__71: |-
+06_05__inlines__emphasis_and_strong_emphasis__071: |-
{
"type": "doc",
"content": [
@@ -11242,7 +11109,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__72: |-
+06_05__inlines__emphasis_and_strong_emphasis__072: |-
{
"type": "doc",
"content": [
@@ -11266,6 +11133,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -11277,7 +11145,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__73: |-
+06_05__inlines__emphasis_and_strong_emphasis__073: |-
{
"type": "doc",
"content": [
@@ -11297,7 +11165,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__74: |-
+06_05__inlines__emphasis_and_strong_emphasis__074: |-
{
"type": "doc",
"content": [
@@ -11330,7 +11198,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__75: |-
+06_05__inlines__emphasis_and_strong_emphasis__075: |-
{
"type": "doc",
"content": [
@@ -11354,7 +11222,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__76: |-
+06_05__inlines__emphasis_and_strong_emphasis__076: |-
{
"type": "doc",
"content": [
@@ -11378,7 +11246,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__77: |-
+06_05__inlines__emphasis_and_strong_emphasis__077: |-
{
"type": "doc",
"content": [
@@ -11398,7 +11266,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__78: |-
+06_05__inlines__emphasis_and_strong_emphasis__078: |-
{
"type": "doc",
"content": [
@@ -11431,7 +11299,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__79: |-
+06_05__inlines__emphasis_and_strong_emphasis__079: |-
{
"type": "doc",
"content": [
@@ -11464,7 +11332,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__80: |-
+06_05__inlines__emphasis_and_strong_emphasis__080: |-
{
"type": "doc",
"content": [
@@ -11491,7 +11359,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__81: |-
+06_05__inlines__emphasis_and_strong_emphasis__081: |-
{
"type": "doc",
"content": [
@@ -11520,7 +11388,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__82: |-
+06_05__inlines__emphasis_and_strong_emphasis__082: |-
{
"type": "doc",
"content": [
@@ -11562,7 +11430,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__83: |-
+06_05__inlines__emphasis_and_strong_emphasis__083: |-
{
"type": "doc",
"content": [
@@ -11586,6 +11454,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -11600,7 +11469,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__84: |-
+06_05__inlines__emphasis_and_strong_emphasis__084: |-
{
"type": "doc",
"content": [
@@ -11615,7 +11484,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__85: |-
+06_05__inlines__emphasis_and_strong_emphasis__085: |-
{
"type": "doc",
"content": [
@@ -11630,7 +11499,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__86: |-
+06_05__inlines__emphasis_and_strong_emphasis__086: |-
{
"type": "doc",
"content": [
@@ -11645,7 +11514,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__87: |-
+06_05__inlines__emphasis_and_strong_emphasis__087: |-
{
"type": "doc",
"content": [
@@ -11669,7 +11538,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__88: |-
+06_05__inlines__emphasis_and_strong_emphasis__088: |-
{
"type": "doc",
"content": [
@@ -11693,7 +11562,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__89: |-
+06_05__inlines__emphasis_and_strong_emphasis__089: |-
{
"type": "doc",
"content": [
@@ -11708,7 +11577,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__90: |-
+06_05__inlines__emphasis_and_strong_emphasis__090: |-
{
"type": "doc",
"content": [
@@ -11732,7 +11601,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__91: |-
+06_05__inlines__emphasis_and_strong_emphasis__091: |-
{
"type": "doc",
"content": [
@@ -11756,7 +11625,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__92: |-
+06_05__inlines__emphasis_and_strong_emphasis__092: |-
{
"type": "doc",
"content": [
@@ -11780,7 +11649,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__93: |-
+06_05__inlines__emphasis_and_strong_emphasis__093: |-
{
"type": "doc",
"content": [
@@ -11804,7 +11673,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__94: |-
+06_05__inlines__emphasis_and_strong_emphasis__094: |-
{
"type": "doc",
"content": [
@@ -11828,7 +11697,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__95: |-
+06_05__inlines__emphasis_and_strong_emphasis__095: |-
{
"type": "doc",
"content": [
@@ -11852,7 +11721,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__96: |-
+06_05__inlines__emphasis_and_strong_emphasis__096: |-
{
"type": "doc",
"content": [
@@ -11876,7 +11745,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__97: |-
+06_05__inlines__emphasis_and_strong_emphasis__097: |-
{
"type": "doc",
"content": [
@@ -11900,7 +11769,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__98: |-
+06_05__inlines__emphasis_and_strong_emphasis__098: |-
{
"type": "doc",
"content": [
@@ -11915,7 +11784,7 @@
}
]
}
-06_05__inlines__emphasis_and_strong_emphasis__99: |-
+06_05__inlines__emphasis_and_strong_emphasis__099: |-
{
"type": "doc",
"content": [
@@ -12480,6 +12349,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12510,6 +12380,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12653,6 +12524,7 @@
"attrs": {
"href": "http://foo.bar/?q=**",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12683,6 +12555,7 @@
"attrs": {
"href": "http://foo.bar/?q=__",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12694,7 +12567,7 @@
}
]
}
-06_06__inlines__strikethrough_extension__01: |-
+06_06__inlines__strikethrough_extension__001: |-
{
"type": "doc",
"content": [
@@ -12703,13 +12576,22 @@
"content": [
{
"type": "text",
- "text": "~~Hi~~ Hello, world!"
+ "marks": [
+ {
+ "type": "strike"
+ }
+ ],
+ "text": "Hi"
+ },
+ {
+ "type": "text",
+ "text": " Hello, world!"
}
]
}
]
}
-06_06__inlines__strikethrough_extension__02: |-
+06_06__inlines__strikethrough_extension__002: |-
{
"type": "doc",
"content": [
@@ -12733,7 +12615,7 @@
}
]
}
-06_07__inlines__links__01: |-
+06_07__inlines__links__001: |-
{
"type": "doc",
"content": [
@@ -12748,6 +12630,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -12759,7 +12642,7 @@
}
]
}
-06_07__inlines__links__02: |-
+06_07__inlines__links__002: |-
{
"type": "doc",
"content": [
@@ -12774,6 +12657,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12785,7 +12669,7 @@
}
]
}
-06_07__inlines__links__03: |-
+06_07__inlines__links__003: |-
{
"type": "doc",
"content": [
@@ -12800,6 +12684,7 @@
"attrs": {
"href": "",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12811,7 +12696,7 @@
}
]
}
-06_07__inlines__links__04: |-
+06_07__inlines__links__004: |-
{
"type": "doc",
"content": [
@@ -12826,6 +12711,7 @@
"attrs": {
"href": "",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12837,7 +12723,7 @@
}
]
}
-06_07__inlines__links__05: |-
+06_07__inlines__links__005: |-
{
"type": "doc",
"content": [
@@ -12852,7 +12738,7 @@
}
]
}
-06_07__inlines__links__06: |-
+06_07__inlines__links__006: |-
{
"type": "doc",
"content": [
@@ -12867,6 +12753,7 @@
"attrs": {
"href": "/my%20uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12878,7 +12765,7 @@
}
]
}
-06_07__inlines__links__07: |-
+06_07__inlines__links__007: |-
{
"type": "doc",
"content": [
@@ -12893,10 +12780,10 @@
}
]
}
-06_07__inlines__links__08: |-
+06_07__inlines__links__008: |-
Error - check implementation:
Hast node of type "foo" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__09: |-
+06_07__inlines__links__009: |-
{
"type": "doc",
"content": [
@@ -12911,6 +12798,7 @@
"attrs": {
"href": "b)c",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12922,7 +12810,7 @@
}
]
}
-06_07__inlines__links__10: |-
+06_07__inlines__links__010: |-
{
"type": "doc",
"content": [
@@ -12937,7 +12825,7 @@
}
]
}
-06_07__inlines__links__11: |-
+06_07__inlines__links__011: |-
{
"type": "doc",
"content": [
@@ -12961,7 +12849,7 @@
}
]
}
-06_07__inlines__links__12: |-
+06_07__inlines__links__012: |-
{
"type": "doc",
"content": [
@@ -12976,6 +12864,7 @@
"attrs": {
"href": "(foo)",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -12987,7 +12876,7 @@
}
]
}
-06_07__inlines__links__13: |-
+06_07__inlines__links__013: |-
{
"type": "doc",
"content": [
@@ -13002,6 +12891,7 @@
"attrs": {
"href": "foo(and(bar))",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13013,7 +12903,7 @@
}
]
}
-06_07__inlines__links__14: |-
+06_07__inlines__links__014: |-
{
"type": "doc",
"content": [
@@ -13028,6 +12918,7 @@
"attrs": {
"href": "foo(and(bar)",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13039,7 +12930,7 @@
}
]
}
-06_07__inlines__links__15: |-
+06_07__inlines__links__015: |-
{
"type": "doc",
"content": [
@@ -13054,6 +12945,7 @@
"attrs": {
"href": "foo(and(bar)",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13065,7 +12957,7 @@
}
]
}
-06_07__inlines__links__16: |-
+06_07__inlines__links__016: |-
{
"type": "doc",
"content": [
@@ -13080,6 +12972,7 @@
"attrs": {
"href": "foo):",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13091,7 +12984,7 @@
}
]
}
-06_07__inlines__links__17: |-
+06_07__inlines__links__017: |-
{
"type": "doc",
"content": [
@@ -13106,6 +12999,7 @@
"attrs": {
"href": "#fragment",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13126,6 +13020,7 @@
"attrs": {
"href": "http://example.com#fragment",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13146,6 +13041,7 @@
"attrs": {
"href": "http://example.com?foo=3#frag",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13157,7 +13053,7 @@
}
]
}
-06_07__inlines__links__18: |-
+06_07__inlines__links__018: |-
{
"type": "doc",
"content": [
@@ -13172,6 +13068,7 @@
"attrs": {
"href": "foo%5Cbar",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13183,7 +13080,7 @@
}
]
}
-06_07__inlines__links__19: |-
+06_07__inlines__links__019: |-
{
"type": "doc",
"content": [
@@ -13198,6 +13095,7 @@
"attrs": {
"href": "foo%20b%C3%A4",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13209,7 +13107,7 @@
}
]
}
-06_07__inlines__links__20: |-
+06_07__inlines__links__020: |-
{
"type": "doc",
"content": [
@@ -13224,6 +13122,7 @@
"attrs": {
"href": "%22title%22",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13235,7 +13134,7 @@
}
]
}
-06_07__inlines__links__21: |-
+06_07__inlines__links__021: |-
{
"type": "doc",
"content": [
@@ -13250,6 +13149,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -13261,7 +13161,7 @@
}
]
}
-06_07__inlines__links__22: |-
+06_07__inlines__links__022: |-
{
"type": "doc",
"content": [
@@ -13276,6 +13176,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title \"\"",
"canonicalSrc": null
}
@@ -13287,7 +13188,7 @@
}
]
}
-06_07__inlines__links__23: |-
+06_07__inlines__links__023: |-
{
"type": "doc",
"content": [
@@ -13302,6 +13203,7 @@
"attrs": {
"href": "/url%C2%A0%22title%22",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13313,7 +13215,7 @@
}
]
}
-06_07__inlines__links__24: |-
+06_07__inlines__links__024: |-
{
"type": "doc",
"content": [
@@ -13328,7 +13230,7 @@
}
]
}
-06_07__inlines__links__25: |-
+06_07__inlines__links__025: |-
{
"type": "doc",
"content": [
@@ -13343,6 +13245,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title \"and\" title",
"canonicalSrc": null
}
@@ -13354,7 +13257,7 @@
}
]
}
-06_07__inlines__links__26: |-
+06_07__inlines__links__026: |-
{
"type": "doc",
"content": [
@@ -13369,6 +13272,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -13380,7 +13284,7 @@
}
]
}
-06_07__inlines__links__27: |-
+06_07__inlines__links__027: |-
{
"type": "doc",
"content": [
@@ -13395,7 +13299,7 @@
}
]
}
-06_07__inlines__links__28: |-
+06_07__inlines__links__028: |-
{
"type": "doc",
"content": [
@@ -13410,6 +13314,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13421,7 +13326,7 @@
}
]
}
-06_07__inlines__links__29: |-
+06_07__inlines__links__029: |-
{
"type": "doc",
"content": [
@@ -13436,7 +13341,7 @@
}
]
}
-06_07__inlines__links__30: |-
+06_07__inlines__links__030: |-
{
"type": "doc",
"content": [
@@ -13455,6 +13360,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13466,7 +13372,7 @@
}
]
}
-06_07__inlines__links__31: |-
+06_07__inlines__links__031: |-
{
"type": "doc",
"content": [
@@ -13481,6 +13387,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13492,7 +13399,7 @@
}
]
}
-06_07__inlines__links__32: |-
+06_07__inlines__links__032: |-
{
"type": "doc",
"content": [
@@ -13507,6 +13414,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13545,10 +13453,10 @@
}
]
}
-06_07__inlines__links__33: |-
+06_07__inlines__links__033: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__34: |-
+06_07__inlines__links__034: |-
{
"type": "doc",
"content": [
@@ -13567,6 +13475,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13582,7 +13491,7 @@
}
]
}
-06_07__inlines__links__35: |-
+06_07__inlines__links__035: |-
{
"type": "doc",
"content": [
@@ -13610,6 +13519,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13625,7 +13535,7 @@
}
]
}
-06_07__inlines__links__36: |-
+06_07__inlines__links__036: |-
{
"type": "doc",
"content": [
@@ -13646,7 +13556,7 @@
}
]
}
-06_07__inlines__links__37: |-
+06_07__inlines__links__037: |-
{
"type": "doc",
"content": [
@@ -13665,6 +13575,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13676,7 +13587,7 @@
}
]
}
-06_07__inlines__links__38: |-
+06_07__inlines__links__038: |-
{
"type": "doc",
"content": [
@@ -13691,6 +13602,7 @@
"attrs": {
"href": "baz*",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13702,7 +13614,7 @@
}
]
}
-06_07__inlines__links__39: |-
+06_07__inlines__links__039: |-
{
"type": "doc",
"content": [
@@ -13726,10 +13638,10 @@
}
]
}
-06_07__inlines__links__40: |-
+06_07__inlines__links__040: |-
Error - check implementation:
Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__41: |-
+06_07__inlines__links__041: |-
{
"type": "doc",
"content": [
@@ -13753,7 +13665,7 @@
}
]
}
-06_07__inlines__links__42: |-
+06_07__inlines__links__042: |-
{
"type": "doc",
"content": [
@@ -13772,6 +13684,7 @@
"attrs": {
"href": "http://example.com/?search=%5D(uri)",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13783,7 +13696,7 @@
}
]
}
-06_07__inlines__links__43: |-
+06_07__inlines__links__043: |-
{
"type": "doc",
"content": [
@@ -13798,6 +13711,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -13809,7 +13723,7 @@
}
]
}
-06_07__inlines__links__44: |-
+06_07__inlines__links__044: |-
{
"type": "doc",
"content": [
@@ -13824,6 +13738,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13835,7 +13750,7 @@
}
]
}
-06_07__inlines__links__45: |-
+06_07__inlines__links__045: |-
{
"type": "doc",
"content": [
@@ -13850,6 +13765,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13861,7 +13777,7 @@
}
]
}
-06_07__inlines__links__46: |-
+06_07__inlines__links__046: |-
{
"type": "doc",
"content": [
@@ -13876,6 +13792,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13914,10 +13831,10 @@
}
]
}
-06_07__inlines__links__47: |-
+06_07__inlines__links__047: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_07__inlines__links__48: |-
+06_07__inlines__links__048: |-
{
"type": "doc",
"content": [
@@ -13936,6 +13853,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13955,6 +13873,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -13966,7 +13885,7 @@
}
]
}
-06_07__inlines__links__49: |-
+06_07__inlines__links__049: |-
{
"type": "doc",
"content": [
@@ -13994,6 +13913,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14013,6 +13933,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14024,7 +13945,7 @@
}
]
}
-06_07__inlines__links__50: |-
+06_07__inlines__links__050: |-
{
"type": "doc",
"content": [
@@ -14043,6 +13964,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14054,7 +13976,7 @@
}
]
}
-06_07__inlines__links__51: |-
+06_07__inlines__links__051: |-
{
"type": "doc",
"content": [
@@ -14069,6 +13991,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14080,10 +14003,10 @@
}
]
}
-06_07__inlines__links__52: |-
+06_07__inlines__links__052: |-
Error - check implementation:
Hast node of type "bar" not supported by this converter. Please, provide an specification.
-06_07__inlines__links__53: |-
+06_07__inlines__links__053: |-
{
"type": "doc",
"content": [
@@ -14107,7 +14030,7 @@
}
]
}
-06_07__inlines__links__54: |-
+06_07__inlines__links__054: |-
{
"type": "doc",
"content": [
@@ -14126,6 +14049,7 @@
"attrs": {
"href": "http://example.com/?search=%5D%5Bref%5D",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14137,7 +14061,7 @@
}
]
}
-06_07__inlines__links__55: |-
+06_07__inlines__links__055: |-
{
"type": "doc",
"content": [
@@ -14152,6 +14076,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14163,7 +14088,7 @@
}
]
}
-06_07__inlines__links__56: |-
+06_07__inlines__links__056: |-
{
"type": "doc",
"content": [
@@ -14178,6 +14103,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14193,7 +14119,7 @@
}
]
}
-06_07__inlines__links__57: |-
+06_07__inlines__links__057: |-
{
"type": "doc",
"content": [
@@ -14208,6 +14134,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14219,7 +14146,7 @@
}
]
}
-06_07__inlines__links__58: |-
+06_07__inlines__links__058: |-
{
"type": "doc",
"content": [
@@ -14238,6 +14165,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14249,7 +14177,7 @@
}
]
}
-06_07__inlines__links__59: |-
+06_07__inlines__links__059: |-
{
"type": "doc",
"content": [
@@ -14268,6 +14196,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14279,7 +14208,7 @@
}
]
}
-06_07__inlines__links__60: |-
+06_07__inlines__links__060: |-
{
"type": "doc",
"content": [
@@ -14294,6 +14223,7 @@
"attrs": {
"href": "/url1",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14305,7 +14235,7 @@
}
]
}
-06_07__inlines__links__61: |-
+06_07__inlines__links__061: |-
{
"type": "doc",
"content": [
@@ -14320,7 +14250,7 @@
}
]
}
-06_07__inlines__links__62: |-
+06_07__inlines__links__062: |-
{
"type": "doc",
"content": [
@@ -14344,7 +14274,7 @@
}
]
}
-06_07__inlines__links__63: |-
+06_07__inlines__links__063: |-
{
"type": "doc",
"content": [
@@ -14368,7 +14298,7 @@
}
]
}
-06_07__inlines__links__64: |-
+06_07__inlines__links__064: |-
{
"type": "doc",
"content": [
@@ -14392,7 +14322,7 @@
}
]
}
-06_07__inlines__links__65: |-
+06_07__inlines__links__065: |-
{
"type": "doc",
"content": [
@@ -14407,6 +14337,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14418,7 +14349,7 @@
}
]
}
-06_07__inlines__links__66: |-
+06_07__inlines__links__066: |-
{
"type": "doc",
"content": [
@@ -14433,6 +14364,7 @@
"attrs": {
"href": "/uri",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14444,7 +14376,7 @@
}
]
}
-06_07__inlines__links__67: |-
+06_07__inlines__links__067: |-
{
"type": "doc",
"content": [
@@ -14468,7 +14400,7 @@
}
]
}
-06_07__inlines__links__68: |-
+06_07__inlines__links__068: |-
{
"type": "doc",
"content": [
@@ -14492,7 +14424,7 @@
}
]
}
-06_07__inlines__links__69: |-
+06_07__inlines__links__069: |-
{
"type": "doc",
"content": [
@@ -14507,6 +14439,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14518,7 +14451,7 @@
}
]
}
-06_07__inlines__links__70: |-
+06_07__inlines__links__070: |-
{
"type": "doc",
"content": [
@@ -14533,6 +14466,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14551,7 +14485,7 @@
}
]
}
-06_07__inlines__links__71: |-
+06_07__inlines__links__071: |-
{
"type": "doc",
"content": [
@@ -14566,6 +14500,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14577,7 +14512,7 @@
}
]
}
-06_07__inlines__links__72: |-
+06_07__inlines__links__072: |-
{
"type": "doc",
"content": [
@@ -14592,6 +14527,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14607,7 +14543,7 @@
}
]
}
-06_07__inlines__links__73: |-
+06_07__inlines__links__073: |-
{
"type": "doc",
"content": [
@@ -14622,6 +14558,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14633,7 +14570,7 @@
}
]
}
-06_07__inlines__links__74: |-
+06_07__inlines__links__074: |-
{
"type": "doc",
"content": [
@@ -14648,6 +14585,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14666,7 +14604,7 @@
}
]
}
-06_07__inlines__links__75: |-
+06_07__inlines__links__075: |-
{
"type": "doc",
"content": [
@@ -14685,6 +14623,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14703,7 +14642,7 @@
}
]
}
-06_07__inlines__links__76: |-
+06_07__inlines__links__076: |-
{
"type": "doc",
"content": [
@@ -14722,6 +14661,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14733,7 +14673,7 @@
}
]
}
-06_07__inlines__links__77: |-
+06_07__inlines__links__077: |-
{
"type": "doc",
"content": [
@@ -14748,6 +14688,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -14759,7 +14700,7 @@
}
]
}
-06_07__inlines__links__78: |-
+06_07__inlines__links__078: |-
{
"type": "doc",
"content": [
@@ -14774,6 +14715,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14789,7 +14731,7 @@
}
]
}
-06_07__inlines__links__79: |-
+06_07__inlines__links__079: |-
{
"type": "doc",
"content": [
@@ -14804,7 +14746,7 @@
}
]
}
-06_07__inlines__links__80: |-
+06_07__inlines__links__080: |-
{
"type": "doc",
"content": [
@@ -14823,6 +14765,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14834,7 +14777,7 @@
}
]
}
-06_07__inlines__links__81: |-
+06_07__inlines__links__081: |-
{
"type": "doc",
"content": [
@@ -14849,6 +14792,7 @@
"attrs": {
"href": "/url2",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14860,7 +14804,7 @@
}
]
}
-06_07__inlines__links__82: |-
+06_07__inlines__links__082: |-
{
"type": "doc",
"content": [
@@ -14875,6 +14819,7 @@
"attrs": {
"href": "/url1",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14886,7 +14831,7 @@
}
]
}
-06_07__inlines__links__83: |-
+06_07__inlines__links__083: |-
{
"type": "doc",
"content": [
@@ -14901,6 +14846,7 @@
"attrs": {
"href": "",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14912,7 +14858,7 @@
}
]
}
-06_07__inlines__links__84: |-
+06_07__inlines__links__084: |-
{
"type": "doc",
"content": [
@@ -14927,6 +14873,7 @@
"attrs": {
"href": "/url1",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14942,7 +14889,7 @@
}
]
}
-06_07__inlines__links__85: |-
+06_07__inlines__links__085: |-
{
"type": "doc",
"content": [
@@ -14961,6 +14908,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -14972,7 +14920,7 @@
}
]
}
-06_07__inlines__links__86: |-
+06_07__inlines__links__086: |-
{
"type": "doc",
"content": [
@@ -14987,6 +14935,7 @@
"attrs": {
"href": "/url2",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15002,6 +14951,7 @@
"attrs": {
"href": "/url1",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15013,7 +14963,7 @@
}
]
}
-06_07__inlines__links__87: |-
+06_07__inlines__links__087: |-
{
"type": "doc",
"content": [
@@ -15032,6 +14982,7 @@
"attrs": {
"href": "/url1",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15043,7 +14994,7 @@
}
]
}
-06_08__inlines__images__01: |-
+06_08__inlines__images__001: |-
{
"type": "doc",
"content": [
@@ -15064,7 +15015,7 @@
}
]
}
-06_08__inlines__images__02: |-
+06_08__inlines__images__002: |-
{
"type": "doc",
"content": [
@@ -15085,7 +15036,7 @@
}
]
}
-06_08__inlines__images__03: |-
+06_08__inlines__images__003: |-
{
"type": "doc",
"content": [
@@ -15106,7 +15057,7 @@
}
]
}
-06_08__inlines__images__04: |-
+06_08__inlines__images__004: |-
{
"type": "doc",
"content": [
@@ -15127,7 +15078,7 @@
}
]
}
-06_08__inlines__images__05: |-
+06_08__inlines__images__005: |-
{
"type": "doc",
"content": [
@@ -15148,7 +15099,7 @@
}
]
}
-06_08__inlines__images__06: |-
+06_08__inlines__images__006: |-
{
"type": "doc",
"content": [
@@ -15169,7 +15120,7 @@
}
]
}
-06_08__inlines__images__07: |-
+06_08__inlines__images__007: |-
{
"type": "doc",
"content": [
@@ -15190,7 +15141,7 @@
}
]
}
-06_08__inlines__images__08: |-
+06_08__inlines__images__008: |-
{
"type": "doc",
"content": [
@@ -15215,7 +15166,7 @@
}
]
}
-06_08__inlines__images__09: |-
+06_08__inlines__images__009: |-
{
"type": "doc",
"content": [
@@ -15236,7 +15187,7 @@
}
]
}
-06_08__inlines__images__10: |-
+06_08__inlines__images__010: |-
{
"type": "doc",
"content": [
@@ -15257,7 +15208,7 @@
}
]
}
-06_08__inlines__images__11: |-
+06_08__inlines__images__011: |-
{
"type": "doc",
"content": [
@@ -15278,7 +15229,7 @@
}
]
}
-06_08__inlines__images__12: |-
+06_08__inlines__images__012: |-
{
"type": "doc",
"content": [
@@ -15299,7 +15250,7 @@
}
]
}
-06_08__inlines__images__13: |-
+06_08__inlines__images__013: |-
{
"type": "doc",
"content": [
@@ -15320,7 +15271,7 @@
}
]
}
-06_08__inlines__images__14: |-
+06_08__inlines__images__014: |-
{
"type": "doc",
"content": [
@@ -15341,7 +15292,7 @@
}
]
}
-06_08__inlines__images__15: |-
+06_08__inlines__images__015: |-
{
"type": "doc",
"content": [
@@ -15362,7 +15313,7 @@
}
]
}
-06_08__inlines__images__16: |-
+06_08__inlines__images__016: |-
{
"type": "doc",
"content": [
@@ -15387,7 +15338,7 @@
}
]
}
-06_08__inlines__images__17: |-
+06_08__inlines__images__017: |-
{
"type": "doc",
"content": [
@@ -15408,7 +15359,7 @@
}
]
}
-06_08__inlines__images__18: |-
+06_08__inlines__images__018: |-
{
"type": "doc",
"content": [
@@ -15429,7 +15380,7 @@
}
]
}
-06_08__inlines__images__19: |-
+06_08__inlines__images__019: |-
{
"type": "doc",
"content": [
@@ -15453,7 +15404,7 @@
}
]
}
-06_08__inlines__images__20: |-
+06_08__inlines__images__020: |-
{
"type": "doc",
"content": [
@@ -15474,7 +15425,7 @@
}
]
}
-06_08__inlines__images__21: |-
+06_08__inlines__images__021: |-
{
"type": "doc",
"content": [
@@ -15489,7 +15440,7 @@
}
]
}
-06_08__inlines__images__22: |-
+06_08__inlines__images__022: |-
{
"type": "doc",
"content": [
@@ -15508,6 +15459,7 @@
"attrs": {
"href": "/url",
"target": "_blank",
+ "class": null,
"title": "title",
"canonicalSrc": null
}
@@ -15519,7 +15471,7 @@
}
]
}
-06_09__inlines__autolinks__01: |-
+06_09__inlines__autolinks__001: |-
{
"type": "doc",
"content": [
@@ -15534,6 +15486,7 @@
"attrs": {
"href": "http://foo.bar.baz",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15545,7 +15498,7 @@
}
]
}
-06_09__inlines__autolinks__02: |-
+06_09__inlines__autolinks__002: |-
{
"type": "doc",
"content": [
@@ -15560,6 +15513,7 @@
"attrs": {
"href": "http://foo.bar.baz/test?q=hello&id=22&boolean",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15571,7 +15525,7 @@
}
]
}
-06_09__inlines__autolinks__03: |-
+06_09__inlines__autolinks__003: |-
{
"type": "doc",
"content": [
@@ -15586,6 +15540,7 @@
"attrs": {
"href": "irc://foo.bar:2233/baz",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15597,7 +15552,7 @@
}
]
}
-06_09__inlines__autolinks__04: |-
+06_09__inlines__autolinks__004: |-
{
"type": "doc",
"content": [
@@ -15612,6 +15567,7 @@
"attrs": {
"href": "MAILTO:FOO@BAR.BAZ",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15623,7 +15579,7 @@
}
]
}
-06_09__inlines__autolinks__05: |-
+06_09__inlines__autolinks__005: |-
{
"type": "doc",
"content": [
@@ -15638,6 +15594,7 @@
"attrs": {
"href": "a+b+c:d",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15649,7 +15606,7 @@
}
]
}
-06_09__inlines__autolinks__06: |-
+06_09__inlines__autolinks__006: |-
{
"type": "doc",
"content": [
@@ -15664,6 +15621,7 @@
"attrs": {
"href": "made-up-scheme://foo,bar",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15675,7 +15633,7 @@
}
]
}
-06_09__inlines__autolinks__07: |-
+06_09__inlines__autolinks__007: |-
{
"type": "doc",
"content": [
@@ -15690,6 +15648,7 @@
"attrs": {
"href": "http://../",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15701,7 +15660,7 @@
}
]
}
-06_09__inlines__autolinks__08: |-
+06_09__inlines__autolinks__008: |-
{
"type": "doc",
"content": [
@@ -15716,6 +15675,7 @@
"attrs": {
"href": "localhost:5001/foo",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15727,7 +15687,7 @@
}
]
}
-06_09__inlines__autolinks__09: |-
+06_09__inlines__autolinks__009: |-
{
"type": "doc",
"content": [
@@ -15736,13 +15696,33 @@
"content": [
{
"type": "text",
- "text": "<http://foo.bar/baz bim>"
+ "text": "<"
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://foo.bar/baz",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "http://foo.bar/baz"
+ },
+ {
+ "type": "text",
+ "text": " bim>"
}
]
}
]
}
-06_09__inlines__autolinks__10: |-
+06_09__inlines__autolinks__010: |-
{
"type": "doc",
"content": [
@@ -15757,6 +15737,7 @@
"attrs": {
"href": "http://example.com/%5C%5B%5C",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15768,7 +15749,7 @@
}
]
}
-06_09__inlines__autolinks__11: |-
+06_09__inlines__autolinks__011: |-
{
"type": "doc",
"content": [
@@ -15783,6 +15764,7 @@
"attrs": {
"href": "mailto:foo@bar.example.com",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15794,7 +15776,7 @@
}
]
}
-06_09__inlines__autolinks__12: |-
+06_09__inlines__autolinks__012: |-
{
"type": "doc",
"content": [
@@ -15809,6 +15791,7 @@
"attrs": {
"href": "mailto:foo+special@Bar.baz-bar0.com",
"target": "_blank",
+ "class": null,
"title": null,
"canonicalSrc": null
}
@@ -15820,7 +15803,10 @@
}
]
}
-06_09__inlines__autolinks__13: |-
+06_09__inlines__autolinks__013: |-
+ Error - check implementation:
+ Cannot read properties of undefined (reading 'end')
+06_09__inlines__autolinks__014: |-
{
"type": "doc",
"content": [
@@ -15829,13 +15815,13 @@
"content": [
{
"type": "text",
- "text": "<foo+@bar.example.com>"
+ "text": "<>"
}
]
}
]
}
-06_09__inlines__autolinks__14: |-
+06_09__inlines__autolinks__015: |-
{
"type": "doc",
"content": [
@@ -15844,28 +15830,33 @@
"content": [
{
"type": "text",
- "text": "<>"
- }
- ]
- }
- ]
- }
-06_09__inlines__autolinks__15: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
+ "text": "< "
+ },
{
"type": "text",
- "text": "< http://foo.bar >"
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://foo.bar",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "http://foo.bar"
+ },
+ {
+ "type": "text",
+ "text": " >"
}
]
}
]
}
-06_09__inlines__autolinks__16: |-
+06_09__inlines__autolinks__016: |-
{
"type": "doc",
"content": [
@@ -15880,7 +15871,7 @@
}
]
}
-06_09__inlines__autolinks__17: |-
+06_09__inlines__autolinks__017: |-
{
"type": "doc",
"content": [
@@ -15895,7 +15886,7 @@
}
]
}
-06_09__inlines__autolinks__18: |-
+06_09__inlines__autolinks__018: |-
{
"type": "doc",
"content": [
@@ -15904,13 +15895,25 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://example.com",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "http://example.com"
}
]
}
]
}
-06_09__inlines__autolinks__19: |-
+06_09__inlines__autolinks__019: |-
{
"type": "doc",
"content": [
@@ -15919,13 +15922,25 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "mailto:foo@bar.example.com",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "foo@bar.example.com"
}
]
}
]
}
-06_10__inlines__autolinks_extension__01: |-
+06_10__inlines__autolinks_extension__001: |-
{
"type": "doc",
"content": [
@@ -15934,13 +15949,25 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.commonmark.org",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "www.commonmark.org"
}
]
}
]
}
-06_10__inlines__autolinks_extension__02: |-
+06_10__inlines__autolinks_extension__002: |-
{
"type": "doc",
"content": [
@@ -15949,13 +15976,33 @@
"content": [
{
"type": "text",
- "text": "Visit www.commonmark.org/help for more information."
+ "text": "Visit "
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.commonmark.org/help",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.commonmark.org/help"
+ },
+ {
+ "type": "text",
+ "text": " for more information."
}
]
}
]
}
-06_10__inlines__autolinks_extension__03: |-
+06_10__inlines__autolinks_extension__003: |-
{
"type": "doc",
"content": [
@@ -15964,7 +16011,27 @@
"content": [
{
"type": "text",
- "text": "Visit www.commonmark.org."
+ "text": "Visit "
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.commonmark.org",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.commonmark.org"
+ },
+ {
+ "type": "text",
+ "text": "."
}
]
},
@@ -15973,13 +16040,33 @@
"content": [
{
"type": "text",
- "text": "Visit www.commonmark.org/a.b."
+ "text": "Visit "
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.commonmark.org/a.b",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.commonmark.org/a.b"
+ },
+ {
+ "type": "text",
+ "text": "."
}
]
}
]
}
-06_10__inlines__autolinks_extension__04: |-
+06_10__inlines__autolinks_extension__004: |-
{
"type": "doc",
"content": [
@@ -15988,6 +16075,18 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.google.com/search?q=Markup+(business)",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "www.google.com/search?q=Markup+(business)"
}
]
@@ -15997,7 +16096,23 @@
"content": [
{
"type": "text",
- "text": "www.google.com/search?q=Markup+(business)))"
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.google.com/search?q=Markup+(business)",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.google.com/search?q=Markup+(business)"
+ },
+ {
+ "type": "text",
+ "text": "))"
}
]
},
@@ -16006,7 +16121,27 @@
"content": [
{
"type": "text",
- "text": "(www.google.com/search?q=Markup+(business))"
+ "text": "("
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.google.com/search?q=Markup+(business)",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.google.com/search?q=Markup+(business)"
+ },
+ {
+ "type": "text",
+ "text": ")"
}
]
},
@@ -16015,13 +16150,29 @@
"content": [
{
"type": "text",
- "text": "(www.google.com/search?q=Markup+(business)"
+ "text": "("
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.google.com/search?q=Markup+(business)",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.google.com/search?q=Markup+(business)"
}
]
}
]
}
-06_10__inlines__autolinks_extension__05: |-
+06_10__inlines__autolinks_extension__005: |-
{
"type": "doc",
"content": [
@@ -16030,13 +16181,25 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.google.com/search?q=(business))+ok",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "www.google.com/search?q=(business))+ok"
}
]
}
]
}
-06_10__inlines__autolinks_extension__06: |-
+06_10__inlines__autolinks_extension__006: |-
{
"type": "doc",
"content": [
@@ -16045,6 +16208,18 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.google.com/search?q=commonmark&hl=en",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "www.google.com/search?q=commonmark&hl=en"
}
]
@@ -16054,13 +16229,29 @@
"content": [
{
"type": "text",
- "text": "www.google.com/search?q=commonmark&hl;"
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.google.com/search?q=commonmark",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.google.com/search?q=commonmark"
+ },
+ {
+ "type": "text",
+ "text": "&hl;"
}
]
}
]
}
-06_10__inlines__autolinks_extension__07: |-
+06_10__inlines__autolinks_extension__007: |-
{
"type": "doc",
"content": [
@@ -16069,13 +16260,29 @@
"content": [
{
"type": "text",
- "text": "www.commonmark.org/he<lp"
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://www.commonmark.org/he",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "www.commonmark.org/he"
+ },
+ {
+ "type": "text",
+ "text": "<lp"
}
]
}
]
}
-06_10__inlines__autolinks_extension__08: |-
+06_10__inlines__autolinks_extension__008: |-
{
"type": "doc",
"content": [
@@ -16084,6 +16291,18 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "http://commonmark.org",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "http://commonmark.org"
}
]
@@ -16093,7 +16312,27 @@
"content": [
{
"type": "text",
- "text": "(Visit https://encrypted.google.com/search?q=Markup+(business))"
+ "text": "(Visit "
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "https://encrypted.google.com/search?q=Markup+(business)",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "https://encrypted.google.com/search?q=Markup+(business)"
+ },
+ {
+ "type": "text",
+ "text": ")"
}
]
},
@@ -16108,7 +16347,7 @@
}
]
}
-06_10__inlines__autolinks_extension__09: |-
+06_10__inlines__autolinks_extension__009: |-
{
"type": "doc",
"content": [
@@ -16117,13 +16356,25 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "mailto:foo@bar.baz",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "foo@bar.baz"
}
]
}
]
}
-06_10__inlines__autolinks_extension__10: |-
+06_10__inlines__autolinks_extension__010: |-
{
"type": "doc",
"content": [
@@ -16132,13 +16383,33 @@
"content": [
{
"type": "text",
- "text": "hello@mail+xyz.example isn't valid, but hello+xyz@mail.example is."
+ "text": "hello@mail+xyz.example isn't valid, but "
+ },
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "mailto:hello+xyz@mail.example",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "hello+xyz@mail.example"
+ },
+ {
+ "type": "text",
+ "text": " is."
}
]
}
]
}
-06_10__inlines__autolinks_extension__11: |-
+06_10__inlines__autolinks_extension__011: |-
{
"type": "doc",
"content": [
@@ -16147,6 +16418,18 @@
"content": [
{
"type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "mailto:a.b-c_d@a.b",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
"text": "a.b-c_d@a.b"
}
]
@@ -16156,7 +16439,23 @@
"content": [
{
"type": "text",
- "text": "a.b-c_d@a.b."
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "mailto:a.b-c_d@a.b",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": null
+ }
+ }
+ ],
+ "text": "a.b-c_d@a.b"
+ },
+ {
+ "type": "text",
+ "text": "."
}
]
},
@@ -16180,16 +16479,16 @@
}
]
}
-06_11__inlines__raw_html__01: |-
+06_11__inlines__raw_html__001: |-
Error - check implementation:
Hast node of type "bab" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__02: |-
+06_11__inlines__raw_html__002: |-
Error - check implementation:
Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__03: |-
+06_11__inlines__raw_html__003: |-
Error - check implementation:
Hast node of type "b2" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__04: |-
+06_11__inlines__raw_html__004: |-
{
"type": "doc",
"content": [
@@ -16198,10 +16497,10 @@
}
]
}
-06_11__inlines__raw_html__05: |-
+06_11__inlines__raw_html__005: |-
Error - check implementation:
Hast node of type "responsive-image" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__06: |-
+06_11__inlines__raw_html__006: |-
{
"type": "doc",
"content": [
@@ -16216,7 +16515,7 @@
}
]
}
-06_11__inlines__raw_html__07: |-
+06_11__inlines__raw_html__007: |-
{
"type": "doc",
"content": [
@@ -16231,7 +16530,7 @@
}
]
}
-06_11__inlines__raw_html__08: |-
+06_11__inlines__raw_html__008: |-
{
"type": "doc",
"content": [
@@ -16246,7 +16545,7 @@
}
]
}
-06_11__inlines__raw_html__09: |-
+06_11__inlines__raw_html__009: |-
{
"type": "doc",
"content": [
@@ -16261,7 +16560,7 @@
}
]
}
-06_11__inlines__raw_html__10: |-
+06_11__inlines__raw_html__010: |-
{
"type": "doc",
"content": [
@@ -16276,7 +16575,7 @@
}
]
}
-06_11__inlines__raw_html__11: |-
+06_11__inlines__raw_html__011: |-
{
"type": "doc",
"content": [
@@ -16285,7 +16584,7 @@
}
]
}
-06_11__inlines__raw_html__12: |-
+06_11__inlines__raw_html__012: |-
{
"type": "doc",
"content": [
@@ -16300,10 +16599,10 @@
}
]
}
-06_11__inlines__raw_html__13: |-
+06_11__inlines__raw_html__013: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__14: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__014: |-
{
"type": "doc",
"content": [
@@ -16318,7 +16617,7 @@
}
]
}
-06_11__inlines__raw_html__15: |-
+06_11__inlines__raw_html__015: |-
{
"type": "doc",
"content": [
@@ -16342,16 +16641,16 @@
}
]
}
-06_11__inlines__raw_html__16: |-
+06_11__inlines__raw_html__016: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__17: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__017: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__18: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__018: |-
Error - check implementation:
- Hast node of type "comment" not supported by this converter. Please, provide an specification.
-06_11__inlines__raw_html__19: |-
+ Cannot destructure property 'className' of 'hastNode.properties' as it is undefined.
+06_11__inlines__raw_html__019: |-
{
"type": "doc",
"content": [
@@ -16366,7 +16665,7 @@
}
]
}
-06_11__inlines__raw_html__20: |-
+06_11__inlines__raw_html__020: |-
{
"type": "doc",
"content": [
@@ -16381,7 +16680,7 @@
}
]
}
-06_11__inlines__raw_html__21: |-
+06_11__inlines__raw_html__021: |-
{
"type": "doc",
"content": [
@@ -16396,10 +16695,10 @@
}
]
}
-06_12__inlines__disallowed_raw_html_extension__01: |-
+06_12__inlines__disallowed_raw_html_extension__001: |-
Error - check implementation:
Hast node of type "title" not supported by this converter. Please, provide an specification.
-06_13__inlines__hard_line_breaks__01: |-
+06_13__inlines__hard_line_breaks__001: |-
{
"type": "doc",
"content": [
@@ -16421,7 +16720,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__02: |-
+06_13__inlines__hard_line_breaks__002: |-
{
"type": "doc",
"content": [
@@ -16443,7 +16742,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__03: |-
+06_13__inlines__hard_line_breaks__003: |-
{
"type": "doc",
"content": [
@@ -16465,7 +16764,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__04: |-
+06_13__inlines__hard_line_breaks__004: |-
{
"type": "doc",
"content": [
@@ -16487,7 +16786,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__05: |-
+06_13__inlines__hard_line_breaks__005: |-
{
"type": "doc",
"content": [
@@ -16509,13 +16808,13 @@
}
]
}
-06_13__inlines__hard_line_breaks__06: |-
+06_13__inlines__hard_line_breaks__006: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__07: |-
+06_13__inlines__hard_line_breaks__007: |-
Error - check implementation:
Cannot destructure property 'type' of 'this.stack.pop(...)' as it is undefined.
-06_13__inlines__hard_line_breaks__08: |-
+06_13__inlines__hard_line_breaks__008: |-
{
"type": "doc",
"content": [
@@ -16535,7 +16834,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__09: |-
+06_13__inlines__hard_line_breaks__009: |-
{
"type": "doc",
"content": [
@@ -16555,7 +16854,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__10: |-
+06_13__inlines__hard_line_breaks__010: |-
{
"type": "doc",
"content": [
@@ -16564,7 +16863,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__11: |-
+06_13__inlines__hard_line_breaks__011: |-
{
"type": "doc",
"content": [
@@ -16573,7 +16872,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__12: |-
+06_13__inlines__hard_line_breaks__012: |-
{
"type": "doc",
"content": [
@@ -16588,7 +16887,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__13: |-
+06_13__inlines__hard_line_breaks__013: |-
{
"type": "doc",
"content": [
@@ -16603,7 +16902,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__14: |-
+06_13__inlines__hard_line_breaks__014: |-
{
"type": "doc",
"content": [
@@ -16621,7 +16920,7 @@
}
]
}
-06_13__inlines__hard_line_breaks__15: |-
+06_13__inlines__hard_line_breaks__015: |-
{
"type": "doc",
"content": [
@@ -16639,7 +16938,7 @@
}
]
}
-06_14__inlines__soft_line_breaks__01: |-
+06_14__inlines__soft_line_breaks__001: |-
{
"type": "doc",
"content": [
@@ -16654,7 +16953,7 @@
}
]
}
-06_14__inlines__soft_line_breaks__02: |-
+06_14__inlines__soft_line_breaks__002: |-
{
"type": "doc",
"content": [
@@ -16669,7 +16968,7 @@
}
]
}
-06_15__inlines__textual_content__01: |-
+06_15__inlines__textual_content__001: |-
{
"type": "doc",
"content": [
@@ -16684,7 +16983,7 @@
}
]
}
-06_15__inlines__textual_content__02: |-
+06_15__inlines__textual_content__002: |-
{
"type": "doc",
"content": [
@@ -16699,7 +16998,7 @@
}
]
}
-06_15__inlines__textual_content__03: |-
+06_15__inlines__textual_content__003: |-
{
"type": "doc",
"content": [
@@ -16714,26 +17013,6 @@
}
]
}
-07_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bold"
- }
- ]
- }
- ]
- }
-08_01__second_gitlab_specific_section_with_examples__strong_but_with_html__01: |-
+07_01__gitlab_specific_markdown__footnotes__001: |-
Error - check implementation:
- Cannot read properties of undefined (reading 'wrapTextInParagraph')
+ Hast node of type "sup" not supported by this converter. Please, provide an specification.
diff --git a/spec/fixtures/project_services/campfire/rooms.json b/spec/fixtures/integrations/campfire/rooms.json
index 71e9645c955..71e9645c955 100644
--- a/spec/fixtures/project_services/campfire/rooms.json
+++ b/spec/fixtures/integrations/campfire/rooms.json
diff --git a/spec/fixtures/project_services/campfire/rooms2.json b/spec/fixtures/integrations/campfire/rooms2.json
index 3d5f635d8b3..3d5f635d8b3 100644
--- a/spec/fixtures/project_services/campfire/rooms2.json
+++ b/spec/fixtures/integrations/campfire/rooms2.json
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_numbers_metric.rb b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_numbers_metric.rb
new file mode 100644
index 00000000000..ab6bdaf7a10
--- /dev/null
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_generator/sample_numbers_metric.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Usage
+ module Metrics
+ module Instrumentations
+ class CountFooMetric < NumbersMetric
+ operation :add
+
+ data do |time_frame|
+ [
+ # Insert numbers here
+ ]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/markdown/markdown_golden_master_examples.yml b/spec/fixtures/markdown/markdown_golden_master_examples.yml
index 5847e9f2cdf..a1ad88ef69c 100644
--- a/spec/fixtures/markdown/markdown_golden_master_examples.yml
+++ b/spec/fixtures/markdown/markdown_golden_master_examples.yml
@@ -290,7 +290,7 @@
</li>
</ul>
-- name: code_block
+- name: code_block_javascript
markdown: |-
```javascript
console.log('hello world')
@@ -301,6 +301,28 @@
<copy-code></copy-code>
</div>
+- name: code_block_plaintext
+ markdown: |-
+ ```
+ plaintext
+ ```
+ html: |-
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> plaintext</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+
+- name: code_block_unknown
+ markdown: |-
+ ```foobar
+ custom_language = >> this <<
+ ```
+ html: |-
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="foobar" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> custom_language = &gt;&gt; this &lt;&lt;</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+
- name: color_chips
markdown: |-
- `#F00`
@@ -475,19 +497,20 @@
markdown: |-
A footnote reference tag looks like this: [^1]
- This reference tag is a mix of letters and numbers. [^2]
+ This reference tag is a mix of letters and numbers. [^footnote]
[^1]: This is the text inside a footnote.
- [^2]: This is another footnote.
+
+ [^footnote]: This is another footnote.
html: |-
<p data-sourcepos="1:1-1:46" dir="auto">A footnote reference tag looks like this: <sup class="footnote-ref"><a href="#fn-1-2717" id="fnref-1-2717" data-footnote-ref="">1</a></sup></p>
- <p data-sourcepos="3:1-3:56" dir="auto">This reference tag is a mix of letters and numbers. <sup class="footnote-ref"><a href="#fn-2-2717" id="fnref-2-2717" data-footnote-ref="">2</a></sup></p>
+ <p data-sourcepos="3:1-3:56" dir="auto">This reference tag is a mix of letters and numbers. <sup class="footnote-ref"><a href="#fn-footnote-2717" id="fnref-footnote-2717" data-footnote-ref="">2</a></sup></p>
<section class="footnotes" data-footnotes><ol>
<li id="fn-1-2717">
<p data-sourcepos="5:7-5:41">This is the text inside a footnote. <a href="#fnref-1-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
- <li id="fn-2-2717">
- <p data-sourcepos="6:7-6:31">This is another footnote. <a href="#fnref-2-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ <li id="fn-footnote-2717">
+ <p data-sourcepos="6:7-6:31">This is another footnote. <a href="#fnref-footnote-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
</li>
</ol></section>
@@ -750,7 +773,7 @@
markdown: |-
Hi @gfm_user - thank you for reporting this bug (#1) we hope to fix it in %1.1 as part of !1
html: |-
- <p data-sourcepos="1:1-1:92" dir="auto">Hi <a href="/gfm_user" data-user="1" data-reference-type="user" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="John Doe1">@gfm_user</a> - thank you for reporting this bug (<a href="/group1/project1/-/issues/1" data-original="#1" data-link="false" data-link-reference="false" data-project="11" data-issue="11" data-issue-type="issue" data-reference-type="issue" data-container="body" data-placement="top" title="My title 1" class="gfm gfm-issue has-tooltip">#1</a>) we hope to fix it in <a href="/group1/project1/-/milestones/1" data-original="%1.1" data-link="false" data-link-reference="false" data-project="11" data-milestone="11" data-reference-type="milestone" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%1.1</a> as part of <a href="/group1/project1/-/merge_requests/1" data-original="!1" data-link="false" data-link-reference="false" data-project="11" data-merge-request="11" data-project-path="group1/project1" data-iid="1" data-mr-title="My title 2" data-reference-type="merge_request" data-container="body" data-placement="top" title="" class="gfm gfm-merge_request">!1</a></p>
+ <p data-sourcepos="1:1-1:92" dir="auto">Hi <a href="/gfm_user" data-user="1" data-reference-type="user" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="John Doe1">@gfm_user</a> - thank you for reporting this bug (<a href="/group1/project1/-/issues/1" data-original="#1" data-link="false" data-link-reference="false" data-project="11" data-issue="11" data-project-path="group1/project1" data-iid="1" data-issue-type="issue" data-reference-type="issue" data-container="body" data-placement="top" title="My title 1" class="gfm gfm-issue">#1</a>) we hope to fix it in <a href="/group1/project1/-/milestones/1" data-original="%1.1" data-link="false" data-link-reference="false" data-project="11" data-milestone="11" data-reference-type="milestone" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%1.1</a> as part of <a href="/group1/project1/-/merge_requests/1" data-original="!1" data-link="false" data-link-reference="false" data-project="11" data-merge-request="11" data-project-path="group1/project1" data-iid="1" data-reference-type="merge_request" data-container="body" data-placement="top" title="My title 2" class="gfm gfm-merge_request">!1</a></p>
- name: strike
markdown: |-
~~del~~
diff --git a/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json b/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json
index 57a4dee3ddd..538364f84a2 100644
--- a/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json
+++ b/spec/fixtures/security_reports/feature-branch/gl-secret-detection-report.json
@@ -1,5 +1,5 @@
{
- "version": "3.0",
+ "version": "14.1.2",
"vulnerabilities": [],
"remediations": []
}
diff --git a/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json b/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json
index f65580145b4..ab3ee348263 100644
--- a/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json
+++ b/spec/fixtures/security_reports/master/gl-sast-missing-scanner.json
@@ -1,5 +1,5 @@
{
- "version": "1.2",
+ "version": "14.1.2",
"vulnerabilities": [
{
"category": "sast",
diff --git a/spec/fixtures/security_reports/master/gl-secret-detection-report.json b/spec/fixtures/security_reports/master/gl-secret-detection-report.json
index f0250ec9145..9b0b2a19beb 100644
--- a/spec/fixtures/security_reports/master/gl-secret-detection-report.json
+++ b/spec/fixtures/security_reports/master/gl-secret-detection-report.json
@@ -1,5 +1,5 @@
{
- "version": "3.0",
+ "version": "14.1.2",
"vulnerabilities": [
{
"id": "27d2322d519c94f803ffed1cf6d14e455df97e5a0668e229eb853fdb0d277d2c",
diff --git a/spec/frontend/__helpers__/dl_locator_helper.js b/spec/frontend/__helpers__/dl_locator_helper.js
new file mode 100644
index 00000000000..b507dcd599d
--- /dev/null
+++ b/spec/frontend/__helpers__/dl_locator_helper.js
@@ -0,0 +1,28 @@
+import { createWrapper, ErrorWrapper } from '@vue/test-utils';
+
+/**
+ * Find the definition (<dd>) that corresponds to this term (<dt>)
+ *
+ * Given html in the `wrapper`:
+ *
+ * <dl>
+ * <dt>My label</dt>
+ * <dd>Value</dd>
+ * </dl>
+ *
+ * findDd('My label', wrapper)
+ *
+ * Returns `<dd>Value</dd>`
+ *
+ * @param {object} wrapper - Parent wrapper
+ * @param {string} dtLabel - Label for this value
+ * @returns Wrapper
+ */
+export const findDd = (dtLabel, wrapper) => {
+ const dt = wrapper.findByText(dtLabel).element;
+ const dd = dt.nextElementSibling;
+ if (dt.tagName === 'DT' && dd.tagName === 'DD') {
+ return createWrapper(dd, {});
+ }
+ return ErrorWrapper(dtLabel);
+};
diff --git a/spec/frontend/__helpers__/emoji.js b/spec/frontend/__helpers__/emoji.js
index 014a7854024..6c9291bdc8f 100644
--- a/spec/frontend/__helpers__/emoji.js
+++ b/spec/frontend/__helpers__/emoji.js
@@ -58,6 +58,16 @@ export const validEmoji = {
unicodeVersion: '6.0',
description: 'because it contains multiple zero width joiners',
},
+ thumbsup: {
+    moji: '👍',
+ unicodeVersion: '6.0',
+ description: 'thumbs up sign',
+ },
+ thumbsdown: {
+ moji: '👎',
+ description: 'thumbs down sign',
+ unicodeVersion: '6.0',
+ },
};
export const invalidEmoji = {
diff --git a/spec/frontend/__helpers__/init_vue_mr_page_helper.js b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
index ee01e9e6268..6b719a32480 100644
--- a/spec/frontend/__helpers__/init_vue_mr_page_helper.js
+++ b/spec/frontend/__helpers__/init_vue_mr_page_helper.js
@@ -13,16 +13,16 @@ export default function initVueMRPage() {
const diffsAppProjectPath = 'testproject';
const mrEl = document.createElement('div');
mrEl.className = 'merge-request fixture-mr';
- mrEl.setAttribute('data-mr-action', 'diffs');
+ mrEl.dataset.mrAction = 'diffs';
mrTestEl.appendChild(mrEl);
const mrDiscussionsEl = document.createElement('div');
mrDiscussionsEl.id = 'js-vue-mr-discussions';
- mrDiscussionsEl.setAttribute('data-current-user-data', JSON.stringify(userDataMock));
- mrDiscussionsEl.setAttribute('data-noteable-data', JSON.stringify(noteableDataMock));
- mrDiscussionsEl.setAttribute('data-notes-data', JSON.stringify(notesDataMock));
- mrDiscussionsEl.setAttribute('data-noteable-type', 'merge-request');
- mrDiscussionsEl.setAttribute('data-is-locked', 'false');
+ mrDiscussionsEl.dataset.currentUserData = JSON.stringify(userDataMock);
+ mrDiscussionsEl.dataset.noteableData = JSON.stringify(noteableDataMock);
+ mrDiscussionsEl.dataset.notesData = JSON.stringify(notesDataMock);
+ mrDiscussionsEl.dataset.noteableType = 'merge-request';
+ mrDiscussionsEl.dataset.isLocked = 'false';
mrTestEl.appendChild(mrDiscussionsEl);
const discussionCounterEl = document.createElement('div');
@@ -31,9 +31,9 @@ export default function initVueMRPage() {
const diffsAppEl = document.createElement('div');
diffsAppEl.id = 'js-diffs-app';
- diffsAppEl.setAttribute('data-endpoint', diffsAppEndpoint);
- diffsAppEl.setAttribute('data-project-path', diffsAppProjectPath);
- diffsAppEl.setAttribute('data-current-user-data', JSON.stringify(userDataMock));
+ diffsAppEl.dataset.endpoint = diffsAppEndpoint;
+ diffsAppEl.dataset.projectPath = diffsAppProjectPath;
+ diffsAppEl.dataset.currentUserData = JSON.stringify(userDataMock);
mrTestEl.appendChild(diffsAppEl);
const mock = new MockAdapter(axios);
diff --git a/spec/frontend/__helpers__/matchers/to_have_sprite_icon.js b/spec/frontend/__helpers__/matchers/to_have_sprite_icon.js
index bce9d93bea8..45b9c31c4db 100644
--- a/spec/frontend/__helpers__/matchers/to_have_sprite_icon.js
+++ b/spec/frontend/__helpers__/matchers/to_have_sprite_icon.js
@@ -9,7 +9,7 @@ export const toHaveSpriteIcon = (element, iconName) => {
const iconReferences = [].slice.apply(element.querySelectorAll('svg use'));
const matchingIcon = iconReferences.find(
- (reference) => reference.parentNode.getAttribute('data-testid') === `${iconName}-icon`,
+ (reference) => reference.parentNode.dataset.testid === `${iconName}-icon`,
);
const pass = Boolean(matchingIcon);
diff --git a/spec/frontend/access_tokens/components/access_token_table_app_spec.js b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
new file mode 100644
index 00000000000..b45abe418e4
--- /dev/null
+++ b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
@@ -0,0 +1,241 @@
+import { GlPagination, GlTable } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue';
+import { EVENT_SUCCESS, PAGE_SIZE } from '~/access_tokens/components/constants';
+import { __, s__, sprintf } from '~/locale';
+import DomElementListener from '~/vue_shared/components/dom_element_listener.vue';
+
+describe('~/access_tokens/components/access_token_table_app', () => {
+ let wrapper;
+
+ const accessTokenType = 'personal access token';
+ const accessTokenTypePlural = 'personal access tokens';
+ const initialActiveAccessTokens = [];
+ const noActiveTokensMessage = 'This user has no active personal access tokens.';
+ const showRole = false;
+
+ const defaultActiveAccessTokens = [
+ {
+ name: 'a',
+ scopes: ['api'],
+ created_at: '2021-05-01T00:00:00.000Z',
+ last_used_at: null,
+ expired: false,
+ expires_soon: true,
+ expires_at: null,
+ revoked: false,
+ revoke_path: '/-/profile/personal_access_tokens/1/revoke',
+ role: 'Maintainer',
+ },
+ {
+ name: 'b',
+ scopes: ['api', 'sudo'],
+ created_at: '2022-04-21T00:00:00.000Z',
+ last_used_at: '2022-04-21T00:00:00.000Z',
+ expired: true,
+ expires_soon: false,
+ expires_at: new Date().toISOString(),
+ revoked: false,
+ revoke_path: '/-/profile/personal_access_tokens/2/revoke',
+ role: 'Maintainer',
+ },
+ ];
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(AccessTokenTableApp, {
+ provide: {
+ accessTokenType,
+ accessTokenTypePlural,
+ initialActiveAccessTokens,
+ noActiveTokensMessage,
+ showRole,
+ ...props,
+ },
+ });
+ };
+
+ const triggerSuccess = async (activeAccessTokens = defaultActiveAccessTokens) => {
+ wrapper
+ .findComponent(DomElementListener)
+ .vm.$emit(EVENT_SUCCESS, { detail: [{ active_access_tokens: activeAccessTokens }] });
+ await nextTick();
+ };
+
+ const findTable = () => wrapper.findComponent(GlTable);
+ const findHeaders = () => findTable().findAll('th > :first-child');
+ const findCells = () => findTable().findAll('td');
+ const findPagination = () => wrapper.findComponent(GlPagination);
+
+ afterEach(() => {
+ wrapper?.destroy();
+ });
+
+ it('should render the `GlTable` with default empty message', () => {
+ createComponent();
+
+ const cells = findCells();
+ expect(cells).toHaveLength(1);
+ expect(cells.at(0).text()).toBe(
+ sprintf(__('This user has no active %{accessTokenTypePlural}.'), { accessTokenTypePlural }),
+ );
+ });
+
+ it('should render the `GlTable` with custom empty message', () => {
+ const noTokensMessage = 'This group has no active access tokens.';
+ createComponent({ noActiveTokensMessage: noTokensMessage });
+
+ const cells = findCells();
+ expect(cells).toHaveLength(1);
+ expect(cells.at(0).text()).toBe(noTokensMessage);
+ });
+
+ it('should render an h5 element', () => {
+ createComponent();
+
+ expect(wrapper.find('h5').text()).toBe(
+ sprintf(__('Active %{accessTokenTypePlural} (%{totalAccessTokens})'), {
+ accessTokenTypePlural,
+ totalAccessTokens: initialActiveAccessTokens.length,
+ }),
+ );
+ });
+
+ it('should render the `GlTable` component with default 6 column headers', () => {
+ createComponent();
+
+ const headers = findHeaders();
+ expect(headers).toHaveLength(6);
+ [
+ __('Token name'),
+ __('Scopes'),
+ s__('AccessTokens|Created'),
+ __('Last Used'),
+ __('Expires'),
+ __('Action'),
+ ].forEach((text, index) => {
+ expect(headers.at(index).text()).toBe(text);
+ });
+ });
+
+ it('should render the `GlTable` component with 7 headers', () => {
+ createComponent({ showRole: true });
+
+ const headers = findHeaders();
+ expect(headers).toHaveLength(7);
+ [
+ __('Token name'),
+ __('Scopes'),
+ s__('AccessTokens|Created'),
+ __('Last Used'),
+ __('Expires'),
+ __('Role'),
+ __('Action'),
+ ].forEach((text, index) => {
+ expect(headers.at(index).text()).toBe(text);
+ });
+ });
+
+ it('`Last Used` header should contain a link and an assistive message', () => {
+ createComponent();
+
+ const headers = wrapper.findAll('th');
+ const lastUsed = headers.at(3);
+ const anchor = lastUsed.find('a');
+ const assistiveElement = lastUsed.find('.gl-sr-only');
+ expect(anchor.exists()).toBe(true);
+ expect(anchor.attributes('href')).toBe(
+ '/help/user/profile/personal_access_tokens.md#view-the-last-time-a-token-was-used',
+ );
+ expect(assistiveElement.text()).toBe(s__('AccessTokens|The last time a token was used'));
+ });
+
+ it('updates the table after a success AJAX event', async () => {
+ createComponent({ showRole: true });
+ await triggerSuccess();
+
+ const cells = findCells();
+ expect(cells).toHaveLength(14);
+
+ // First row
+ expect(cells.at(0).text()).toBe('a');
+ expect(cells.at(1).text()).toBe('api');
+ expect(cells.at(2).text()).not.toBe(__('Never'));
+ expect(cells.at(3).text()).toBe(__('Never'));
+ expect(cells.at(4).text()).toBe(__('Never'));
+ expect(cells.at(5).text()).toBe('Maintainer');
+ let anchor = cells.at(6).find('a');
+ expect(anchor.attributes()).toMatchObject({
+ 'aria-label': __('Revoke'),
+ 'data-qa-selector': __('revoke_button'),
+ href: '/-/profile/personal_access_tokens/1/revoke',
+ 'data-confirm': sprintf(
+ __(
+ 'Are you sure you want to revoke this %{accessTokenType}? This action cannot be undone.',
+ ),
+ { accessTokenType },
+ ),
+ });
+
+ expect(anchor.classes()).toContain('btn-danger-secondary');
+
+ // Second row
+ expect(cells.at(7).text()).toBe('b');
+ expect(cells.at(8).text()).toBe('api, sudo');
+ expect(cells.at(9).text()).not.toBe(__('Never'));
+ expect(cells.at(10).text()).not.toBe(__('Never'));
+ expect(cells.at(11).text()).toBe(__('Expired'));
+ expect(cells.at(12).text()).toBe('Maintainer');
+ anchor = cells.at(13).find('a');
+ expect(anchor.attributes('href')).toBe('/-/profile/personal_access_tokens/2/revoke');
+ expect(anchor.classes()).toEqual(['btn', 'btn-danger', 'btn-md', 'gl-button', 'btn-icon']);
+ });
+
+ it('sorts rows alphabetically', async () => {
+ createComponent({ showRole: true });
+ await triggerSuccess();
+
+ const cells = findCells();
+
+ // First and second rows
+ expect(cells.at(0).text()).toBe('a');
+ expect(cells.at(7).text()).toBe('b');
+
+ const headers = findHeaders();
+ await headers.at(0).trigger('click');
+ await headers.at(0).trigger('click');
+
+ // First and second rows have swapped
+ expect(cells.at(0).text()).toBe('b');
+ expect(cells.at(7).text()).toBe('a');
+ });
+
+ it('sorts rows by date', async () => {
+ createComponent({ showRole: true });
+ await triggerSuccess();
+
+ const cells = findCells();
+
+ // First and second rows
+ expect(cells.at(3).text()).toBe('Never');
+ expect(cells.at(10).text()).not.toBe('Never');
+
+ const headers = findHeaders();
+ await headers.at(3).trigger('click');
+
+ // First and second rows have swapped
+ expect(cells.at(3).text()).not.toBe('Never');
+ expect(cells.at(10).text()).toBe('Never');
+ });
+
+ it('should show the pagination component when needed', async () => {
+ createComponent();
+ expect(findPagination().exists()).toBe(false);
+
+ await triggerSuccess(Array(PAGE_SIZE).fill(defaultActiveAccessTokens[0]));
+ expect(findPagination().exists()).toBe(false);
+
+ await triggerSuccess(Array(PAGE_SIZE + 1).fill(defaultActiveAccessTokens[0]));
+ expect(findPagination().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/access_tokens/components/expires_at_field_spec.js b/spec/frontend/access_tokens/components/expires_at_field_spec.js
index fc8edcb573f..cb899d10ba7 100644
--- a/spec/frontend/access_tokens/components/expires_at_field_spec.js
+++ b/spec/frontend/access_tokens/components/expires_at_field_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlDatepicker } from '@gitlab/ui';
import ExpiresAtField from '~/access_tokens/components/expires_at_field.vue';
describe('~/access_tokens/components/expires_at_field', () => {
@@ -12,22 +13,40 @@ describe('~/access_tokens/components/expires_at_field', () => {
},
};
- const createComponent = (propsData = defaultPropsData) => {
+ const findDatepicker = () => wrapper.findComponent(GlDatepicker);
+
+ const createComponent = (props = {}) => {
wrapper = shallowMount(ExpiresAtField, {
- propsData,
+ propsData: {
+ ...defaultPropsData,
+ ...props,
+ },
});
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
it('should render datepicker with input info', () => {
+ createComponent();
+
expect(wrapper.element).toMatchSnapshot();
});
+
+ it('should set the date pickers minimum date', () => {
+ const minDate = new Date('1970-01-01');
+
+ createComponent({ minDate });
+
+ expect(findDatepicker().props('minDate')).toStrictEqual(minDate);
+ });
+
+ it('should set the date pickers maximum date', () => {
+ const maxDate = new Date('1970-01-01');
+
+ createComponent({ maxDate });
+
+ expect(findDatepicker().props('maxDate')).toStrictEqual(maxDate);
+ });
});
diff --git a/spec/frontend/access_tokens/components/new_access_token_app_spec.js b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
new file mode 100644
index 00000000000..9ccadbebf7a
--- /dev/null
+++ b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
@@ -0,0 +1,169 @@
+import { GlAlert } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import NewAccessTokenApp from '~/access_tokens/components/new_access_token_app.vue';
+import { EVENT_ERROR, EVENT_SUCCESS, FORM_SELECTOR } from '~/access_tokens/components/constants';
+import { createAlert, VARIANT_INFO } from '~/flash';
+import { __, sprintf } from '~/locale';
+import DomElementListener from '~/vue_shared/components/dom_element_listener.vue';
+import InputCopyToggleVisibility from '~/vue_shared/components/form/input_copy_toggle_visibility.vue';
+
+jest.mock('~/flash');
+
+describe('~/access_tokens/components/new_access_token_app', () => {
+ let wrapper;
+
+ const accessTokenType = 'personal access token';
+
+ const createComponent = (provide = { accessTokenType }) => {
+ wrapper = mountExtended(NewAccessTokenApp, {
+ provide,
+ });
+ };
+
+ const triggerSuccess = async (newToken = 'new token') => {
+ wrapper.find(DomElementListener).vm.$emit(EVENT_SUCCESS, { detail: [{ new_token: newToken }] });
+ await nextTick();
+ };
+
+ const triggerError = async (errors = ['1', '2']) => {
+ wrapper.find(DomElementListener).vm.$emit(EVENT_ERROR, { detail: [{ errors }] });
+ await nextTick();
+ };
+
+ beforeEach(() => {
+ // NewAccessTokenApp observes a form element
+ setHTMLFixture(`<form id="${FORM_SELECTOR.slice(1)}"><input type="submit"/></form>`);
+
+ createComponent();
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ wrapper.destroy();
+ createAlert.mockClear();
+ });
+
+ it('should render nothing', () => {
+ expect(wrapper.findComponent(InputCopyToggleVisibility).exists()).toBe(false);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+ });
+
+ describe('on success', () => {
+ it('should render `InputCopyToggleVisibility` component', async () => {
+ const newToken = '12345';
+ await triggerSuccess(newToken);
+
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+
+ const InputCopyToggleVisibilityComponent = wrapper.findComponent(InputCopyToggleVisibility);
+ expect(InputCopyToggleVisibilityComponent.props('value')).toBe(newToken);
+ expect(InputCopyToggleVisibilityComponent.props('copyButtonTitle')).toBe(
+ sprintf(__('Copy %{accessTokenType}'), { accessTokenType }),
+ );
+ expect(InputCopyToggleVisibilityComponent.props('initialVisibility')).toBe(true);
+ expect(InputCopyToggleVisibilityComponent.attributes('label')).toBe(
+ sprintf(__('Your new %{accessTokenType}'), { accessTokenType }),
+ );
+ });
+
+ it('input field should contain QA-related selectors', async () => {
+ const newToken = '12345';
+ await triggerSuccess(newToken);
+
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+
+ const inputAttributes = wrapper
+ .findByLabelText(sprintf(__('Your new %{accessTokenType}'), { accessTokenType }))
+ .attributes();
+ expect(inputAttributes).toMatchObject({
+ class: expect.stringContaining('qa-created-access-token'),
+ 'data-qa-selector': 'created_access_token_field',
+ });
+ });
+
+ it('should render an info alert', async () => {
+ await triggerSuccess();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: sprintf(__('Your new %{accessTokenType} has been created.'), {
+ accessTokenType,
+ }),
+ variant: VARIANT_INFO,
+ });
+ });
+
+ it('should reset the form', async () => {
+ const resetSpy = jest.spyOn(wrapper.vm.form, 'reset');
+
+ await triggerSuccess();
+
+ expect(resetSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('on error', () => {
+ it('should render an error alert', async () => {
+ await triggerError(['first', 'second']);
+
+ expect(wrapper.findComponent(InputCopyToggleVisibility).exists()).toBe(false);
+
+ let GlAlertComponent = wrapper.findComponent(GlAlert);
+ expect(GlAlertComponent.props('title')).toBe(__('The form contains the following errors:'));
+ expect(GlAlertComponent.props('variant')).toBe('danger');
+ let itemEls = wrapper.findAll('li');
+ expect(itemEls).toHaveLength(2);
+ expect(itemEls.at(0).text()).toBe('first');
+ expect(itemEls.at(1).text()).toBe('second');
+
+ await triggerError(['one']);
+
+ GlAlertComponent = wrapper.findComponent(GlAlert);
+ expect(GlAlertComponent.props('title')).toBe(__('The form contains the following error:'));
+ expect(GlAlertComponent.props('variant')).toBe('danger');
+ itemEls = wrapper.findAll('li');
+ expect(itemEls).toHaveLength(1);
+ });
+
+ it('the error alert should be dismissible', async () => {
+ await triggerError();
+
+ const GlAlertComponent = wrapper.findComponent(GlAlert);
+ expect(GlAlertComponent.exists()).toBe(true);
+
+ GlAlertComponent.vm.$emit('dismiss');
+ await nextTick();
+
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
+ });
+ });
+
+ describe('before error or success', () => {
+ it('should scroll to the container', async () => {
+ const containerEl = wrapper.vm.$refs.container;
+ const scrollIntoViewSpy = jest.spyOn(containerEl, 'scrollIntoView');
+
+ await triggerSuccess();
+
+ expect(scrollIntoViewSpy).toHaveBeenCalledWith(false);
+ expect(scrollIntoViewSpy).toHaveBeenCalledTimes(1);
+
+ await triggerError();
+
+ expect(scrollIntoViewSpy).toHaveBeenCalledWith(false);
+ expect(scrollIntoViewSpy).toHaveBeenCalledTimes(2);
+ });
+
+ it('should dismiss the info alert', async () => {
+ const dismissSpy = jest.fn();
+ createAlert.mockReturnValue({ dismiss: dismissSpy });
+
+ await triggerSuccess();
+ await triggerError();
+
+ expect(dismissSpy).toHaveBeenCalled();
+ expect(dismissSpy).toHaveBeenCalledTimes(1);
+ });
+ });
+});
diff --git a/spec/frontend/access_tokens/index_spec.js b/spec/frontend/access_tokens/index_spec.js
index 1d8ac7cec25..b6119f1d167 100644
--- a/spec/frontend/access_tokens/index_spec.js
+++ b/spec/frontend/access_tokens/index_spec.js
@@ -1,27 +1,118 @@
+/* eslint-disable vue/require-prop-types */
+/* eslint-disable vue/one-component-per-file */
import { createWrapper } from '@vue/test-utils';
import Vue from 'vue';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { initExpiresAtField, initProjectsField } from '~/access_tokens';
+import {
+ initAccessTokenTableApp,
+ initExpiresAtField,
+ initNewAccessTokenApp,
+ initProjectsField,
+ initTokensApp,
+} from '~/access_tokens';
+import * as AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue';
import * as ExpiresAtField from '~/access_tokens/components/expires_at_field.vue';
+import * as NewAccessTokenApp from '~/access_tokens/components/new_access_token_app.vue';
import * as ProjectsField from '~/access_tokens/components/projects_field.vue';
+import * as TokensApp from '~/access_tokens/components/tokens_app.vue';
+import { FEED_TOKEN, INCOMING_EMAIL_TOKEN, STATIC_OBJECT_TOKEN } from '~/access_tokens/constants';
+import { __, sprintf } from '~/locale';
describe('access tokens', () => {
- const FakeComponent = Vue.component('FakeComponent', {
- props: {
- inputAttrs: {
- type: Object,
- required: true,
- },
- },
- render: () => null,
- });
+ let wrapper;
- beforeEach(() => {
- window.gon = { features: { personalAccessTokensScopedToProjects: true } };
+ afterEach(() => {
+ wrapper?.destroy();
+ resetHTMLFixture();
});
- afterEach(() => {
- document.body.innerHTML = '';
+ describe('initAccessTokenTableApp', () => {
+ const accessTokenType = 'personal access token';
+ const accessTokenTypePlural = 'personal access tokens';
+ const initialActiveAccessTokens = [{ id: '1' }];
+
+ const FakeAccessTokenTableApp = Vue.component('FakeComponent', {
+ inject: [
+ 'accessTokenType',
+ 'accessTokenTypePlural',
+ 'initialActiveAccessTokens',
+ 'noActiveTokensMessage',
+ 'showRole',
+ ],
+ props: [
+ 'accessTokenType',
+ 'accessTokenTypePlural',
+ 'initialActiveAccessTokens',
+ 'noActiveTokensMessage',
+ 'showRole',
+ ],
+ render: () => null,
+ });
+ AccessTokenTableApp.default = FakeAccessTokenTableApp;
+
+ it('mounts the component and provides required values', () => {
+ setHTMLFixture(
+ `<div id="js-access-token-table-app"
+ data-access-token-type="${accessTokenType}"
+ data-access-token-type-plural="${accessTokenTypePlural}"
+ data-initial-active-access-tokens=${JSON.stringify(initialActiveAccessTokens)}
+ >
+ </div>`,
+ );
+
+ const vueInstance = initAccessTokenTableApp();
+
+ wrapper = createWrapper(vueInstance);
+ const component = wrapper.findComponent(FakeAccessTokenTableApp);
+
+ expect(component.exists()).toBe(true);
+
+ expect(component.props()).toMatchObject({
+ // Required value
+ accessTokenType,
+ accessTokenTypePlural,
+ initialActiveAccessTokens,
+
+ // Default values
+ noActiveTokensMessage: sprintf(__('This user has no active %{accessTokenTypePlural}.'), {
+ accessTokenTypePlural,
+ }),
+ showRole: false,
+ });
+ });
+
+ it('mounts the component and provides all values', () => {
+ const noActiveTokensMessage = 'This group has no active access tokens.';
+ setHTMLFixture(
+ `<div id="js-access-token-table-app"
+ data-access-token-type="${accessTokenType}"
+ data-access-token-type-plural="${accessTokenTypePlural}"
+ data-initial-active-access-tokens=${JSON.stringify(initialActiveAccessTokens)}
+ data-no-active-tokens-message="${noActiveTokensMessage}"
+ data-show-role
+ >
+ </div>`,
+ );
+
+ const vueInstance = initAccessTokenTableApp();
+
+ wrapper = createWrapper(vueInstance);
+ const component = wrapper.findComponent(FakeAccessTokenTableApp);
+
+ expect(component.exists()).toBe(true);
+ expect(component.props()).toMatchObject({
+ accessTokenType,
+ accessTokenTypePlural,
+ initialActiveAccessTokens,
+ noActiveTokensMessage,
+ showRole: true,
+ });
+ });
+
+ it('returns `null`', () => {
+ expect(initAccessTokenTableApp()).toBe(null);
+ });
});
describe.each`
@@ -30,33 +121,42 @@ describe('access tokens', () => {
${initProjectsField} | ${'js-access-tokens-projects'} | ${'projects'} | ${ProjectsField}
`('$initFunction', ({ initFunction, mountSelector, fieldName, expectedComponent }) => {
describe('when mount element exists', () => {
+ const FakeComponent = Vue.component('FakeComponent', {
+ props: ['inputAttrs'],
+ render: () => null,
+ });
+
const nameAttribute = `access_tokens[${fieldName}]`;
const idAttribute = `access_tokens_${fieldName}`;
beforeEach(() => {
- const mountEl = document.createElement('div');
- mountEl.classList.add(mountSelector);
-
- const input = document.createElement('input');
- input.setAttribute('name', nameAttribute);
- input.setAttribute('data-js-name', fieldName);
- input.setAttribute('id', idAttribute);
- input.setAttribute('placeholder', 'Foo bar');
- input.setAttribute('value', '1,2');
+ window.gon = { features: { personalAccessTokensScopedToProjects: true } };
- mountEl.appendChild(input);
-
- document.body.appendChild(mountEl);
+ setHTMLFixture(
+ `<div class="${mountSelector}">
+ <input
+ name="${nameAttribute}"
+ data-js-name="${fieldName}"
+ id="${idAttribute}"
+ placeholder="Foo bar"
+ value="1,2"
+ />
+ </div>`,
+ );
// Mock component so we don't have to deal with mocking Apollo
// eslint-disable-next-line no-param-reassign
expectedComponent.default = FakeComponent;
});
+ afterEach(() => {
+ delete window.gon;
+ });
+
it('mounts component and sets `inputAttrs` prop', async () => {
const vueInstance = await initFunction();
- const wrapper = createWrapper(vueInstance);
+ wrapper = createWrapper(vueInstance);
const component = wrapper.findComponent(FakeComponent);
expect(component.exists()).toBe(true);
@@ -75,4 +175,64 @@ describe('access tokens', () => {
});
});
});
+
+ describe('initNewAccessTokenApp', () => {
+ it('mounts the component and sets `accessTokenType` prop', () => {
+ const accessTokenType = 'personal access token';
+ setHTMLFixture(
+ `<div id="js-new-access-token-app" data-access-token-type="${accessTokenType}"></div>`,
+ );
+
+ const FakeNewAccessTokenApp = Vue.component('FakeComponent', {
+ inject: ['accessTokenType'],
+ props: ['accessTokenType'],
+ render: () => null,
+ });
+ NewAccessTokenApp.default = FakeNewAccessTokenApp;
+
+ const vueInstance = initNewAccessTokenApp();
+
+ wrapper = createWrapper(vueInstance);
+ const component = wrapper.findComponent(FakeNewAccessTokenApp);
+
+ expect(component.exists()).toBe(true);
+ expect(component.props('accessTokenType')).toEqual(accessTokenType);
+ });
+
+ it('returns `null`', () => {
+ expect(initNewAccessTokenApp()).toBe(null);
+ });
+ });
+
+ describe('initTokensApp', () => {
+ it('mounts the component and provides `tokenTypes`', () => {
+ const tokensData = {
+ [FEED_TOKEN]: FEED_TOKEN,
+ [INCOMING_EMAIL_TOKEN]: INCOMING_EMAIL_TOKEN,
+ [STATIC_OBJECT_TOKEN]: STATIC_OBJECT_TOKEN,
+ };
+ setHTMLFixture(
+ `<div id="js-tokens-app" data-tokens-data=${JSON.stringify(tokensData)}></div>`,
+ );
+
+ const FakeTokensApp = Vue.component('FakeComponent', {
+ inject: ['tokenTypes'],
+ props: ['tokenTypes'],
+ render: () => null,
+ });
+ TokensApp.default = FakeTokensApp;
+
+ const vueInstance = initTokensApp();
+
+ wrapper = createWrapper(vueInstance);
+ const component = wrapper.findComponent(FakeTokensApp);
+
+ expect(component.exists()).toBe(true);
+ expect(component.props('tokenTypes')).toEqual(tokensData);
+ });
+
+ it('returns `null`', () => {
+ expect(initTokensApp()).toBe(null);
+ });
+ });
});
diff --git a/spec/frontend/admin/application_settings/inactive_project_deletion/components/form_spec.js b/spec/frontend/admin/application_settings/inactive_project_deletion/components/form_spec.js
new file mode 100644
index 00000000000..2db997942a7
--- /dev/null
+++ b/spec/frontend/admin/application_settings/inactive_project_deletion/components/form_spec.js
@@ -0,0 +1,148 @@
+import { GlFormCheckbox } from '@gitlab/ui';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import SettingsForm from '~/admin/application_settings/inactive_project_deletion/components/form.vue';
+
+describe('Form component', () => {
+ let wrapper;
+
+ const findEnabledCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findProjectDeletionSettings = () =>
+ wrapper.findByTestId('inactive-project-deletion-settings');
+ const findMinSizeGroup = () => wrapper.findByTestId('min-size-group');
+ const findMinSizeInputGroup = () => wrapper.findByTestId('min-size-input-group');
+ const findMinSizeInput = () => wrapper.findByTestId('min-size-input');
+ const findDeleteAfterMonthsGroup = () => wrapper.findByTestId('delete-after-months-group');
+ const findDeleteAfterMonthsInputGroup = () =>
+ wrapper.findByTestId('delete-after-months-input-group');
+ const findDeleteAfterMonthsInput = () => wrapper.findByTestId('delete-after-months-input');
+ const findSendWarningEmailAfterMonthsGroup = () =>
+ wrapper.findByTestId('send-warning-email-after-months-group');
+ const findSendWarningEmailAfterMonthsInputGroup = () =>
+ wrapper.findByTestId('send-warning-email-after-months-input-group');
+ const findSendWarningEmailAfterMonthsInput = () =>
+ wrapper.findByTestId('send-warning-email-after-months-input');
+
+ const createComponent = (
+ mountFn = shallowMountExtended,
+ propsData = { deleteInactiveProjects: true },
+ ) => {
+ wrapper = mountFn(SettingsForm, { propsData });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Enable inactive project deletion', () => {
+ it('has the checkbox', () => {
+ createComponent();
+
+ expect(findEnabledCheckbox().exists()).toBe(true);
+ });
+
+ it.each([[true], [false]])(
+ 'when the checkbox is %s then the project deletion settings visibility is set to %s',
+ (visible) => {
+ createComponent(shallowMountExtended, { deleteInactiveProjects: visible });
+
+ expect(findProjectDeletionSettings().exists()).toBe(visible);
+ },
+ );
+ });
+
+ describe('Minimum size for deletion', () => {
+ beforeEach(() => {
+ createComponent(mountExtended);
+ });
+
+ it('has the minimum size input', () => {
+ expect(findMinSizeInput().exists()).toBe(true);
+ });
+
+ it('has the field description', () => {
+ expect(findMinSizeGroup().text()).toContain('Delete inactive projects that exceed');
+ });
+
+ it('has the appended text on the field', () => {
+ expect(findMinSizeInputGroup().text()).toContain('MB');
+ });
+
+ it.each`
+ value | valid
+ ${'0'} | ${true}
+ ${'250'} | ${true}
+ ${'-1'} | ${false}
+ `(
+ 'when the minimum size input has a value of $value, then its validity should be $valid',
+ async ({ value, valid }) => {
+ await findMinSizeInput().find('input').setValue(value);
+
+ expect(findMinSizeGroup().classes('is-valid')).toBe(valid);
+ expect(findMinSizeInput().classes('is-valid')).toBe(valid);
+ },
+ );
+ });
+
+ describe('Delete project after', () => {
+ beforeEach(() => {
+ createComponent(mountExtended);
+ });
+
+ it('has the delete after months input', () => {
+ expect(findDeleteAfterMonthsInput().exists()).toBe(true);
+ });
+
+ it('has the appended text on the field', () => {
+ expect(findDeleteAfterMonthsInputGroup().text()).toContain('months');
+ });
+
+ it.each`
+ value | valid
+ ${'0'} | ${false}
+ ${'1'} | ${false /* Less than the default send warning email months */}
+ ${'2'} | ${true}
+ `(
+ 'when the delete after months input has a value of $value, then its validity should be $valid',
+ async ({ value, valid }) => {
+ await findDeleteAfterMonthsInput().find('input').setValue(value);
+
+ expect(findDeleteAfterMonthsGroup().classes('is-valid')).toBe(valid);
+ expect(findDeleteAfterMonthsInput().classes('is-valid')).toBe(valid);
+ },
+ );
+ });
+
+ describe('Send warning email', () => {
+ beforeEach(() => {
+ createComponent(mountExtended);
+ });
+
+ it('has the send warning email after months input', () => {
+ expect(findSendWarningEmailAfterMonthsInput().exists()).toBe(true);
+ });
+
+ it('has the field description', () => {
+ expect(findSendWarningEmailAfterMonthsGroup().text()).toContain(
+ 'Send email to maintainers after project is inactive for',
+ );
+ });
+
+ it('has the appended text on the field', () => {
+ expect(findSendWarningEmailAfterMonthsInputGroup().text()).toContain('months');
+ });
+
+ it.each`
+ value | valid
+ ${'2'} | ${true}
+ ${'0'} | ${false}
+ `(
+ 'when the send warning email after months input has a value of $value, then its validity should be $valid',
+ async ({ value, valid }) => {
+ await findSendWarningEmailAfterMonthsInput().find('input').setValue(value);
+
+ expect(findSendWarningEmailAfterMonthsGroup().classes('is-valid')).toBe(valid);
+ expect(findSendWarningEmailAfterMonthsInput().classes('is-valid')).toBe(valid);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/admin/users/index_spec.js b/spec/frontend/admin/users/index_spec.js
index 06dbadd6d3d..961fa96acdd 100644
--- a/spec/frontend/admin/users/index_spec.js
+++ b/spec/frontend/admin/users/index_spec.js
@@ -12,8 +12,8 @@ describe('initAdminUsersApp', () => {
beforeEach(() => {
el = document.createElement('div');
- el.setAttribute('data-users', JSON.stringify(users));
- el.setAttribute('data-paths', JSON.stringify(paths));
+ el.dataset.users = JSON.stringify(users);
+ el.dataset.paths = JSON.stringify(paths);
wrapper = createWrapper(initAdminUsersApp(el));
});
@@ -40,8 +40,8 @@ describe('initAdminUserActions', () => {
beforeEach(() => {
el = document.createElement('div');
- el.setAttribute('data-user', JSON.stringify(user));
- el.setAttribute('data-paths', JSON.stringify(paths));
+ el.dataset.user = JSON.stringify(user);
+ el.dataset.paths = JSON.stringify(paths);
wrapper = createWrapper(initAdminUserActions(el));
});
diff --git a/spec/frontend/analytics/usage_trends/components/usage_counts_spec.js b/spec/frontend/analytics/usage_trends/components/usage_counts_spec.js
index 703767dab47..f4cbc56be5c 100644
--- a/spec/frontend/analytics/usage_trends/components/usage_counts_spec.js
+++ b/spec/frontend/analytics/usage_trends/components/usage_counts_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlSkeletonLoader } from '@gitlab/ui';
import { GlSingleStat } from '@gitlab/ui/dist/charts';
import { shallowMount } from '@vue/test-utils';
import UsageCounts from '~/analytics/usage_trends/components/usage_counts.vue';
@@ -30,7 +30,7 @@ describe('UsageCounts', () => {
wrapper.destroy();
});
- const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoading);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findAllSingleStats = () => wrapper.findAllComponents(GlSingleStat);
describe('while loading', () => {
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 5f162f498c4..1f92010b771 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -2,7 +2,6 @@ import MockAdapter from 'axios-mock-adapter';
import Api, { DEFAULT_PER_PAGE } from '~/api';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
-import createFlash from '~/flash';
jest.mock('~/flash');
@@ -608,30 +607,10 @@ describe('Api', () => {
},
]);
- return new Promise((resolve) => {
- Api.groupProjects(groupId, query, {}, (response) => {
- expect(response.length).toBe(1);
- expect(response[0].name).toBe('test');
- resolve();
- });
- });
- });
-
- it('uses flesh on error by default', async () => {
- const groupId = '123456';
- const query = 'dummy query';
- const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
- const flashCallback = (callCount) => {
- expect(createFlash).toHaveBeenCalledTimes(callCount);
- createFlash.mockClear();
- };
-
- mock.onGet(expectedUrl).reply(500, null);
-
- const response = await Api.groupProjects(groupId, query, {}, () => {}).then(() => {
- flashCallback(1);
+ return Api.groupProjects(groupId, query, {}).then((response) => {
+ expect(response.data.length).toBe(1);
+ expect(response.data[0].name).toBe('test');
});
- expect(response).toBeUndefined();
});
it('NOT uses flesh on error with param useCustomErrorHandler', async () => {
@@ -640,7 +619,7 @@ describe('Api', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
mock.onGet(expectedUrl).reply(500, null);
- const apiCall = Api.groupProjects(groupId, query, {}, () => {}, true);
+ const apiCall = Api.groupProjects(groupId, query, {});
await expect(apiCall).rejects.toThrow();
});
});
diff --git a/spec/frontend/authentication/two_factor_auth/index_spec.js b/spec/frontend/authentication/two_factor_auth/index_spec.js
index 0ff9d60f409..f9a6b2df662 100644
--- a/spec/frontend/authentication/two_factor_auth/index_spec.js
+++ b/spec/frontend/authentication/two_factor_auth/index_spec.js
@@ -15,8 +15,8 @@ describe('initRecoveryCodes', () => {
beforeEach(() => {
el = document.createElement('div');
el.setAttribute('class', 'js-2fa-recovery-codes');
- el.setAttribute('data-codes', codesJsonString);
- el.setAttribute('data-profile-account-path', profileAccountPath);
+ el.dataset.codes = codesJsonString;
+ el.dataset.profileAccountPath = profileAccountPath;
document.body.appendChild(el);
wrapper = createWrapper(initRecoveryCodes());
diff --git a/spec/frontend/awards_handler_spec.js b/spec/frontend/awards_handler_spec.js
index 5d657745615..b14bc5122b9 100644
--- a/spec/frontend/awards_handler_spec.js
+++ b/spec/frontend/awards_handler_spec.js
@@ -57,6 +57,18 @@ describe('AwardsHandler', () => {
d: 'white question mark ornament',
u: '6.0',
},
+ thumbsup: {
+ c: 'people',
+ e: '👍',
+ d: 'thumbs up sign',
+ u: '6.0',
+ },
+ thumbsdown: {
+ c: 'people',
+ e: '👎',
+ d: 'thumbs down sign',
+ u: '6.0',
+ },
};
const openAndWaitForEmojiMenu = (sel = '.js-add-award') => {
@@ -296,6 +308,23 @@ describe('AwardsHandler', () => {
awardsHandler.searchEmojis('👼');
expect($('[data-name=angel]').is(':visible')).toBe(true);
});
+
+ it('should show positive intent emoji first', async () => {
+ await openAndWaitForEmojiMenu();
+
+ awardsHandler.searchEmojis('thumb');
+
+ const $menu = $('.emoji-menu');
+ const $thumbsUpItem = $menu.find('[data-name=thumbsup]');
+ const $thumbsDownItem = $menu.find('[data-name=thumbsdown]');
+
+ expect($thumbsUpItem.is(':visible')).toBe(true);
+ expect($thumbsDownItem.is(':visible')).toBe(true);
+
+ expect($thumbsUpItem.parents('.emoji-menu-list-item').index()).toBeLessThan(
+ $thumbsDownItem.parents('.emoji-menu-list-item').index(),
+ );
+ });
});
describe('emoji menu', () => {
diff --git a/spec/frontend/batch_comments/components/submit_dropdown_spec.js b/spec/frontend/batch_comments/components/submit_dropdown_spec.js
new file mode 100644
index 00000000000..4f5ff797230
--- /dev/null
+++ b/spec/frontend/batch_comments/components/submit_dropdown_spec.js
@@ -0,0 +1,69 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import SubmitDropdown from '~/batch_comments/components/submit_dropdown.vue';
+
+Vue.use(Vuex);
+
+let wrapper;
+let publishReview;
+
+function factory() {
+ publishReview = jest.fn();
+
+ const store = new Vuex.Store({
+ getters: {
+ getNotesData: () => ({
+ markdownDocsPath: '/markdown/docs',
+ quickActionsDocsPath: '/quickactions/docs',
+ }),
+ getNoteableData: () => ({ id: 1, preview_note_path: '/preview' }),
+ noteableType: () => 'merge_request',
+ },
+ modules: {
+ batchComments: {
+ namespaced: true,
+ actions: {
+ publishReview,
+ },
+ },
+ },
+ });
+ wrapper = mountExtended(SubmitDropdown, {
+ store,
+ });
+}
+
+const findCommentTextarea = () => wrapper.findByTestId('comment-textarea');
+const findSubmitButton = () => wrapper.findByTestId('submit-review-button');
+const findForm = () => wrapper.findByTestId('submit-gl-form');
+
+describe('Batch comments submit dropdown', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('calls publishReview with note data', async () => {
+ factory();
+
+ findCommentTextarea().setValue('Hello world');
+
+ await findForm().vm.$emit('submit', { preventDefault: jest.fn() });
+
+ expect(publishReview).toHaveBeenCalledWith(expect.anything(), {
+ noteable_type: 'merge_request',
+ noteable_id: 1,
+ note: 'Hello world',
+ });
+ });
+
+ it('sets submit dropdown to loading', async () => {
+ factory();
+
+ findCommentTextarea().setValue('Hello world');
+
+ await findForm().vm.$emit('submit', { preventDefault: jest.fn() });
+
+ expect(findSubmitButton().props('loading')).toBe(true);
+ });
+});
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
index e9535d8cc12..172b510645d 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
@@ -179,6 +179,16 @@ describe('Batch comments store actions', () => {
});
});
+ it('calls service with notes data', () => {
+ jest.spyOn(axios, 'post');
+
+ return actions
+ .publishReview({ dispatch, commit, getters, rootGetters }, { note: 'test' })
+ .then(() => {
+ expect(axios.post.mock.calls[0]).toEqual(['http://test.host', { note: 'test' }]);
+ });
+ });
+
it('dispatches error commits', () => {
mock.onAny().reply(500);
diff --git a/spec/frontend/behaviors/markdown/render_mermaid_spec.js b/spec/frontend/behaviors/markdown/render_mermaid_spec.js
deleted file mode 100644
index 51a345cab0e..00000000000
--- a/spec/frontend/behaviors/markdown/render_mermaid_spec.js
+++ /dev/null
@@ -1,25 +0,0 @@
-import { initMermaid } from '~/behaviors/markdown/render_mermaid';
-import * as ColorUtils from '~/lib/utils/color_utils';
-
-describe('Render mermaid diagrams for Gitlab Flavoured Markdown', () => {
- it.each`
- darkMode | expectedTheme
- ${false} | ${'neutral'}
- ${true} | ${'dark'}
- `('is $darkMode $expectedTheme', async ({ darkMode, expectedTheme }) => {
- jest.spyOn(ColorUtils, 'darkModeEnabled').mockImplementation(() => darkMode);
-
- const mermaid = {
- initialize: jest.fn(),
- };
-
- await initMermaid(mermaid);
-
- expect(mermaid.initialize).toHaveBeenCalledTimes(1);
- expect(mermaid.initialize).toHaveBeenCalledWith(
- expect.objectContaining({
- theme: expectedTheme,
- }),
- );
- });
-});
diff --git a/spec/frontend/blob/blob_file_dropzone_spec.js b/spec/frontend/blob/blob_file_dropzone_spec.js
deleted file mode 100644
index d6fc824258b..00000000000
--- a/spec/frontend/blob/blob_file_dropzone_spec.js
+++ /dev/null
@@ -1,49 +0,0 @@
-import $ from 'jquery';
-import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import BlobFileDropzone from '~/blob/blob_file_dropzone';
-
-describe('BlobFileDropzone', () => {
- let dropzone;
- let replaceFileButton;
-
- beforeEach(() => {
- loadHTMLFixture('blob/show.html');
- const form = $('.js-upload-blob-form');
- // eslint-disable-next-line no-new
- new BlobFileDropzone(form, 'POST');
- dropzone = $('.js-upload-blob-form .dropzone').get(0).dropzone;
- dropzone.processQueue = jest.fn();
- replaceFileButton = $('#submit-all');
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- describe('submit button', () => {
- it('requires file', () => {
- jest.spyOn(window, 'alert').mockImplementation(() => {});
-
- replaceFileButton.click();
-
- expect(window.alert).toHaveBeenCalled();
- });
-
- it('is disabled while uploading', () => {
- jest.spyOn(window, 'alert').mockImplementation(() => {});
-
- const file = new File([], 'some-file.jpg');
- const fakeEvent = $.Event('drop', {
- dataTransfer: { files: [file] },
- });
-
- dropzone.listeners[0].events.drop(fakeEvent);
-
- replaceFileButton.click();
-
- expect(window.alert).not.toHaveBeenCalled();
- expect(replaceFileButton.is(':disabled')).toEqual(true);
- expect(dropzone.processQueue).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
index d698ee72ea4..fdbb9bdd0d0 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
@@ -7,7 +7,7 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
<file-icon-stub
aria-hidden="true"
- cssclasses="mr-2"
+ cssclasses="gl-mr-3"
filemode=""
filename="foo/bar/dummy.md"
size="16"
@@ -32,7 +32,7 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
/>
<small
- class="mr-2"
+ class="gl-mr-3"
>
a lot
</small>
diff --git a/spec/frontend/blob/components/blob_header_default_actions_spec.js b/spec/frontend/blob/components/blob_header_default_actions_spec.js
index af605b257de..aa538facae2 100644
--- a/spec/frontend/blob/components/blob_header_default_actions_spec.js
+++ b/spec/frontend/blob/components/blob_header_default_actions_spec.js
@@ -88,6 +88,14 @@ describe('Blob Header Default Actions', () => {
expect(findCopyButton().exists()).toBe(false);
expect(findViewRawButton().exists()).toBe(false);
});
+
+ it('emits a copy event if overrideCopy is set to true', () => {
+ createComponent({ overrideCopy: true });
+ jest.spyOn(wrapper.vm, '$emit');
+ findCopyButton().vm.$emit('click');
+
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('copy');
+ });
});
describe('view on environment button', () => {
diff --git a/spec/frontend/blob/components/table_contents_spec.js b/spec/frontend/blob/components/table_contents_spec.js
index 358ac31819c..2cbac809a0d 100644
--- a/spec/frontend/blob/components/table_contents_spec.js
+++ b/spec/frontend/blob/components/table_contents_spec.js
@@ -11,7 +11,7 @@ function createComponent() {
}
async function setLoaded(loaded) {
- document.querySelector('.blob-viewer').setAttribute('data-loaded', loaded);
+ document.querySelector('.blob-viewer').dataset.loaded = loaded;
await nextTick();
}
@@ -53,7 +53,7 @@ describe('Markdown table of contents component', () => {
it('does not show dropdown when viewing non-rich content', async () => {
createComponent();
- document.querySelector('.blob-viewer').setAttribute('data-type', 'simple');
+ document.querySelector('.blob-viewer').dataset.type = 'simple';
await setLoaded(true);
diff --git a/spec/frontend/blob/csv/csv_viewer_spec.js b/spec/frontend/blob/csv/csv_viewer_spec.js
index ff96193a20c..9364f76da5e 100644
--- a/spec/frontend/blob/csv/csv_viewer_spec.js
+++ b/spec/frontend/blob/csv/csv_viewer_spec.js
@@ -44,7 +44,7 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
describe('when the CSV contains errors', () => {
it('should render alert with correct props', async () => {
createComponent({ csv: brokenCsv });
- await nextTick;
+ await nextTick();
expect(findAlert().props()).toMatchObject({
papaParseErrors: [{ code: 'UndetectableDelimiter' }],
@@ -55,14 +55,14 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
describe('when the CSV contains no errors', () => {
it('should not render alert', async () => {
createComponent();
- await nextTick;
+ await nextTick();
expect(findAlert().exists()).toBe(false);
});
it('renders the CSV table with the correct attributes', async () => {
createComponent();
- await nextTick;
+ await nextTick();
expect(findCsvTable().attributes()).toMatchObject({
'empty-text': 'No CSV data to display.',
@@ -72,7 +72,7 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
it('renders the CSV table with the correct content', async () => {
createComponent({ mountFunction: mount });
- await nextTick;
+ await nextTick();
expect(getAllByRole(wrapper.element, 'row', { name: /One/i })).toHaveLength(1);
expect(getAllByRole(wrapper.element, 'row', { name: /Two/i })).toHaveLength(1);
@@ -93,7 +93,7 @@ describe('app/assets/javascripts/blob/csv/csv_viewer.vue', () => {
skipEmptyLines: true,
complete: expect.any(Function),
});
- await nextTick;
+ await nextTick();
expect(wrapper.vm.items).toEqual(validCsv.split(','));
});
});
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
index 5f6baf3f63d..b2559af182b 100644
--- a/spec/frontend/blob/viewer/index_spec.js
+++ b/spec/frontend/blob/viewer/index_spec.js
@@ -80,9 +80,9 @@ describe('Blob viewer', () => {
return asyncClick()
.then(() => asyncClick())
.then(() => {
- expect(
- document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
- ).toBe('true');
+ expect(document.querySelector('.blob-viewer[data-type="simple"]').dataset.loaded).toBe(
+ 'true',
+ );
});
});
diff --git a/spec/frontend/boards/components/board_column_spec.js b/spec/frontend/boards/components/board_column_spec.js
index f1964daa8b2..c13f7caba76 100644
--- a/spec/frontend/boards/components/board_column_spec.js
+++ b/spec/frontend/boards/components/board_column_spec.js
@@ -20,8 +20,6 @@ describe('Board Column Component', () => {
};
const createComponent = ({ listType = ListType.backlog, collapsed = false } = {}) => {
- const boardId = '1';
-
const listMock = {
...listObj,
listType,
@@ -39,9 +37,6 @@ describe('Board Column Component', () => {
disabled: false,
list: listMock,
},
- provide: {
- boardId,
- },
});
};
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 6a659623b53..fdc16b46167 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,4 +1,6 @@
import { GlModal } from '@gitlab/ui';
+import Vue from 'vue';
+import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -8,7 +10,6 @@ import { formType } from '~/boards/constants';
import createBoardMutation from '~/boards/graphql/board_create.mutation.graphql';
import destroyBoardMutation from '~/boards/graphql/board_destroy.mutation.graphql';
import updateBoardMutation from '~/boards/graphql/board_update.mutation.graphql';
-import { createStore } from '~/boards/stores';
import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
@@ -16,6 +17,8 @@ jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn().mockName('visitUrlMock'),
}));
+Vue.use(Vuex);
+
const currentBoard = {
id: 'gid://gitlab/Board/1',
name: 'test',
@@ -46,11 +49,18 @@ describe('BoardForm', () => {
const findDeleteConfirmation = () => wrapper.findByTestId('delete-confirmation-message');
const findInput = () => wrapper.find('#board-new-name');
- const store = createStore({
+ const setBoardMock = jest.fn();
+ const setErrorMock = jest.fn();
+
+ const store = new Vuex.Store({
getters: {
isGroupBoard: () => true,
isProjectBoard: () => false,
},
+ actions: {
+ setBoard: setBoardMock,
+ setError: setErrorMock,
+ },
});
const createComponent = (props, data) => {
@@ -168,7 +178,7 @@ describe('BoardForm', () => {
expect(mutate).not.toHaveBeenCalled();
});
- it('calls a correct GraphQL mutation and redirects to correct page from existing board', async () => {
+ it('calls a correct GraphQL mutation and sets board in state', async () => {
createComponent({ canAdminBoard: true, currentPage: formType.new });
fillForm();
@@ -184,13 +194,12 @@ describe('BoardForm', () => {
});
await waitForPromises();
- expect(visitUrl).toHaveBeenCalledWith('test-path');
+ expect(setBoardMock).toHaveBeenCalledTimes(1);
});
- it('shows a GlAlert if GraphQL mutation fails', async () => {
+ it('sets error in state if GraphQL mutation fails', async () => {
mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
createComponent({ canAdminBoard: true, currentPage: formType.new });
- jest.spyOn(wrapper.vm, 'setError').mockImplementation(() => {});
fillForm();
@@ -199,8 +208,8 @@ describe('BoardForm', () => {
expect(mutate).toHaveBeenCalled();
await waitForPromises();
- expect(visitUrl).not.toHaveBeenCalled();
- expect(wrapper.vm.setError).toHaveBeenCalled();
+ expect(setBoardMock).not.toHaveBeenCalled();
+ expect(setErrorMock).toHaveBeenCalled();
});
});
});
@@ -256,7 +265,8 @@ describe('BoardForm', () => {
});
await waitForPromises();
- expect(visitUrl).toHaveBeenCalledWith('test-path');
+ expect(setBoardMock).toHaveBeenCalledTimes(1);
+ expect(global.window.location.href).not.toContain('?group_by=epic');
});
it('calls GraphQL mutation with correct parameters when issues are grouped by epic', async () => {
@@ -282,13 +292,13 @@ describe('BoardForm', () => {
});
await waitForPromises();
- expect(visitUrl).toHaveBeenCalledWith('test-path?group_by=epic');
+ expect(setBoardMock).toHaveBeenCalledTimes(1);
+ expect(global.window.location.href).toContain('?group_by=epic');
});
- it('shows a GlAlert if GraphQL mutation fails', async () => {
+ it('sets error in state if GraphQL mutation fails', async () => {
mutate = jest.fn().mockRejectedValue('Houston, we have a problem');
createComponent({ canAdminBoard: true, currentPage: formType.edit });
- jest.spyOn(wrapper.vm, 'setError').mockImplementation(() => {});
findInput().trigger('keyup.enter', { metaKey: true });
@@ -297,8 +307,8 @@ describe('BoardForm', () => {
expect(mutate).toHaveBeenCalled();
await waitForPromises();
- expect(visitUrl).not.toHaveBeenCalled();
- expect(wrapper.vm.setError).toHaveBeenCalled();
+ expect(setBoardMock).not.toHaveBeenCalled();
+ expect(setErrorMock).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index f60d04af4fc..d91e81fe4d0 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -2,11 +2,10 @@ import { GlDropdown, GlLoadingIcon, GlDropdownSectionHeader } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import Vuex from 'vuex';
+import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
import BoardsSelector from '~/boards/components/boards_selector.vue';
import { BoardType } from '~/boards/constants';
-import groupBoardQuery from '~/boards/graphql/group_board.query.graphql';
-import projectBoardQuery from '~/boards/graphql/project_board.query.graphql';
import groupBoardsQuery from '~/boards/graphql/group_boards.query.graphql';
import projectBoardsQuery from '~/boards/graphql/project_boards.query.graphql';
import groupRecentBoardsQuery from '~/boards/graphql/group_recent_boards.query.graphql';
@@ -15,8 +14,7 @@ import defaultStore from '~/boards/stores';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import {
- mockGroupBoardResponse,
- mockProjectBoardResponse,
+ mockBoard,
mockGroupAllBoardsResponse,
mockProjectAllBoardsResponse,
mockGroupRecentBoardsResponse,
@@ -49,6 +47,7 @@ describe('BoardsSelector', () => {
},
state: {
boardType: isGroupBoard ? 'group' : 'project',
+ board: mockBoard,
},
});
};
@@ -65,9 +64,6 @@ describe('BoardsSelector', () => {
const getLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findDropdown = () => wrapper.findComponent(GlDropdown);
- const projectBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockProjectBoardResponse);
- const groupBoardQueryHandlerSuccess = jest.fn().mockResolvedValue(mockGroupBoardResponse);
-
const projectBoardsQueryHandlerSuccess = jest
.fn()
.mockResolvedValue(mockProjectAllBoardsResponse);
@@ -92,8 +88,6 @@ describe('BoardsSelector', () => {
projectRecentBoardsQueryHandler = projectRecentBoardsQueryHandlerSuccess,
} = {}) => {
fakeApollo = createMockApollo([
- [projectBoardQuery, projectBoardQueryHandlerSuccess],
- [groupBoardQuery, groupBoardQueryHandlerSuccess],
[projectBoardsQuery, projectBoardsQueryHandler],
[groupBoardsQuery, groupBoardsQueryHandlerSuccess],
[projectRecentBoardsQuery, projectRecentBoardsQueryHandler],
@@ -133,12 +127,13 @@ describe('BoardsSelector', () => {
describe('loading', () => {
// we are testing loading state, so don't resolve responses until after the tests
afterEach(async () => {
- await nextTick();
+ await waitForPromises();
});
- it('shows loading spinner', () => {
+ it('shows loading spinner', async () => {
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
findDropdown().vm.$emit('show');
+ await nextTick();
expect(getLoadingIcon().exists()).toBe(true);
expect(getDropdownHeaders()).toHaveLength(0);
@@ -251,23 +246,4 @@ describe('BoardsSelector', () => {
expect(notCalledHandler).not.toHaveBeenCalled();
});
});
-
- describe('fetching current board', () => {
- it.each`
- boardType | queryHandler | notCalledHandler
- ${BoardType.group} | ${groupBoardQueryHandlerSuccess} | ${projectBoardQueryHandlerSuccess}
- ${BoardType.project} | ${projectBoardQueryHandlerSuccess} | ${groupBoardQueryHandlerSuccess}
- `('fetches $boardType board', async ({ boardType, queryHandler, notCalledHandler }) => {
- createStore({
- isProjectBoard: boardType === BoardType.project,
- isGroupBoard: boardType === BoardType.group,
- });
- createComponent();
-
- await nextTick();
-
- expect(queryHandler).toHaveBeenCalled();
- expect(notCalledHandler).not.toHaveBeenCalled();
- });
- });
});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 26ad9790840..6ec39be5d29 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -144,30 +144,6 @@ export const mockProjectRecentBoardsResponse = {
},
};
-export const mockGroupBoardResponse = {
- data: {
- workspace: {
- board: {
- id: 'gid://gitlab/Board/1',
- name: 'Development',
- },
- __typename: 'Group',
- },
- },
-};
-
-export const mockProjectBoardResponse = {
- data: {
- workspace: {
- board: {
- id: 'gid://gitlab/Board/2',
- name: 'Development',
- },
- __typename: 'Project',
- },
- },
-};
-
export const mockAssigneesList = [
{
id: 2,
@@ -802,3 +778,15 @@ export const boardListQueryResponse = (issuesCount = 20) => ({
},
},
});
+
+export const epicBoardListQueryResponse = (totalWeight = 5) => ({
+ data: {
+ epicBoardList: {
+ __typename: 'EpicList',
+ id: 'gid://gitlab/Boards::EpicList/3',
+ metadata: {
+ totalWeight,
+ },
+ },
+ },
+});
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index eacf9db191e..e48b946ff1b 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -77,7 +77,7 @@ describe('fetchBoard', () => {
},
};
- it('should commit mutation RECEIVE_BOARD_SUCCESS and dispatch setBoardConfig on success', async () => {
+ it('should commit mutation REQUEST_CURRENT_BOARD and dispatch setBoard on success', async () => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
await testAction({
@@ -85,11 +85,10 @@ describe('fetchBoard', () => {
payload,
expectedMutations: [
{
- type: types.RECEIVE_BOARD_SUCCESS,
- payload: mockBoard,
+ type: types.REQUEST_CURRENT_BOARD,
},
],
- expectedActions: [{ type: 'setBoardConfig', payload: mockBoard }],
+ expectedActions: [{ type: 'setBoard', payload: mockBoard }],
});
});
@@ -101,6 +100,9 @@ describe('fetchBoard', () => {
payload,
expectedMutations: [
{
+ type: types.REQUEST_CURRENT_BOARD,
+ },
+ {
type: types.RECEIVE_BOARD_FAILURE,
},
],
@@ -133,6 +135,20 @@ describe('setBoardConfig', () => {
});
});
+describe('setBoard', () => {
+ it('dispatches setBoardConfig', () => {
+ return testAction({
+ action: actions.setBoard,
+ payload: mockBoard,
+ expectedMutations: [{ type: types.RECEIVE_BOARD_SUCCESS, payload: mockBoard }],
+ expectedActions: [
+ { type: 'setBoardConfig', payload: mockBoard },
+ { type: 'performSearch', payload: { resetLists: true } },
+ ],
+ });
+ });
+});
+
describe('setFilters', () => {
it.each([
[
@@ -172,7 +188,11 @@ describe('performSearch', () => {
{},
{},
[],
- [{ type: 'setFilters', payload: {} }, { type: 'fetchLists' }, { type: 'resetIssues' }],
+ [
+ { type: 'setFilters', payload: {} },
+ { type: 'fetchLists', payload: { resetLists: false } },
+ { type: 'resetIssues' },
+ ],
);
});
});
@@ -955,10 +975,6 @@ describe('fetchItemsForList', () => {
state,
[
{
- type: types.RESET_ITEMS_FOR_LIST,
- payload: listId,
- },
- {
type: types.REQUEST_ITEMS_FOR_LIST,
payload: { listId, fetchNext: false },
},
@@ -980,10 +996,6 @@ describe('fetchItemsForList', () => {
state,
[
{
- type: types.RESET_ITEMS_FOR_LIST,
- payload: listId,
- },
- {
type: types.REQUEST_ITEMS_FOR_LIST,
payload: { listId, fetchNext: false },
},
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index 738737bf4b6..7d79993a0ee 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -34,6 +34,14 @@ describe('Board Store Mutations', () => {
state = defaultState();
});
+ describe('REQUEST_CURRENT_BOARD', () => {
+ it('Should set isBoardLoading state to true', () => {
+ mutations[types.REQUEST_CURRENT_BOARD](state);
+
+ expect(state.isBoardLoading).toBe(true);
+ });
+ });
+
describe('RECEIVE_BOARD_SUCCESS', () => {
it('Should set board to state', () => {
mutations[types.RECEIVE_BOARD_SUCCESS](state, mockBoard);
@@ -292,24 +300,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('RESET_ITEMS_FOR_LIST', () => {
- it('should remove issues from boardItemsByListId state', () => {
- const listId = 'gid://gitlab/List/1';
- const boardItemsByListId = {
- [listId]: [mockIssue.id],
- };
-
- state = {
- ...state,
- boardItemsByListId,
- };
-
- mutations[types.RESET_ITEMS_FOR_LIST](state, listId);
-
- expect(state.boardItemsByListId[listId]).toEqual([]);
- });
- });
-
describe('REQUEST_ITEMS_FOR_LIST', () => {
const listId = 'gid://gitlab/List/1';
const boardItemsByListId = {
diff --git a/spec/frontend/cascading_settings/components/lock_popovers_spec.js b/spec/frontend/cascading_settings/components/lock_popovers_spec.js
index 585e6ac505b..182e3c1c8ff 100644
--- a/spec/frontend/cascading_settings/components/lock_popovers_spec.js
+++ b/spec/frontend/cascading_settings/components/lock_popovers_spec.js
@@ -21,12 +21,12 @@ describe('LockPopovers', () => {
};
if (lockedByApplicationSetting) {
- popoverMountEl.setAttribute('data-popover-data', JSON.stringify(popoverData));
+ popoverMountEl.dataset.popoverData = JSON.stringify(popoverData);
} else if (lockedByAncestor) {
- popoverMountEl.setAttribute(
- 'data-popover-data',
- JSON.stringify({ ...popoverData, ancestor_namespace: mockNamespace }),
- );
+ popoverMountEl.dataset.popoverData = JSON.stringify({
+ ...popoverData,
+ ancestor_namespace: mockNamespace,
+ });
}
document.body.appendChild(popoverMountEl);
diff --git a/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci_variable_list/components/legacy_ci_environments_dropdown_spec.js
index e7e4897abfa..b3e23ba4201 100644
--- a/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
+++ b/spec/frontend/ci_variable_list/components/legacy_ci_environments_dropdown_spec.js
@@ -2,7 +2,7 @@ import { GlDropdown, GlDropdownItem, GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
+import LegacyCiEnvironmentsDropdown from '~/ci_variable_list/components/legacy_ci_environments_dropdown.vue';
Vue.use(Vuex);
@@ -20,7 +20,7 @@ describe('Ci environments dropdown', () => {
},
});
- wrapper = mount(CiEnvironmentsDropdown, {
+ wrapper = mount(LegacyCiEnvironmentsDropdown, {
store,
propsData: {
value: term,
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js
index d26378d9382..42c6501dcce 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js
@@ -4,7 +4,7 @@ import Vue from 'vue';
import Vuex from 'vuex';
import { mockTracking } from 'helpers/tracking_helper';
import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
-import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
+import LegacyCiVariableModal from '~/ci_variable_list/components/legacy_ci_variable_modal.vue';
import {
AWS_ACCESS_KEY_ID,
EVENT_LABEL,
@@ -30,7 +30,7 @@ describe('Ci variable modal', () => {
isGroup: options.isGroup,
environmentScopeLink: '/help/environments',
});
- wrapper = method(CiVariableModal, {
+ wrapper = method(LegacyCiVariableModal, {
attachTo: document.body,
stubs: {
GlModal: ModalStub,
@@ -42,10 +42,7 @@ describe('Ci variable modal', () => {
const findCiEnvironmentsDropdown = () => wrapper.find(CiEnvironmentsDropdown);
const findModal = () => wrapper.find(ModalStub);
- const findAddorUpdateButton = () =>
- findModal()
- .findAll(GlButton)
- .wrappers.find((button) => button.props('variant') === 'confirm');
+ const findAddorUpdateButton = () => findModal().find('[data-testid="ciUpdateOrAddVariableBtn"]');
const deleteVariableButton = () =>
findModal()
.findAll(GlButton)
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js
index 13e417940a8..9c941f99982 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
+++ b/spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
-import CiVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
+import LegacyCiVariableSettings from '~/ci_variable_list/components/legacy_ci_variable_settings.vue';
import createStore from '~/ci_variable_list/store';
Vue.use(Vuex);
@@ -15,7 +15,7 @@ describe('Ci variable table', () => {
store = createStore();
store.state.isGroup = groupState;
jest.spyOn(store, 'dispatch').mockImplementation();
- wrapper = shallowMount(CiVariableSettings, {
+ wrapper = shallowMount(LegacyCiVariableSettings, {
store,
});
};
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci_variable_list/components/legacy_ci_variable_table_spec.js
index 62f9ae4eb4e..310afc8003a 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
+++ b/spec/frontend/ci_variable_list/components/legacy_ci_variable_table_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import CiVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
+import LegacyCiVariableTable from '~/ci_variable_list/components/legacy_ci_variable_table.vue';
import createStore from '~/ci_variable_list/store';
import mockData from '../services/mock_data';
@@ -14,7 +14,7 @@ describe('Ci variable table', () => {
const createComponent = () => {
store = createStore();
jest.spyOn(store, 'dispatch').mockImplementation();
- wrapper = mountExtended(CiVariableTable, {
+ wrapper = mountExtended(LegacyCiVariableTable, {
attachTo: document.body,
store,
});
diff --git a/spec/frontend/clusters/agents/components/create_token_button_spec.js b/spec/frontend/clusters/agents/components/create_token_button_spec.js
index b9a3a851e57..fb1a3aa2963 100644
--- a/spec/frontend/clusters/agents/components/create_token_button_spec.js
+++ b/spec/frontend/clusters/agents/components/create_token_button_spec.js
@@ -11,6 +11,7 @@ import {
TOKEN_NAME_LIMIT,
TOKEN_STATUS_ACTIVE,
MAX_LIST_COUNT,
+ CREATE_TOKEN_MODAL,
} from '~/clusters/agents/constants';
import createNewAgentToken from '~/clusters/agents/graphql/mutations/create_new_agent_token.mutation.graphql';
import getClusterAgentQuery from '~/clusters/agents/graphql/queries/get_cluster_agent.query.graphql';
@@ -231,7 +232,11 @@ describe('CreateTokenButton', () => {
});
it('shows agent instructions', () => {
- expect(findAgentInstructions().exists()).toBe(true);
+ expect(findAgentInstructions().props()).toMatchObject({
+ agentName,
+ agentToken: 'token-secret',
+ modalId: CREATE_TOKEN_MODAL,
+ });
});
it('renders a close button', () => {
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index 42d81900911..46ee123a12d 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -1,167 +1,44 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Remove cluster confirmation modal renders splitbutton with modal included 1`] = `
+exports[`Remove cluster confirmation modal renders buttons with modal included 1`] = `
<div
- class="gl-display-flex gl-justify-content-end"
+ class="gl-display-flex"
>
- <div
- class="dropdown b-dropdown gl-new-dropdown btn-group"
- menu-class="dropdown-menu-large"
+ <button
+ class="btn gl-mr-3 btn-danger btn-md gl-button"
+ data-testid="remove-integration-and-resources-button"
+ type="button"
>
- <button
- class="btn btn-danger btn-md gl-button split-content-button"
- type="button"
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
>
- <!---->
-
- <!---->
-
- <span
- class="gl-new-dropdown-button-text"
- >
- Remove integration and resources
- </span>
-
- <!---->
- </button>
- <button
- aria-expanded="false"
- aria-haspopup="true"
- class="btn dropdown-toggle btn-danger btn-md gl-button gl-dropdown-toggle dropdown-toggle-split"
- type="button"
- >
- <span
- class="sr-only"
- >
- Toggle dropdown
- </span>
- </button>
- <ul
- class="dropdown-menu dropdown-menu-large"
- role="menu"
- tabindex="-1"
+
+ Remove integration and resources
+
+ </span>
+ </button>
+
+ <button
+ class="btn btn-danger btn-md gl-button btn-danger-secondary"
+ data-testid="remove-integration-button"
+ type="button"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
>
- <div
- class="gl-new-dropdown-inner"
- >
- <!---->
-
- <!---->
-
- <div
- class="gl-new-dropdown-contents"
- >
- <!---->
-
- <li
- class="gl-new-dropdown-item"
- role="presentation"
- >
- <button
- class="dropdown-item"
- role="menuitem"
- type="button"
- >
- <svg
- aria-hidden="true"
- class="gl-icon s16 gl-new-dropdown-item-check-icon gl-mt-3 gl-align-self-start"
- data-testid="dropdown-item-checkbox"
- role="img"
- >
- <use
- href="#mobile-issue-close"
- />
- </svg>
-
- <!---->
-
- <!---->
-
- <div
- class="gl-new-dropdown-item-text-wrapper"
- >
- <p
- class="gl-new-dropdown-item-text-primary"
- >
- <strong>
- Remove integration and resources
- </strong>
-
- <div>
- Deletes all GitLab resources attached to this cluster during removal
- </div>
- </p>
-
- <!---->
- </div>
-
- <!---->
- </button>
- </li>
-
- <li
- class="gl-new-dropdown-divider"
- role="presentation"
- >
- <hr
- aria-orientation="horizontal"
- class="dropdown-divider"
- role="separator"
- />
- </li>
- <li
- class="gl-new-dropdown-item"
- role="presentation"
- >
- <button
- class="dropdown-item"
- role="menuitem"
- type="button"
- >
- <svg
- aria-hidden="true"
- class="gl-icon s16 gl-new-dropdown-item-check-icon gl-visibility-hidden gl-mt-3 gl-align-self-start"
- data-testid="dropdown-item-checkbox"
- role="img"
- >
- <use
- href="#mobile-issue-close"
- />
- </svg>
-
- <!---->
-
- <!---->
-
- <div
- class="gl-new-dropdown-item-text-wrapper"
- >
- <p
- class="gl-new-dropdown-item-text-primary"
- >
- <strong>
- Remove integration
- </strong>
-
- <div>
- Removes cluster from project but keeps associated resources
- </div>
- </p>
-
- <!---->
- </div>
-
- <!---->
- </button>
- </li>
-
- <!---->
- </div>
-
- <!---->
- </div>
- </ul>
- </div>
+
+ Remove integration
+
+ </span>
+ </button>
<!---->
</div>
diff --git a/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js b/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js
index 173fefe6167..53683af893a 100644
--- a/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js
+++ b/spec/frontend/clusters/components/remove_cluster_confirmation_spec.js
@@ -3,7 +3,6 @@ import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { stubComponent } from 'helpers/stub_component';
import RemoveClusterConfirmation from '~/clusters/components/remove_cluster_confirmation.vue';
-import SplitButton from '~/vue_shared/components/split_button.vue';
describe('Remove cluster confirmation modal', () => {
let wrapper;
@@ -24,14 +23,17 @@ describe('Remove cluster confirmation modal', () => {
wrapper = null;
});
- it('renders splitbutton with modal included', () => {
+ it('renders buttons with modal included', () => {
createComponent();
expect(wrapper.element).toMatchSnapshot();
});
- describe('split button dropdown', () => {
+ describe('two buttons', () => {
const findModal = () => wrapper.findComponent(GlModal);
- const findSplitButton = () => wrapper.findComponent(SplitButton);
+ const findRemoveIntegrationButton = () =>
+ wrapper.find('[data-testid="remove-integration-button"]');
+ const findRemoveIntegrationAndResourcesButton = () =>
+ wrapper.find('[data-testid="remove-integration-and-resources-button"]');
beforeEach(() => {
createComponent({
@@ -41,8 +43,8 @@ describe('Remove cluster confirmation modal', () => {
jest.spyOn(findModal().vm, 'show').mockReturnValue();
});
- it('opens modal with "cleanup" option', async () => {
- findSplitButton().vm.$emit('remove-cluster-and-cleanup');
+ it('open modal with "cleanup" option', async () => {
+ findRemoveIntegrationAndResourcesButton().trigger('click');
await nextTick();
@@ -53,8 +55,8 @@ describe('Remove cluster confirmation modal', () => {
);
});
- it('opens modal without "cleanup" option', async () => {
- findSplitButton().vm.$emit('remove-cluster');
+    it('opens modal without "cleanup" option', async () => {
+ findRemoveIntegrationButton().trigger('click');
await nextTick();
@@ -71,8 +73,8 @@ describe('Remove cluster confirmation modal', () => {
});
it('renders regular button instead', () => {
- expect(findSplitButton().exists()).toBe(false);
- expect(wrapper.find('[data-testid="btnRemove"]').exists()).toBe(true);
+ expect(findRemoveIntegrationAndResourcesButton().exists()).toBe(false);
+ expect(findRemoveIntegrationButton().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/clusters_list/components/agent_token_spec.js b/spec/frontend/clusters_list/components/agent_token_spec.js
index cdd94d33545..8d3130b45a6 100644
--- a/spec/frontend/clusters_list/components/agent_token_spec.js
+++ b/spec/frontend/clusters_list/components/agent_token_spec.js
@@ -7,6 +7,7 @@ import CodeBlock from '~/vue_shared/components/code_block.vue';
import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
const kasAddress = 'kas.example.com';
+const agentName = 'my-agent';
const agentToken = 'agent-token';
const kasVersion = '15.0.0';
const modalId = INSTALL_AGENT_MODAL_ID;
@@ -26,6 +27,7 @@ describe('InstallAgentModal', () => {
};
const propsData = {
+ agentName,
agentToken,
modalId,
};
@@ -61,7 +63,12 @@ describe('InstallAgentModal', () => {
it('renders a copy button', () => {
expect(findCopyButton().props()).toMatchObject({
title: 'Copy command',
- text: generateAgentRegistrationCommand(agentToken, kasAddress, kasVersion),
+ text: generateAgentRegistrationCommand({
+ name: agentName,
+ token: agentToken,
+ version: kasVersion,
+ address: kasAddress,
+ }),
modalId,
});
});
@@ -71,6 +78,7 @@ describe('InstallAgentModal', () => {
});
it('shows code block with agent installation command', () => {
+ expect(findCodeBlock().props('code')).toContain(`helm upgrade --install ${agentName}`);
expect(findCodeBlock().props('code')).toContain(`--set config.token=${agentToken}`);
expect(findCodeBlock().props('code')).toContain(`--set config.kasAddress=${kasAddress}`);
expect(findCodeBlock().props('code')).toContain(`--set image.tag=v${kasVersion}`);
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
index 3f3f5e0daf6..c150a7f05d0 100644
--- a/spec/frontend/clusters_list/components/clusters_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -1,9 +1,4 @@
-import {
- GlLoadingIcon,
- GlPagination,
- GlDeprecatedSkeletonLoading as GlSkeletonLoading,
- GlTableLite,
-} from '@gitlab/ui';
+import { GlLoadingIcon, GlPagination, GlSkeletonLoader, GlTableLite } from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
@@ -171,7 +166,7 @@ describe('Clusters', () => {
if (nodeSize) {
expect(size.text()).toBe(nodeSize);
} else {
- expect(size.find(GlSkeletonLoading).exists()).toBe(true);
+ expect(size.findComponent(GlSkeletonLoader).exists()).toBe(true);
}
});
});
@@ -195,7 +190,7 @@ describe('Clusters', () => {
const size = sizes.at(lineNumber);
expect(size.text()).toContain(nodeText);
- expect(size.find(GlSkeletonLoading).exists()).toBe(false);
+ expect(size.findComponent(GlSkeletonLoader).exists()).toBe(false);
});
});
@@ -221,12 +216,12 @@ describe('Clusters', () => {
describe('cluster CPU', () => {
it.each`
clusterCpu | lineNumber
- ${''} | ${0}
+ ${'Loading'} | ${0}
${'1.93 (87% free)'} | ${1}
${'3.87 (86% free)'} | ${2}
${'(% free)'} | ${3}
${'(% free)'} | ${4}
- ${''} | ${5}
+ ${'Loading'} | ${5}
`('renders total cpu for each cluster', ({ clusterCpu, lineNumber }) => {
const clusterCpus = findTable().findAll('td:nth-child(4)');
const cpuData = clusterCpus.at(lineNumber);
@@ -238,12 +233,12 @@ describe('Clusters', () => {
describe('cluster Memory', () => {
it.each`
clusterMemory | lineNumber
- ${''} | ${0}
+ ${'Loading'} | ${0}
${'5.92 (78% free)'} | ${1}
${'12.86 (79% free)'} | ${2}
${'(% free)'} | ${3}
${'(% free)'} | ${4}
- ${''} | ${5}
+ ${'Loading'} | ${5}
`('renders total memory for each cluster', ({ clusterMemory, lineNumber }) => {
const clusterMemories = findTable().findAll('td:nth-child(5)');
const memoryData = clusterMemories.at(lineNumber);
diff --git a/spec/frontend/clusters_list/components/install_agent_modal_spec.js b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
index 38f653509a8..29884675b24 100644
--- a/spec/frontend/clusters_list/components/install_agent_modal_spec.js
+++ b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
@@ -15,6 +15,7 @@ import {
EVENT_ACTIONS_SELECT,
MODAL_TYPE_EMPTY,
MODAL_TYPE_REGISTER,
+ INSTALL_AGENT_MODAL_ID,
} from '~/clusters_list/constants';
import getAgentsQuery from '~/clusters_list/graphql/queries/get_agents.query.graphql';
import getAgentConfigurations from '~/clusters_list/graphql/queries/agent_configurations.query.graphql';
@@ -222,7 +223,11 @@ describe('InstallAgentModal', () => {
});
it('shows agent instructions', () => {
- expect(findAgentInstructions().exists()).toBe(true);
+ expect(findAgentInstructions().props()).toMatchObject({
+ agentName: 'agent-name',
+ agentToken: 'mock-agent-token',
+ modalId: INSTALL_AGENT_MODAL_ID,
+ });
});
describe('error creating agent', () => {
diff --git a/spec/frontend/code_navigation/store/actions_spec.js b/spec/frontend/code_navigation/store/actions_spec.js
index c47a9e697b6..8eee61d1342 100644
--- a/spec/frontend/code_navigation/store/actions_spec.js
+++ b/spec/frontend/code_navigation/store/actions_spec.js
@@ -195,8 +195,8 @@ describe('Code navigation actions', () => {
it('commits SET_CURRENT_DEFINITION with LSIF data', () => {
target.classList.add('js-code-navigation');
- target.setAttribute('data-line-index', '0');
- target.setAttribute('data-char-index', '0');
+ target.dataset.lineIndex = '0';
+ target.dataset.charIndex = '0';
return testAction(
actions.showDefinition,
@@ -218,8 +218,8 @@ describe('Code navigation actions', () => {
it('adds hll class to target element', () => {
target.classList.add('js-code-navigation');
- target.setAttribute('data-line-index', '0');
- target.setAttribute('data-char-index', '0');
+ target.dataset.lineIndex = '0';
+ target.dataset.charIndex = '0';
return testAction(
actions.showDefinition,
@@ -243,8 +243,8 @@ describe('Code navigation actions', () => {
it('caches current target element', () => {
target.classList.add('js-code-navigation');
- target.setAttribute('data-line-index', '0');
- target.setAttribute('data-char-index', '0');
+ target.dataset.lineIndex = '0';
+ target.dataset.charIndex = '0';
return testAction(
actions.showDefinition,
diff --git a/spec/frontend/confirm_modal_spec.js b/spec/frontend/confirm_modal_spec.js
index 53991349ee5..4224fb6be2a 100644
--- a/spec/frontend/confirm_modal_spec.js
+++ b/spec/frontend/confirm_modal_spec.js
@@ -31,9 +31,9 @@ describe('ConfirmModal', () => {
buttons.forEach((x) => {
const button = document.createElement('button');
button.setAttribute('class', 'js-confirm-modal-button');
- button.setAttribute('data-path', x.path);
- button.setAttribute('data-method', x.method);
- button.setAttribute('data-modal-attributes', JSON.stringify(x.modalAttributes));
+ button.dataset.path = x.path;
+ button.dataset.method = x.method;
+ button.dataset.modalAttributes = JSON.stringify(x.modalAttributes);
button.innerHTML = 'Action';
buttonContainer.appendChild(button);
});
diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
index 7abd6b422ad..b54f7cf17c8 100644
--- a/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
+++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_link_button_spec.js.snap
@@ -16,15 +16,13 @@ exports[`content_editor/components/toolbar_link_button renders dropdown componen
<!---->
<li role=\\"presentation\\" class=\\"gl-px-3!\\">
<form tabindex=\\"-1\\" class=\\"b-dropdown-form gl-p-0\\">
- <div placeholder=\\"Link URL\\">
- <div role=\\"group\\" class=\\"input-group\\">
- <!---->
- <!----> <input type=\\"text\\" placeholder=\\"Link URL\\" class=\\"form-control gl-form-input\\">
- <div class=\\"input-group-append\\"><button type=\\"button\\" class=\\"btn btn-confirm btn-md gl-button\\">
- <!---->
- <!----> <span class=\\"gl-button-text\\">Apply</span></button></div>
- <!---->
- </div>
+ <div role=\\"group\\" class=\\"input-group\\" placeholder=\\"Link URL\\">
+ <!---->
+ <!----> <input type=\\"text\\" placeholder=\\"Link URL\\" class=\\"form-control gl-form-input\\">
+ <div class=\\"input-group-append\\"><button type=\\"button\\" class=\\"btn btn-confirm btn-md gl-button\\">
+ <!---->
+ <!----> <span class=\\"gl-button-text\\">Apply</span></button></div>
+ <!---->
</div>
</form>
</li>
diff --git a/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js b/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js
index 3a15ea45f40..646d068e795 100644
--- a/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js
+++ b/spec/frontend/content_editor/components/bubble_menus/code_block_spec.js
@@ -1,21 +1,33 @@
import { BubbleMenu } from '@tiptap/vue-2';
-import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
-import Vue from 'vue';
+import {
+ GlDropdown,
+ GlDropdownForm,
+ GlDropdownItem,
+ GlSearchBoxByType,
+ GlFormInput,
+} from '@gitlab/ui';
+import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import CodeBlockBubbleMenu from '~/content_editor/components/bubble_menus/code_block.vue';
import eventHubFactory from '~/helpers/event_hub_factory';
import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
+import Diagram from '~/content_editor/extensions/diagram';
import codeBlockLanguageLoader from '~/content_editor/services/code_block_language_loader';
import { createTestEditor, emitEditorEvent } from '../../test_utils';
+const createFakeEvent = () => ({ preventDefault: jest.fn(), stopPropagation: jest.fn() });
+
describe('content_editor/components/bubble_menus/code_block', () => {
let wrapper;
let tiptapEditor;
+ let contentEditor;
let bubbleMenu;
let eventHub;
const buildEditor = () => {
- tiptapEditor = createTestEditor({ extensions: [CodeBlockHighlight] });
+ tiptapEditor = createTestEditor({ extensions: [CodeBlockHighlight, Diagram] });
+ contentEditor = { renderDiagram: jest.fn() };
eventHub = eventHubFactory();
};
@@ -23,8 +35,12 @@ describe('content_editor/components/bubble_menus/code_block', () => {
wrapper = mountExtended(CodeBlockBubbleMenu, {
provide: {
tiptapEditor,
+ contentEditor,
eventHub,
},
+ stubs: {
+ GlDropdownItem: stubComponent(GlDropdownItem),
+ },
});
};
@@ -36,7 +52,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
checked: x.props('isChecked'),
}));
- beforeEach(() => {
+ beforeEach(async () => {
buildEditor();
buildWrapper();
});
@@ -73,6 +89,15 @@ describe('content_editor/components/bubble_menus/code_block', () => {
expect(wrapper.findComponent(GlDropdown).props('text')).toBe('Javascript');
});
+  it('selects diagram syntax for mermaid', async () => {
+ tiptapEditor.commands.insertContent('<pre lang="mermaid">test</pre>');
+ bubbleMenu = wrapper.findComponent(BubbleMenu);
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+
+ expect(wrapper.findComponent(GlDropdown).props('text')).toBe('Diagram (mermaid)');
+ });
+
it("selects Custom (syntax) if the language doesn't exist in the list", async () => {
tiptapEditor.commands.insertContent('<pre lang="nomnoml">test</pre>');
bubbleMenu = wrapper.findComponent(BubbleMenu);
@@ -104,22 +129,57 @@ describe('content_editor/components/bubble_menus/code_block', () => {
});
});
+ describe('preview button', () => {
+ it('does not appear for a regular code block', async () => {
+ tiptapEditor.commands.insertContent('<pre lang="javascript">var a = 2;</pre>');
+
+ expect(wrapper.findByTestId('preview-diagram').exists()).toBe(false);
+ });
+
+ it.each`
+ diagramType | diagramCode
+ ${'mermaid'} | ${'<pre lang="mermaid">graph TD;\n A-->B;</pre>'}
+ ${'nomnoml'} | ${'<img data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,WzxmcmFtZT5EZWNvcmF0b3IgcGF0dGVybl0=">'}
+ `('toggles preview for a $diagramType diagram', async ({ diagramType, diagramCode }) => {
+ tiptapEditor.commands.insertContent(diagramCode);
+
+ await nextTick();
+ await wrapper.findByTestId('preview-diagram').vm.$emit('click');
+
+ expect(tiptapEditor.getAttributes(Diagram.name)).toEqual({
+ isDiagram: true,
+ language: diagramType,
+ showPreview: false,
+ });
+
+ await wrapper.findByTestId('preview-diagram').vm.$emit('click');
+
+ expect(tiptapEditor.getAttributes(Diagram.name)).toEqual({
+ isDiagram: true,
+ language: diagramType,
+ showPreview: true,
+ });
+ });
+ });
+
describe('when opened and search is changed', () => {
beforeEach(async () => {
tiptapEditor.commands.insertContent('<pre lang="javascript">var a = 2;</pre>');
wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'js');
- await Vue.nextTick();
+ await nextTick();
});
it('shows dropdown items', () => {
- expect(findDropdownItemsData()).toEqual([
- { text: 'Javascript', visible: true, checked: true },
- { text: 'Java', visible: true, checked: false },
- { text: 'Javascript', visible: false, checked: false },
- { text: 'JSON', visible: true, checked: false },
- ]);
+ expect(findDropdownItemsData()).toEqual(
+ expect.arrayContaining([
+ { text: 'Javascript', visible: true, checked: true },
+ { text: 'Java', visible: true, checked: false },
+ { text: 'Javascript', visible: false, checked: false },
+ { text: 'JSON', visible: true, checked: false },
+ ]),
+ );
});
describe('when dropdown item is clicked', () => {
@@ -128,7 +188,7 @@ describe('content_editor/components/bubble_menus/code_block', () => {
findDropdownItems().at(1).vm.$emit('click');
- await Vue.nextTick();
+ await nextTick();
});
it('loads language', () => {
@@ -152,5 +212,78 @@ describe('content_editor/components/bubble_menus/code_block', () => {
expect(wrapper.findComponent(GlDropdown).props('text')).toBe('Java');
});
});
+
+ describe('Create custom type', () => {
+ beforeEach(async () => {
+ tiptapEditor.commands.insertContent('<pre lang="javascript">var a = 2;</pre>');
+
+ await wrapper.findComponent(GlDropdown).vm.show();
+ await wrapper.findByTestId('create-custom-type').trigger('click');
+ });
+
+ it('shows custom language input form and hides dropdown items', () => {
+ expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(false);
+ expect(wrapper.findComponent(GlSearchBoxByType).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownForm).exists()).toBe(true);
+ });
+
+ describe('on clicking back', () => {
+ it('hides the custom language input form and shows dropdown items', async () => {
+ await wrapper.findByRole('button', { name: 'Go back' }).trigger('click');
+
+ expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSearchBoxByType).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdownForm).exists()).toBe(false);
+ });
+ });
+
+ describe('on clicking cancel', () => {
+ it('hides the custom language input form and shows dropdown items', async () => {
+ await wrapper.findByRole('button', { name: 'Cancel' }).trigger('click');
+
+ expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSearchBoxByType).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdownForm).exists()).toBe(false);
+ });
+ });
+
+ describe('on dropdown hide', () => {
+ it('hides the form', async () => {
+ wrapper.findComponent(GlFormInput).setValue('foobar');
+ await wrapper.findComponent(GlDropdown).vm.$emit('hide');
+
+ expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSearchBoxByType).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdownForm).exists()).toBe(false);
+ });
+ });
+
+ describe('on clicking apply', () => {
+ beforeEach(async () => {
+ wrapper.findComponent(GlFormInput).setValue('foobar');
+ await wrapper.findComponent(GlDropdownForm).vm.$emit('submit', createFakeEvent());
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+ });
+
+ it('hides the custom language input form and shows dropdown items', async () => {
+ expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSearchBoxByType).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdownForm).exists()).toBe(false);
+ });
+
+ it('updates dropdown value to the custom language type', () => {
+ expect(wrapper.findComponent(GlDropdown).props('text')).toBe('Custom (foobar)');
+ });
+
+ it('updates tiptap editor to the custom language type', () => {
+ expect(tiptapEditor.getAttributes(CodeBlockHighlight.name)).toEqual(
+ expect.objectContaining({
+ language: 'foobar',
+ }),
+ );
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
new file mode 100644
index 00000000000..0334a18c9a1
--- /dev/null
+++ b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
@@ -0,0 +1,54 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ToolbarMoreDropdown from '~/content_editor/components/toolbar_more_dropdown.vue';
+import Diagram from '~/content_editor/extensions/diagram';
+import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
+import { createTestEditor, mockChainedCommands } from '../test_utils';
+
+describe('content_editor/components/toolbar_more_dropdown', () => {
+ let wrapper;
+ let tiptapEditor;
+
+ const buildEditor = () => {
+ tiptapEditor = createTestEditor({
+ extensions: [Diagram, HorizontalRule],
+ });
+ };
+
+ const buildWrapper = (propsData = {}) => {
+ wrapper = mountExtended(ToolbarMoreDropdown, {
+ provide: {
+ tiptapEditor,
+ },
+ propsData,
+ });
+ };
+
+ beforeEach(() => {
+ buildEditor();
+ buildWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each`
+ label | contentType | data
+ ${'Mermaid diagram'} | ${'diagram'} | ${{ language: 'mermaid' }}
+ ${'PlantUML diagram'} | ${'diagram'} | ${{ language: 'plantuml' }}
+ ${'Horizontal rule'} | ${'horizontalRule'} | ${undefined}
+ `('when option $label is clicked', ({ label, contentType, data }) => {
+ it(`inserts a ${contentType}`, async () => {
+ const commands = mockChainedCommands(tiptapEditor, ['setNode', 'focus', 'run']);
+
+ const btn = wrapper.findByRole('menuitem', { name: label });
+ await btn.trigger('click');
+
+ expect(commands.focus).toHaveBeenCalled();
+ expect(commands.setNode).toHaveBeenCalledWith(contentType, data);
+ expect(commands.run).toHaveBeenCalled();
+
+ expect(wrapper.emitted('execute')).toEqual([[{ contentType }]]);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js
index ec58877470c..d98a9a52aff 100644
--- a/spec/frontend/content_editor/components/top_toolbar_spec.js
+++ b/spec/frontend/content_editor/components/top_toolbar_spec.js
@@ -23,20 +23,21 @@ describe('content_editor/components/top_toolbar', () => {
});
describe.each`
- testId | controlProps
- ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }}
- ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }}
- ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike' }}
- ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
- ${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }}
- ${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
- ${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
- ${'details'} | ${{ contentType: 'details', iconName: 'details-block', label: 'Add a collapsible section', editorCommand: 'toggleDetails' }}
- ${'horizontal-rule'} | ${{ contentType: 'horizontalRule', iconName: 'dash', label: 'Add a horizontal rule', editorCommand: 'setHorizontalRule' }}
- ${'code-block'} | ${{ contentType: 'codeBlock', iconName: 'doc-code', label: 'Insert a code block', editorCommand: 'toggleCodeBlock' }}
- ${'text-styles'} | ${{}}
- ${'link'} | ${{}}
- ${'image'} | ${{}}
+ testId | controlProps
+ ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }}
+ ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }}
+ ${'strike'} | ${{ contentType: 'strike', iconName: 'strikethrough', label: 'Strikethrough', editorCommand: 'toggleStrike' }}
+ ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
+ ${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }}
+ ${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
+ ${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
+ ${'details'} | ${{ contentType: 'details', iconName: 'details-block', label: 'Add a collapsible section', editorCommand: 'toggleDetails' }}
+ ${'code-block'} | ${{ contentType: 'codeBlock', iconName: 'doc-code', label: 'Insert a code block', editorCommand: 'toggleCodeBlock' }}
+ ${'text-styles'} | ${{}}
+ ${'link'} | ${{}}
+ ${'image'} | ${{}}
+ ${'table'} | ${{}}
+ ${'more'} | ${{}}
`('given a $testId toolbar control', ({ testId, controlProps }) => {
beforeEach(() => {
buildWrapper();
diff --git a/spec/frontend/content_editor/components/wrappers/code_block_spec.js b/spec/frontend/content_editor/components/wrappers/code_block_spec.js
index a564959a3a6..17a365e12bb 100644
--- a/spec/frontend/content_editor/components/wrappers/code_block_spec.js
+++ b/spec/frontend/content_editor/components/wrappers/code_block_spec.js
@@ -1,8 +1,14 @@
import { nextTick } from 'vue';
import { NodeViewWrapper, NodeViewContent } from '@tiptap/vue-2';
-import { shallowMount } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import SandboxedMermaid from '~/behaviors/components/sandboxed_mermaid.vue';
+import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
+import Diagram from '~/content_editor/extensions/diagram';
import CodeBlockWrapper from '~/content_editor/components/wrappers/code_block.vue';
import codeBlockLanguageLoader from '~/content_editor/services/code_block_language_loader';
+import { emitEditorEvent, createTestEditor } from '../../test_utils';
jest.mock('~/content_editor/services/code_block_language_loader');
@@ -10,22 +16,43 @@ describe('content/components/wrappers/code_block', () => {
const language = 'yaml';
let wrapper;
let updateAttributesFn;
+ let tiptapEditor;
+ let contentEditor;
+ let eventHub;
+
+ const buildEditor = () => {
+ tiptapEditor = createTestEditor({ extensions: [CodeBlockHighlight, Diagram] });
+ contentEditor = { renderDiagram: jest.fn().mockResolvedValue('url/to/some/diagram') };
+ eventHub = eventHubFactory();
+ };
const createWrapper = async (nodeAttrs = { language }) => {
updateAttributesFn = jest.fn();
- wrapper = shallowMount(CodeBlockWrapper, {
+ wrapper = mountExtended(CodeBlockWrapper, {
propsData: {
+ editor: tiptapEditor,
node: {
attrs: nodeAttrs,
},
updateAttributes: updateAttributesFn,
},
+ stubs: {
+ NodeViewContent: stubComponent(NodeViewContent),
+ NodeViewWrapper: stubComponent(NodeViewWrapper),
+ },
+ provide: {
+ contentEditor,
+ tiptapEditor,
+ eventHub,
+ },
});
};
beforeEach(() => {
- codeBlockLanguageLoader.findLanguageBySyntax.mockReturnValue({ syntax: language });
+ buildEditor();
+
+ codeBlockLanguageLoader.findOrCreateLanguageBySyntax.mockReturnValue({ syntax: language });
});
afterEach(() => {
@@ -68,4 +95,56 @@ describe('content/components/wrappers/code_block', () => {
expect(updateAttributesFn).toHaveBeenCalledWith({ language });
});
+
+ describe('diagrams', () => {
+ beforeEach(() => {
+ jest.spyOn(tiptapEditor, 'isActive').mockReturnValue(true);
+ });
+
+ it('does not render a preview if showPreview: false', async () => {
+ createWrapper({ language: 'plantuml', isDiagram: true, showPreview: false });
+
+ expect(wrapper.find({ ref: 'diagramContainer' }).exists()).toBe(false);
+ });
+
+ it('does not update preview when diagram is not active', async () => {
+ createWrapper({ language: 'plantuml', isDiagram: true, showPreview: true });
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+ await nextTick();
+
+ expect(wrapper.find('img').attributes('src')).toBe('url/to/some/diagram');
+
+ jest.spyOn(tiptapEditor, 'isActive').mockReturnValue(false);
+
+ const alternateUrl = 'url/to/another/diagram';
+
+ contentEditor.renderDiagram.mockResolvedValue(alternateUrl);
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+ await nextTick();
+
+ expect(wrapper.find('img').attributes('src')).toBe('url/to/some/diagram');
+ });
+
+ it('renders an image with preview for a plantuml/kroki diagram', async () => {
+ createWrapper({ language: 'plantuml', isDiagram: true, showPreview: true });
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+ await nextTick();
+
+ expect(wrapper.find('img').attributes('src')).toBe('url/to/some/diagram');
+ expect(wrapper.find(SandboxedMermaid).exists()).toBe(false);
+ });
+
+ it('renders an iframe with preview for a mermaid diagram', async () => {
+ createWrapper({ language: 'mermaid', isDiagram: true, showPreview: true });
+
+ await emitEditorEvent({ event: 'transaction', tiptapEditor });
+ await nextTick();
+
+ expect(wrapper.find(SandboxedMermaid).props('source')).toBe('');
+ expect(wrapper.find('img').exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js b/spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js
new file mode 100644
index 00000000000..1ff750eb2ac
--- /dev/null
+++ b/spec/frontend/content_editor/components/wrappers/footnote_definition_spec.js
@@ -0,0 +1,30 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import FootnoteDefinitionWrapper from '~/content_editor/components/wrappers/footnote_definition.vue';
+
+describe('content/components/wrappers/footnote_definition', () => {
+ let wrapper;
+
+ const createWrapper = async (node = {}) => {
+ wrapper = shallowMountExtended(FootnoteDefinitionWrapper, {
+ propsData: {
+ node,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+  it('renders footnote label as a readonly element', () => {
+ const label = 'footnote';
+
+ createWrapper({
+ attrs: {
+ label,
+ },
+ });
+ expect(wrapper.text()).toContain(label);
+ expect(wrapper.findByTestId('footnote-label').attributes().contenteditable).toBe('false');
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/footnote_definition_spec.js b/spec/frontend/content_editor/extensions/footnote_definition_spec.js
new file mode 100644
index 00000000000..d3dbc56ae0e
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/footnote_definition_spec.js
@@ -0,0 +1,7 @@
+import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
+
+describe('content_editor/extensions/footnote_definition', () => {
+ it('sets the isolation option to true', () => {
+ expect(FootnoteDefinition.config.isolating).toBe(true);
+ });
+});
diff --git a/spec/frontend/content_editor/remark_markdown_processing_spec.js b/spec/frontend/content_editor/remark_markdown_processing_spec.js
index 6348b97d918..60dc540e192 100644
--- a/spec/frontend/content_editor/remark_markdown_processing_spec.js
+++ b/spec/frontend/content_editor/remark_markdown_processing_spec.js
@@ -3,6 +3,8 @@ import Blockquote from '~/content_editor/extensions/blockquote';
import BulletList from '~/content_editor/extensions/bullet_list';
import Code from '~/content_editor/extensions/code';
import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
+import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
+import FootnoteReference from '~/content_editor/extensions/footnote_reference';
import HardBreak from '~/content_editor/extensions/hard_break';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
@@ -11,11 +13,19 @@ import Italic from '~/content_editor/extensions/italic';
import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
+import Paragraph from '~/content_editor/extensions/paragraph';
import Sourcemap from '~/content_editor/extensions/sourcemap';
+import Strike from '~/content_editor/extensions/strike';
+import Table from '~/content_editor/extensions/table';
+import TableHeader from '~/content_editor/extensions/table_header';
+import TableRow from '~/content_editor/extensions/table_row';
+import TableCell from '~/content_editor/extensions/table_cell';
+import TaskList from '~/content_editor/extensions/task_list';
+import TaskItem from '~/content_editor/extensions/task_item';
import remarkMarkdownDeserializer from '~/content_editor/services/remark_markdown_deserializer';
import markdownSerializer from '~/content_editor/services/markdown_serializer';
-import { createTestEditor } from './test_utils';
+import { createTestEditor, createDocBuilder } from './test_utils';
const tiptapEditor = createTestEditor({
extensions: [
@@ -24,6 +34,8 @@ const tiptapEditor = createTestEditor({
BulletList,
Code,
CodeBlockHighlight,
+ FootnoteDefinition,
+ FootnoteReference,
HardBreak,
Heading,
HorizontalRule,
@@ -33,9 +45,72 @@ const tiptapEditor = createTestEditor({
ListItem,
OrderedList,
Sourcemap,
+ Strike,
+ Table,
+ TableRow,
+ TableHeader,
+ TableCell,
+ TaskList,
+ TaskItem,
],
});
+const {
+ builders: {
+ doc,
+ paragraph,
+ bold,
+ blockquote,
+ bulletList,
+ code,
+ codeBlock,
+ footnoteDefinition,
+ footnoteReference,
+ hardBreak,
+ heading,
+ horizontalRule,
+ image,
+ italic,
+ link,
+ listItem,
+ orderedList,
+ strike,
+ table,
+ tableRow,
+ tableHeader,
+ tableCell,
+ taskItem,
+ taskList,
+ },
+} = createDocBuilder({
+ tiptapEditor,
+ names: {
+ blockquote: { nodeType: Blockquote.name },
+ bold: { markType: Bold.name },
+ bulletList: { nodeType: BulletList.name },
+ code: { markType: Code.name },
+ codeBlock: { nodeType: CodeBlockHighlight.name },
+ footnoteDefinition: { nodeType: FootnoteDefinition.name },
+ footnoteReference: { nodeType: FootnoteReference.name },
+ hardBreak: { nodeType: HardBreak.name },
+ heading: { nodeType: Heading.name },
+ horizontalRule: { nodeType: HorizontalRule.name },
+ image: { nodeType: Image.name },
+ italic: { nodeType: Italic.name },
+ link: { markType: Link.name },
+ listItem: { nodeType: ListItem.name },
+ orderedList: { nodeType: OrderedList.name },
+ paragraph: { nodeType: Paragraph.name },
+ strike: { nodeType: Strike.name },
+ table: { nodeType: Table.name },
+ tableCell: { nodeType: TableCell.name },
+ tableHeader: { nodeType: TableHeader.name },
+ tableRow: { nodeType: TableRow.name },
+ taskItem: { nodeType: TaskItem.name },
+ taskList: { nodeType: TaskList.name },
+ },
+});
+
describe('Client side Markdown processing', () => {
const deserialize = async (content) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
@@ -52,197 +127,887 @@ describe('Client side Markdown processing', () => {
pristineDoc: document,
});
- it.each([
+ const sourceAttrs = (sourceMapKey, sourceMarkdown) => ({
+ sourceMapKey,
+ sourceMarkdown,
+ });
+
+ const examples = [
{
markdown: '__bold text__',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:13', '__bold text__'),
+ bold(sourceAttrs('0:13', '__bold text__'), 'bold text'),
+ ),
+ ),
},
{
markdown: '**bold text**',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:13', '**bold text**'),
+ bold(sourceAttrs('0:13', '**bold text**'), 'bold text'),
+ ),
+ ),
},
{
markdown: '<strong>bold text</strong>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:26', '<strong>bold text</strong>'),
+ bold(sourceAttrs('0:26', '<strong>bold text</strong>'), 'bold text'),
+ ),
+ ),
},
{
markdown: '<b>bold text</b>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:16', '<b>bold text</b>'),
+ bold(sourceAttrs('0:16', '<b>bold text</b>'), 'bold text'),
+ ),
+ ),
},
{
markdown: '_italic text_',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:13', '_italic text_'),
+ italic(sourceAttrs('0:13', '_italic text_'), 'italic text'),
+ ),
+ ),
},
{
markdown: '*italic text*',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:13', '*italic text*'),
+ italic(sourceAttrs('0:13', '*italic text*'), 'italic text'),
+ ),
+ ),
},
{
markdown: '<em>italic text</em>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:20', '<em>italic text</em>'),
+ italic(sourceAttrs('0:20', '<em>italic text</em>'), 'italic text'),
+ ),
+ ),
},
{
markdown: '<i>italic text</i>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:18', '<i>italic text</i>'),
+ italic(sourceAttrs('0:18', '<i>italic text</i>'), 'italic text'),
+ ),
+ ),
},
{
markdown: '`inline code`',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:13', '`inline code`'),
+ code(sourceAttrs('0:13', '`inline code`'), 'inline code'),
+ ),
+ ),
},
{
markdown: '**`inline code bold`**',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:22', '**`inline code bold`**'),
+ bold(
+ sourceAttrs('0:22', '**`inline code bold`**'),
+ code(sourceAttrs('2:20', '`inline code bold`'), 'inline code bold'),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: '_`inline code italics`_',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:23', '_`inline code italics`_'),
+ italic(
+ sourceAttrs('0:23', '_`inline code italics`_'),
+ code(sourceAttrs('1:22', '`inline code italics`'), 'inline code italics'),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+<i class="foo">
+ *bar*
+</i>
+ `,
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:28', '<i class="foo">\n *bar*\n</i>'),
+ italic(sourceAttrs('0:28', '<i class="foo">\n *bar*\n</i>'), '\n *bar*\n'),
+ ),
+ ),
+ },
+ {
+ markdown: `
+
+<img src="bar" alt="foo" />
+
+ `,
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:27', '<img src="bar" alt="foo" />'),
+ image({ ...sourceAttrs('0:27', '<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
+ ),
+ ),
},
{
- markdown: '__`inline code italics`__',
+ markdown: `
+- List item 1
+
+<img src="bar" alt="foo" />
+
+ `,
+ expectedDoc: doc(
+ bulletList(
+ sourceAttrs('0:13', '- List item 1'),
+ listItem(
+ sourceAttrs('0:13', '- List item 1'),
+ paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
+ ),
+ ),
+ paragraph(
+ sourceAttrs('15:42', '<img src="bar" alt="foo" />'),
+ image({ ...sourceAttrs('15:42', '<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
+ ),
+ ),
},
{
markdown: '[GitLab](https://gitlab.com "Go to GitLab")',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:43', '[GitLab](https://gitlab.com "Go to GitLab")'),
+ link(
+ {
+ ...sourceAttrs('0:43', '[GitLab](https://gitlab.com "Go to GitLab")'),
+ href: 'https://gitlab.com',
+ title: 'Go to GitLab',
+ },
+ 'GitLab',
+ ),
+ ),
+ ),
},
{
markdown: '**[GitLab](https://gitlab.com "Go to GitLab")**',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:47', '**[GitLab](https://gitlab.com "Go to GitLab")**'),
+ bold(
+ sourceAttrs('0:47', '**[GitLab](https://gitlab.com "Go to GitLab")**'),
+ link(
+ {
+ ...sourceAttrs('2:45', '[GitLab](https://gitlab.com "Go to GitLab")'),
+ href: 'https://gitlab.com',
+ title: 'Go to GitLab',
+ },
+ 'GitLab',
+ ),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: 'www.commonmark.org',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:18', 'www.commonmark.org'),
+ link(
+ {
+ ...sourceAttrs('0:18', 'www.commonmark.org'),
+ href: 'http://www.commonmark.org',
+ },
+ 'www.commonmark.org',
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: 'Visit www.commonmark.org/help for more information.',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:51', 'Visit www.commonmark.org/help for more information.'),
+ 'Visit ',
+ link(
+ {
+ ...sourceAttrs('6:29', 'www.commonmark.org/help'),
+ href: 'http://www.commonmark.org/help',
+ },
+ 'www.commonmark.org/help',
+ ),
+ ' for more information.',
+ ),
+ ),
+ },
+ {
+ markdown: 'hello@mail+xyz.example isn’t valid, but hello+xyz@mail.example is.',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:66', 'hello@mail+xyz.example isn’t valid, but hello+xyz@mail.example is.'),
+ 'hello@mail+xyz.example isn’t valid, but ',
+ link(
+ {
+ ...sourceAttrs('40:62', 'hello+xyz@mail.example'),
+ href: 'mailto:hello+xyz@mail.example',
+ },
+ 'hello+xyz@mail.example',
+ ),
+ ' is.',
+ ),
+ ),
+ },
+ {
+ markdown: '[https://gitlab.com>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:20', '[https://gitlab.com>'),
+ '[',
+ link(
+ {
+ ...sourceAttrs(),
+ href: 'https://gitlab.com',
+ },
+ 'https://gitlab.com',
+ ),
+ '>',
+ ),
+ ),
},
{
markdown: `
This is a paragraph with a\\
hard line break`,
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:43', 'This is a paragraph with a\\\nhard line break'),
+ 'This is a paragraph with a',
+ hardBreak(sourceAttrs('26:28', '\\\n')),
+ '\nhard line break',
+ ),
+ ),
},
{
markdown: '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:57', '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
+ image({
+ ...sourceAttrs('0:57', '![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
+ alt: 'GitLab Logo',
+ src: 'https://gitlab.com/logo.png',
+ title: 'GitLab Logo',
+ }),
+ ),
+ ),
},
{
markdown: '---',
+ expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '---'))),
},
{
markdown: '***',
+ expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '***'))),
},
{
markdown: '___',
+ expectedDoc: doc(horizontalRule(sourceAttrs('0:3', '___'))),
},
{
markdown: '<hr>',
+ expectedDoc: doc(horizontalRule(sourceAttrs('0:4', '<hr>'))),
},
{
markdown: '# Heading 1',
+ expectedDoc: doc(heading({ ...sourceAttrs('0:11', '# Heading 1'), level: 1 }, 'Heading 1')),
},
{
markdown: '## Heading 2',
+ expectedDoc: doc(heading({ ...sourceAttrs('0:12', '## Heading 2'), level: 2 }, 'Heading 2')),
},
{
markdown: '### Heading 3',
+ expectedDoc: doc(heading({ ...sourceAttrs('0:13', '### Heading 3'), level: 3 }, 'Heading 3')),
},
{
markdown: '#### Heading 4',
+ expectedDoc: doc(
+ heading({ ...sourceAttrs('0:14', '#### Heading 4'), level: 4 }, 'Heading 4'),
+ ),
},
{
markdown: '##### Heading 5',
+ expectedDoc: doc(
+ heading({ ...sourceAttrs('0:15', '##### Heading 5'), level: 5 }, 'Heading 5'),
+ ),
},
{
markdown: '###### Heading 6',
+ expectedDoc: doc(
+ heading({ ...sourceAttrs('0:16', '###### Heading 6'), level: 6 }, 'Heading 6'),
+ ),
},
-
{
markdown: `
- Heading
- one
- ======
- `,
+Heading
+one
+======
+ `,
+ expectedDoc: doc(
+ heading({ ...sourceAttrs('0:18', 'Heading\none\n======'), level: 1 }, 'Heading\none'),
+ ),
},
{
markdown: `
- Heading
- two
- -------
- `,
+Heading
+two
+-------
+ `,
+ expectedDoc: doc(
+ heading({ ...sourceAttrs('0:19', 'Heading\ntwo\n-------'), level: 2 }, 'Heading\ntwo'),
+ ),
},
{
markdown: `
- - List item 1
- - List item 2
- `,
+- List item 1
+- List item 2
+ `,
+ expectedDoc: doc(
+ bulletList(
+ sourceAttrs('0:27', '- List item 1\n- List item 2'),
+ listItem(
+ sourceAttrs('0:13', '- List item 1'),
+ paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
+ ),
+ listItem(
+ sourceAttrs('14:27', '- List item 2'),
+ paragraph(sourceAttrs('16:27', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
},
{
markdown: `
- * List item 1
- * List item 2
- `,
+* List item 1
+* List item 2
+ `,
+ expectedDoc: doc(
+ bulletList(
+ sourceAttrs('0:27', '* List item 1\n* List item 2'),
+ listItem(
+ sourceAttrs('0:13', '* List item 1'),
+ paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
+ ),
+ listItem(
+ sourceAttrs('14:27', '* List item 2'),
+ paragraph(sourceAttrs('16:27', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
},
{
markdown: `
- + List item 1
- + List item 2
- `,
++ List item 1
++ List item 2
+ `,
+ expectedDoc: doc(
+ bulletList(
+ sourceAttrs('0:27', '+ List item 1\n+ List item 2'),
+ listItem(
+ sourceAttrs('0:13', '+ List item 1'),
+ paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
+ ),
+ listItem(
+ sourceAttrs('14:27', '+ List item 2'),
+ paragraph(sourceAttrs('16:27', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
},
{
markdown: `
- 1. List item 1
- 1. List item 2
- `,
+1. List item 1
+1. List item 2
+ `,
+ expectedDoc: doc(
+ orderedList(
+ sourceAttrs('0:29', '1. List item 1\n1. List item 2'),
+ listItem(
+ sourceAttrs('0:14', '1. List item 1'),
+ paragraph(sourceAttrs('3:14', 'List item 1'), 'List item 1'),
+ ),
+ listItem(
+ sourceAttrs('15:29', '1. List item 2'),
+ paragraph(sourceAttrs('18:29', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
},
{
markdown: `
- 1. List item 1
- 2. List item 2
- `,
+1. List item 1
+2. List item 2
+ `,
+ expectedDoc: doc(
+ orderedList(
+ sourceAttrs('0:29', '1. List item 1\n2. List item 2'),
+ listItem(
+ sourceAttrs('0:14', '1. List item 1'),
+ paragraph(sourceAttrs('3:14', 'List item 1'), 'List item 1'),
+ ),
+ listItem(
+ sourceAttrs('15:29', '2. List item 2'),
+ paragraph(sourceAttrs('18:29', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
},
{
markdown: `
- 1) List item 1
- 2) List item 2
- `,
+1) List item 1
+2) List item 2
+ `,
+ expectedDoc: doc(
+ orderedList(
+ sourceAttrs('0:29', '1) List item 1\n2) List item 2'),
+ listItem(
+ sourceAttrs('0:14', '1) List item 1'),
+ paragraph(sourceAttrs('3:14', 'List item 1'), 'List item 1'),
+ ),
+ listItem(
+ sourceAttrs('15:29', '2) List item 2'),
+ paragraph(sourceAttrs('18:29', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
},
{
markdown: `
- - List item 1
- - Sub list item 1
- `,
+- List item 1
+ - Sub list item 1
+ `,
+ expectedDoc: doc(
+ bulletList(
+ sourceAttrs('0:33', '- List item 1\n - Sub list item 1'),
+ listItem(
+ sourceAttrs('0:33', '- List item 1\n - Sub list item 1'),
+ paragraph(sourceAttrs('2:13', 'List item 1'), 'List item 1'),
+ bulletList(
+ sourceAttrs('16:33', '- Sub list item 1'),
+ listItem(
+ sourceAttrs('16:33', '- Sub list item 1'),
+ paragraph(sourceAttrs('18:33', 'Sub list item 1'), 'Sub list item 1'),
+ ),
+ ),
+ ),
+ ),
+ ),
},
{
markdown: `
- - List item 1 paragraph 1
+- List item 1 paragraph 1
- List item 1 paragraph 2
- - List item 2
- `,
+ List item 1 paragraph 2
+- List item 2
+ `,
+ expectedDoc: doc(
+ bulletList(
+ sourceAttrs(
+ '0:66',
+ '- List item 1 paragraph 1\n\n List item 1 paragraph 2\n- List item 2',
+ ),
+ listItem(
+ sourceAttrs('0:52', '- List item 1 paragraph 1\n\n List item 1 paragraph 2'),
+ paragraph(sourceAttrs('2:25', 'List item 1 paragraph 1'), 'List item 1 paragraph 1'),
+ paragraph(sourceAttrs('29:52', 'List item 1 paragraph 2'), 'List item 1 paragraph 2'),
+ ),
+ listItem(
+ sourceAttrs('53:66', '- List item 2'),
+ paragraph(sourceAttrs('55:66', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
},
{
markdown: `
- > This is a blockquote
- `,
+- List item with an image ![bar](foo.png)
+`,
+ expectedDoc: doc(
+ bulletList(
+ sourceAttrs('0:41', '- List item with an image ![bar](foo.png)'),
+ listItem(
+ sourceAttrs('0:41', '- List item with an image ![bar](foo.png)'),
+ paragraph(
+ sourceAttrs('2:41', 'List item with an image ![bar](foo.png)'),
+ 'List item with an image',
+ image({ ...sourceAttrs('26:41', '![bar](foo.png)'), alt: 'bar', src: 'foo.png' }),
+ ),
+ ),
+ ),
+ ),
},
{
markdown: `
- > - List item 1
- > - List item 2
- `,
+> This is a blockquote
+ `,
+ expectedDoc: doc(
+ blockquote(
+ sourceAttrs('0:22', '> This is a blockquote'),
+ paragraph(sourceAttrs('2:22', 'This is a blockquote'), 'This is a blockquote'),
+ ),
+ ),
},
{
markdown: `
- const fn = () => 'GitLab';
- `,
+> - List item 1
+> - List item 2
+ `,
+ expectedDoc: doc(
+ blockquote(
+ sourceAttrs('0:31', '> - List item 1\n> - List item 2'),
+ bulletList(
+ sourceAttrs('2:31', '- List item 1\n> - List item 2'),
+ listItem(
+ sourceAttrs('2:15', '- List item 1'),
+ paragraph(sourceAttrs('4:15', 'List item 1'), 'List item 1'),
+ ),
+ listItem(
+ sourceAttrs('18:31', '- List item 2'),
+ paragraph(sourceAttrs('20:31', 'List item 2'), 'List item 2'),
+ ),
+ ),
+ ),
+ ),
},
{
markdown: `
- \`\`\`javascript
- const fn = () => 'GitLab';
- \`\`\`\
- `,
+code block
+
+ const fn = () => 'GitLab';
+
+ `,
+ expectedDoc: doc(
+ paragraph(sourceAttrs('0:10', 'code block'), 'code block'),
+ codeBlock(
+ {
+ ...sourceAttrs('12:42', " const fn = () => 'GitLab';"),
+ class: 'code highlight',
+ language: null,
+ },
+ "const fn = () => 'GitLab';",
+ ),
+ ),
},
{
markdown: `
- ~~~javascript
- const fn = () => 'GitLab';
- ~~~
- `,
+\`\`\`javascript
+const fn = () => 'GitLab';
+\`\`\`\
+ `,
+ expectedDoc: doc(
+ codeBlock(
+ {
+ ...sourceAttrs('0:44', "```javascript\nconst fn = () => 'GitLab';\n```"),
+ class: 'code highlight',
+ language: 'javascript',
+ },
+ "const fn = () => 'GitLab';",
+ ),
+ ),
},
{
markdown: `
- \`\`\`
- \`\`\`\
- `,
+~~~javascript
+const fn = () => 'GitLab';
+~~~
+ `,
+ expectedDoc: doc(
+ codeBlock(
+ {
+ ...sourceAttrs('0:44', "~~~javascript\nconst fn = () => 'GitLab';\n~~~"),
+ class: 'code highlight',
+ language: 'javascript',
+ },
+ "const fn = () => 'GitLab';",
+ ),
+ ),
},
{
markdown: `
- \`\`\`javascript
- const fn = () => 'GitLab';
+\`\`\`
+\`\`\`\
+ `,
+ expectedDoc: doc(
+ codeBlock(
+ {
+ ...sourceAttrs('0:7', '```\n```'),
+ class: 'code highlight',
+ language: null,
+ },
+ '',
+ ),
+ ),
+ },
+ {
+ markdown: `
+\`\`\`javascript
+const fn = () => 'GitLab';
- \`\`\`\
- `,
+\`\`\`\
+ `,
+ expectedDoc: doc(
+ codeBlock(
+ {
+ ...sourceAttrs('0:45', "```javascript\nconst fn = () => 'GitLab';\n\n```"),
+ class: 'code highlight',
+ language: 'javascript',
+ },
+ "const fn = () => 'GitLab';\n",
+ ),
+ ),
+ },
+ {
+ markdown: '~~Strikedthrough text~~',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:23', '~~Strikedthrough text~~'),
+ strike(sourceAttrs('0:23', '~~Strikedthrough text~~'), 'Strikedthrough text'),
+ ),
+ ),
+ },
+ {
+ markdown: '<del>Strikedthrough text</del>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:30', '<del>Strikedthrough text</del>'),
+ strike(sourceAttrs('0:30', '<del>Strikedthrough text</del>'), 'Strikedthrough text'),
+ ),
+ ),
+ },
+ {
+ markdown: '<strike>Strikedthrough text</strike>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:36', '<strike>Strikedthrough text</strike>'),
+ strike(
+ sourceAttrs('0:36', '<strike>Strikedthrough text</strike>'),
+ 'Strikedthrough text',
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: '<s>Strikedthrough text</s>',
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:26', '<s>Strikedthrough text</s>'),
+ strike(sourceAttrs('0:26', '<s>Strikedthrough text</s>'), 'Strikedthrough text'),
+ ),
+ ),
},
- ])('processes %s correctly', async ({ markdown }) => {
+ {
+ markdown: `
+- [ ] task list item 1
+- [ ] task list item 2
+ `,
+ expectedDoc: doc(
+ taskList(
+ {
+ numeric: false,
+ ...sourceAttrs('0:45', '- [ ] task list item 1\n- [ ] task list item 2'),
+ },
+ taskItem(
+ {
+ checked: false,
+ ...sourceAttrs('0:22', '- [ ] task list item 1'),
+ },
+ paragraph(sourceAttrs('6:22', 'task list item 1'), 'task list item 1'),
+ ),
+ taskItem(
+ {
+ checked: false,
+ ...sourceAttrs('23:45', '- [ ] task list item 2'),
+ },
+ paragraph(sourceAttrs('29:45', 'task list item 2'), 'task list item 2'),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+- [x] task list item 1
+- [x] task list item 2
+ `,
+ expectedDoc: doc(
+ taskList(
+ {
+ numeric: false,
+ ...sourceAttrs('0:45', '- [x] task list item 1\n- [x] task list item 2'),
+ },
+ taskItem(
+ {
+ checked: true,
+ ...sourceAttrs('0:22', '- [x] task list item 1'),
+ },
+ paragraph(sourceAttrs('6:22', 'task list item 1'), 'task list item 1'),
+ ),
+ taskItem(
+ {
+ checked: true,
+ ...sourceAttrs('23:45', '- [x] task list item 2'),
+ },
+ paragraph(sourceAttrs('29:45', 'task list item 2'), 'task list item 2'),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+1. [ ] task list item 1
+2. [ ] task list item 2
+ `,
+ expectedDoc: doc(
+ taskList(
+ {
+ numeric: true,
+ ...sourceAttrs('0:47', '1. [ ] task list item 1\n2. [ ] task list item 2'),
+ },
+ taskItem(
+ {
+ checked: false,
+ ...sourceAttrs('0:23', '1. [ ] task list item 1'),
+ },
+ paragraph(sourceAttrs('7:23', 'task list item 1'), 'task list item 1'),
+ ),
+ taskItem(
+ {
+ checked: false,
+ ...sourceAttrs('24:47', '2. [ ] task list item 2'),
+ },
+ paragraph(sourceAttrs('31:47', 'task list item 2'), 'task list item 2'),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+| a | b |
+|---|---|
+| c | d |
+`,
+ expectedDoc: doc(
+ table(
+ sourceAttrs('0:29', '| a | b |\n|---|---|\n| c | d |'),
+ tableRow(
+ sourceAttrs('0:9', '| a | b |'),
+ tableHeader(sourceAttrs('0:5', '| a |'), paragraph(sourceAttrs('2:3', 'a'), 'a')),
+ tableHeader(sourceAttrs('5:9', ' b |'), paragraph(sourceAttrs('6:7', 'b'), 'b')),
+ ),
+ tableRow(
+ sourceAttrs('20:29', '| c | d |'),
+ tableCell(sourceAttrs('20:25', '| c |'), paragraph(sourceAttrs('22:23', 'c'), 'c')),
+ tableCell(sourceAttrs('25:29', ' d |'), paragraph(sourceAttrs('26:27', 'd'), 'd')),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+<table>
+ <tr>
+ <th colspan="2" rowspan="5">Header</th>
+ </tr>
+ <tr>
+ <td colspan="2" rowspan="5">Body</td>
+ </tr>
+</table>
+`,
+ expectedDoc: doc(
+ table(
+ sourceAttrs(
+ '0:132',
+ '<table>\n <tr>\n <th colspan="2" rowspan="5">Header</th>\n </tr>\n <tr>\n <td colspan="2" rowspan="5">Body</td>\n </tr>\n</table>',
+ ),
+ tableRow(
+ sourceAttrs('10:66', '<tr>\n <th colspan="2" rowspan="5">Header</th>\n </tr>'),
+ tableHeader(
+ {
+ ...sourceAttrs('19:58', '<th colspan="2" rowspan="5">Header</th>'),
+ colspan: 2,
+ rowspan: 5,
+ },
+ paragraph(sourceAttrs('47:53', 'Header'), 'Header'),
+ ),
+ ),
+ tableRow(
+ sourceAttrs('69:123', '<tr>\n <td colspan="2" rowspan="5">Body</td>\n </tr>'),
+ tableCell(
+ {
+ ...sourceAttrs('78:115', '<td colspan="2" rowspan="5">Body</td>'),
+ colspan: 2,
+ rowspan: 5,
+ },
+ paragraph(sourceAttrs('106:110', 'Body'), 'Body'),
+ ),
+ ),
+ ),
+ ),
+ },
+ {
+ markdown: `
+This is a footnote [^footnote]
+
+Paragraph
+
+[^footnote]: Footnote definition
+
+Paragraph
+`,
+ expectedDoc: doc(
+ paragraph(
+ sourceAttrs('0:30', 'This is a footnote [^footnote]'),
+ 'This is a footnote ',
+ footnoteReference({
+ ...sourceAttrs('19:30', '[^footnote]'),
+ identifier: 'footnote',
+ label: 'footnote',
+ }),
+ ),
+ paragraph(sourceAttrs('32:41', 'Paragraph'), 'Paragraph'),
+ footnoteDefinition(
+ {
+ ...sourceAttrs('43:75', '[^footnote]: Footnote definition'),
+ identifier: 'footnote',
+ label: 'footnote',
+ },
+ paragraph(sourceAttrs('56:75', 'Footnote definition'), 'Footnote definition'),
+ ),
+ paragraph(sourceAttrs('77:86', 'Paragraph'), 'Paragraph'),
+ ),
+ },
+ ];
+
+ const runOnly = examples.find((example) => example.only === true);
+ const runExamples = runOnly ? [runOnly] : examples;
+
+ it.each(runExamples)('processes %s correctly', async ({ markdown, expectedDoc }) => {
const trimmed = markdown.trim();
const document = await deserialize(trimmed);
+ expect(expectedDoc).not.toBeFalsy();
+ expect(document.toJSON()).toEqual(expectedDoc.toJSON());
expect(serialize(document)).toEqual(trimmed);
});
});
diff --git a/spec/frontend/content_editor/services/asset_resolver_spec.js b/spec/frontend/content_editor/services/asset_resolver_spec.js
index f4e7d9bf881..0a99f823be3 100644
--- a/spec/frontend/content_editor/services/asset_resolver_spec.js
+++ b/spec/frontend/content_editor/services/asset_resolver_spec.js
@@ -20,4 +20,14 @@ describe('content_editor/services/asset_resolver', () => {
);
});
});
+
+ describe('renderDiagram', () => {
+ it('resolves a diagram code to a url containing the diagram image', async () => {
+ renderMarkdown.mockResolvedValue(
+ '<p><img data-diagram="nomnoml" src="url/to/some/diagram"></p>',
+ );
+
+ expect(await assetResolver.renderDiagram('test')).toBe('url/to/some/diagram');
+ });
+ });
});
diff --git a/spec/frontend/content_editor/services/code_block_language_loader_spec.js b/spec/frontend/content_editor/services/code_block_language_loader_spec.js
index 943de327762..795f5219a3f 100644
--- a/spec/frontend/content_editor/services/code_block_language_loader_spec.js
+++ b/spec/frontend/content_editor/services/code_block_language_loader_spec.js
@@ -18,25 +18,32 @@ describe('content_editor/services/code_block_language_loader', () => {
languageLoader.lowlight = lowlight;
});
- describe('findLanguageBySyntax', () => {
+ describe('findOrCreateLanguageBySyntax', () => {
it.each`
syntax | language
${'javascript'} | ${{ syntax: 'javascript', label: 'Javascript' }}
${'js'} | ${{ syntax: 'javascript', label: 'Javascript' }}
${'jsx'} | ${{ syntax: 'javascript', label: 'Javascript' }}
`('returns a language by syntax and its variants', ({ syntax, language }) => {
- expect(languageLoader.findLanguageBySyntax(syntax)).toMatchObject(language);
+ expect(languageLoader.findOrCreateLanguageBySyntax(syntax)).toMatchObject(language);
});
it('returns Custom (syntax) if the language does not exist', () => {
- expect(languageLoader.findLanguageBySyntax('foobar')).toMatchObject({
+ expect(languageLoader.findOrCreateLanguageBySyntax('foobar')).toMatchObject({
syntax: 'foobar',
label: 'Custom (foobar)',
});
});
+ it('returns Diagram (syntax) if the language does not exist, and isDiagram = true', () => {
+ expect(languageLoader.findOrCreateLanguageBySyntax('foobar', true)).toMatchObject({
+ syntax: 'foobar',
+ label: 'Diagram (foobar)',
+ });
+ });
+
it('returns plaintext if no syntax is passed', () => {
- expect(languageLoader.findLanguageBySyntax('')).toMatchObject({
+ expect(languageLoader.findOrCreateLanguageBySyntax('')).toMatchObject({
syntax: 'plaintext',
label: 'Plain text',
});
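
A minimal sketch of what the renamed finder could look like, based only on the expectations above. The language list, variant handling, and label strings are assumptions for illustration, not the real loader (which resolves languages through lowlight):

  // Sketch only; CODE_BLOCK_LANGUAGES and its variant strings are illustrative.
  const CODE_BLOCK_LANGUAGES = [
    { syntax: 'javascript', label: 'Javascript', variants: 'js, jsx' },
    { syntax: 'plaintext', label: 'Plain text' },
  ];

  function findOrCreateLanguageBySyntax(value, isDiagram = false) {
    const syntax = (value || 'plaintext').toLowerCase();
    const known = CODE_BLOCK_LANGUAGES.find(
      ({ syntax: s, variants = '' }) => s === syntax || variants.split(', ').includes(syntax),
    );

    // Unknown syntaxes fall back to a generated entry labelled Custom (syntax) or Diagram (syntax).
    return known || { syntax, label: `${isDiagram ? 'Diagram' : 'Custom'} (${syntax})` };
  }
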
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 25b7483f234..13e9efaea59 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -13,7 +13,6 @@ import Figure from '~/content_editor/extensions/figure';
import FigureCaption from '~/content_editor/extensions/figure_caption';
import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
import FootnoteReference from '~/content_editor/extensions/footnote_reference';
-import FootnotesSection from '~/content_editor/extensions/footnotes_section';
import HardBreak from '~/content_editor/extensions/hard_break';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
@@ -53,7 +52,6 @@ const tiptapEditor = createTestEditor({
Emoji,
FootnoteDefinition,
FootnoteReference,
- FootnotesSection,
Figure,
FigureCaption,
HardBreak,
@@ -92,7 +90,6 @@ const {
emoji,
footnoteDefinition,
footnoteReference,
- footnotesSection,
figure,
figureCaption,
heading,
@@ -131,7 +128,6 @@ const {
figureCaption: { nodeType: FigureCaption.name },
footnoteDefinition: { nodeType: FootnoteDefinition.name },
footnoteReference: { nodeType: FootnoteReference.name },
- footnotesSection: { nodeType: FootnotesSection.name },
hardBreak: { nodeType: HardBreak.name },
heading: { nodeType: Heading.name },
horizontalRule: { nodeType: HorizontalRule.name },
@@ -200,7 +196,7 @@ describe('markdownSerializer', () => {
it('correctly serializes a plain URL link', () => {
expect(serialize(paragraph(link({ href: 'https://example.com' }, 'https://example.com')))).toBe(
- '<https://example.com>',
+ 'https://example.com',
);
});
@@ -1147,49 +1143,75 @@ there
it('correctly serializes footnotes', () => {
expect(
serialize(
- paragraph(
- 'Oranges are orange ',
- footnoteReference({ footnoteId: '1', footnoteNumber: '1' }),
- ),
- footnotesSection(footnoteDefinition(paragraph('Oranges are fruits'))),
+ paragraph('Oranges are orange ', footnoteReference({ label: '1', identifier: '1' })),
+ footnoteDefinition({ label: '1', identifier: '1' }, 'Oranges are fruits'),
),
).toBe(
`
Oranges are orange [^1]
[^1]: Oranges are fruits
- `.trim(),
+`.trimLeft(),
);
});
+ const defaultEditAction = (initialContent) => {
+ tiptapEditor.chain().setContent(initialContent.toJSON()).insertContent(' modified').run();
+ };
+
+ const prependContentEditAction = (initialContent) => {
+ tiptapEditor
+ .chain()
+ .setContent(initialContent.toJSON())
+ .setTextSelection(0)
+ .insertContent('modified ')
+ .run();
+ };
+
it.each`
- mark | content | modifiedContent
- ${'bold'} | ${'**bold**'} | ${'**bold modified**'}
- ${'bold'} | ${'__bold__'} | ${'__bold modified__'}
- ${'bold'} | ${'<strong>bold</strong>'} | ${'<strong>bold modified</strong>'}
- ${'bold'} | ${'<b>bold</b>'} | ${'<b>bold modified</b>'}
- ${'italic'} | ${'_italic_'} | ${'_italic modified_'}
- ${'italic'} | ${'*italic*'} | ${'*italic modified*'}
- ${'italic'} | ${'<em>italic</em>'} | ${'<em>italic modified</em>'}
- ${'italic'} | ${'<i>italic</i>'} | ${'<i>italic modified</i>'}
- ${'link'} | ${'[gitlab](https://gitlab.com)'} | ${'[gitlab modified](https://gitlab.com)'}
- ${'link'} | ${'<a href="https://gitlab.com">link</a>'} | ${'<a href="https://gitlab.com">link modified</a>'}
- ${'code'} | ${'`code`'} | ${'`code modified`'}
- ${'code'} | ${'<code>code</code>'} | ${'<code>code modified</code>'}
+ mark | content | modifiedContent | editAction
+ ${'bold'} | ${'**bold**'} | ${'**bold modified**'} | ${defaultEditAction}
+ ${'bold'} | ${'__bold__'} | ${'__bold modified__'} | ${defaultEditAction}
+ ${'bold'} | ${'<strong>bold</strong>'} | ${'<strong>bold modified</strong>'} | ${defaultEditAction}
+ ${'bold'} | ${'<b>bold</b>'} | ${'<b>bold modified</b>'} | ${defaultEditAction}
+ ${'italic'} | ${'_italic_'} | ${'_italic modified_'} | ${defaultEditAction}
+ ${'italic'} | ${'*italic*'} | ${'*italic modified*'} | ${defaultEditAction}
+ ${'italic'} | ${'<em>italic</em>'} | ${'<em>italic modified</em>'} | ${defaultEditAction}
+ ${'italic'} | ${'<i>italic</i>'} | ${'<i>italic modified</i>'} | ${defaultEditAction}
+ ${'link'} | ${'[gitlab](https://gitlab.com)'} | ${'[gitlab modified](https://gitlab.com)'} | ${defaultEditAction}
+ ${'link'} | ${'<a href="https://gitlab.com">link</a>'} | ${'<a href="https://gitlab.com">link modified</a>'} | ${defaultEditAction}
+ ${'link'} | ${'link www.gitlab.com'} | ${'modified link www.gitlab.com'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com'} | ${'modified link https://www.gitlab.com'} | ${prependContentEditAction}
+ ${'link'} | ${'link(https://www.gitlab.com)'} | ${'modified link(https://www.gitlab.com)'} | ${prependContentEditAction}
+ ${'link'} | ${'link(engineering@gitlab.com)'} | ${'modified link(engineering@gitlab.com)'} | ${prependContentEditAction}
+ ${'link'} | ${'link <https://www.gitlab.com>'} | ${'modified link <https://www.gitlab.com>'} | ${prependContentEditAction}
+ ${'link'} | ${'link [https://www.gitlab.com>'} | ${'modified link \\[https://www.gitlab.com>'} | ${prependContentEditAction}
+ ${'link'} | ${'link <https://www.gitlab.com'} | ${'modified link <https://www.gitlab.com'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com>'} | ${'modified link https://www.gitlab.com>'} | ${prependContentEditAction}
+ ${'link'} | ${'link **https://www.gitlab.com]**'} | ${'modified link [**https://www.gitlab.com\\]**](https://www.gitlab.com%5D)'} | ${prependContentEditAction}
+ ${'code'} | ${'`code`'} | ${'`code modified`'} | ${defaultEditAction}
+ ${'code'} | ${'<code>code</code>'} | ${'<code>code modified</code>'} | ${defaultEditAction}
+ ${'strike'} | ${'~~striked~~'} | ${'~~striked modified~~'} | ${defaultEditAction}
+ ${'strike'} | ${'<del>striked</del>'} | ${'<del>striked modified</del>'} | ${defaultEditAction}
+ ${'strike'} | ${'<strike>striked</strike>'} | ${'<strike>striked modified</strike>'} | ${defaultEditAction}
+ ${'strike'} | ${'<s>striked</s>'} | ${'<s>striked modified</s>'} | ${defaultEditAction}
+ ${'list'} | ${'- list item'} | ${'- list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'* list item'} | ${'* list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'+ list item'} | ${'+ list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'- list item 1\n- list item 2'} | ${'- list item 1\n- list item 2 modified'} | ${defaultEditAction}
+ ${'list'} | ${'2) list item'} | ${'2) list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'1. list item'} | ${'1. list item modified'} | ${defaultEditAction}
+ ${'taskList'} | ${'2) [ ] task list item'} | ${'2) [ ] task list item modified'} | ${defaultEditAction}
+ ${'taskList'} | ${'2) [x] task list item'} | ${'2) [x] task list item modified'} | ${defaultEditAction}
`(
- 'preserves original $mark syntax when sourceMarkdown is available',
- async ({ content, modifiedContent }) => {
+ 'preserves original $mark syntax when sourceMarkdown is available for $content',
+ async ({ content, modifiedContent, editAction }) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
schema: tiptapEditor.schema,
content,
});
- tiptapEditor
- .chain()
- .setContent(document.toJSON())
- // changing the document ensures that block preservation doesn’t yield false positives
- .insertContent(' modified')
- .run();
+ editAction(document);
const serialized = markdownSerializer({}).serialize({
pristineDoc: document,
diff --git a/spec/frontend/custom_metrics/components/custom_metrics_form_spec.js b/spec/frontend/custom_metrics/components/custom_metrics_form_spec.js
index 384d6699150..af56b94f90b 100644
--- a/spec/frontend/custom_metrics/components/custom_metrics_form_spec.js
+++ b/spec/frontend/custom_metrics/components/custom_metrics_form_spec.js
@@ -18,7 +18,7 @@ describe('CustomMetricsForm', () => {
wrapper = shallowMount(CustomMetricsForm, {
propsData: {
customMetricsPath: '',
- editProjectServicePath: '',
+ editIntegrationPath: '',
metricPersisted,
validateQueryPath: '',
formData,
diff --git a/spec/frontend/cycle_analytics/path_navigation_spec.js b/spec/frontend/cycle_analytics/path_navigation_spec.js
index c6d72d3b571..e8c4ebd3a38 100644
--- a/spec/frontend/cycle_analytics/path_navigation_spec.js
+++ b/spec/frontend/cycle_analytics/path_navigation_spec.js
@@ -1,4 +1,4 @@
-import { GlPath, GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlPath, GlSkeletonLoader } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -73,7 +73,7 @@ describe('Project PathNavigation', () => {
});
it('hides the gl-skeleton-loading component', () => {
- expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
+ expect(wrapper.find(GlSkeletonLoader).exists()).toBe(false);
});
it('renders each stage', () => {
@@ -116,7 +116,7 @@ describe('Project PathNavigation', () => {
});
it('displays the gl-skeleton-loading component', () => {
- expect(wrapper.find(GlSkeletonLoading).exists()).toBe(true);
+ expect(wrapper.find(GlSkeletonLoader).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/cycle_analytics/stage_table_spec.js b/spec/frontend/cycle_analytics/stage_table_spec.js
index 0d15d67866d..473e1d5b664 100644
--- a/spec/frontend/cycle_analytics/stage_table_spec.js
+++ b/spec/frontend/cycle_analytics/stage_table_spec.js
@@ -27,6 +27,7 @@ const findTableHeadColumns = () => findTableHead().findAll('th');
const findStageEventTitle = (ev) => extendedWrapper(ev).findByTestId('vsa-stage-event-title');
const findStageEventLink = (ev) => extendedWrapper(ev).findByTestId('vsa-stage-event-link');
const findStageTime = () => wrapper.findByTestId('vsa-stage-event-time');
+const findStageLastEvent = () => wrapper.findByTestId('vsa-stage-last-event');
const findIcon = (name) => wrapper.findByTestId(`${name}-icon`);
function createComponent(props = {}, shallow = false) {
@@ -128,6 +129,10 @@ describe('StageTable', () => {
expect(findStageTime().text()).toBe(createdAt);
});
+ it('will render the end event', () => {
+ expect(findStageLastEvent().text()).toBe(firstIssueEvent.endEventTimestamp);
+ });
+
it('will render the author', () => {
expect(wrapper.findByTestId('vsa-stage-event-author').text()).toContain(
firstIssueEvent.author.name,
@@ -303,10 +308,20 @@ describe('StageTable', () => {
wrapper.destroy();
});
- it('can sort the table by each column', () => {
- findTableHeadColumns().wrappers.forEach((w) => {
- expect(w.attributes('aria-sort')).toBe('none');
- });
+ it('can sort the end event or duration', () => {
+ findTableHeadColumns()
+ .wrappers.slice(1)
+ .forEach((w) => {
+ expect(w.attributes('aria-sort')).toBe('none');
+ });
+ });
+
+ it('cannot be sorted by title', () => {
+ findTableHeadColumns()
+ .wrappers.slice(0, 1)
+ .forEach((w) => {
+ expect(w.attributes('aria-sort')).toBeUndefined();
+ });
});
it('clicking a table column will send tracking information', () => {
diff --git a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
index 4a3e8146b13..df86b10cba3 100644
--- a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
+++ b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlSkeletonLoader } from '@gitlab/ui';
import { nextTick } from 'vue';
import metricsData from 'test_fixtures/projects/analytics/value_stream_analytics/summary.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -61,7 +61,7 @@ describe('ValueStreamMetrics', () => {
it('will display a loader with pending requests', async () => {
await nextTick();
- expect(wrapper.findComponent(GlSkeletonLoading).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
describe('with data loaded', () => {
@@ -88,7 +88,7 @@ describe('ValueStreamMetrics', () => {
});
it('will not display a loading icon', () => {
- expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(false);
});
describe('filterFn', () => {
diff --git a/spec/frontend/design_management/components/design_presentation_spec.js b/spec/frontend/design_management/components/design_presentation_spec.js
index d79dde84d46..30eddcee86a 100644
--- a/spec/frontend/design_management/components/design_presentation_spec.js
+++ b/spec/frontend/design_management/components/design_presentation_spec.js
@@ -36,6 +36,7 @@ describe('Design management design presentation component', () => {
discussions,
isAnnotating,
resolvedDiscussionsExpanded,
+ isLoading: false,
},
stubs,
});
diff --git a/spec/frontend/design_management/components/design_sidebar_spec.js b/spec/frontend/design_management/components/design_sidebar_spec.js
index e8426216c1c..40968d9204a 100644
--- a/spec/frontend/design_management/components/design_sidebar_spec.js
+++ b/spec/frontend/design_management/components/design_sidebar_spec.js
@@ -52,6 +52,7 @@ describe('Design management design sidebar component', () => {
design,
resolvedDiscussionsExpanded: false,
markdownPreviewPath: '',
+ isLoading: false,
...props,
},
mocks: {
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
index 3cb48d7632f..b5a69b28a88 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
@@ -18,7 +18,7 @@ exports[`Design management pagination component renders navigation buttons 1`] =
category="primary"
class="js-previous-design"
disabled="true"
- icon="angle-left"
+ icon="chevron-lg-left"
size="medium"
title="Go to previous design"
variant="default"
@@ -29,7 +29,7 @@ exports[`Design management pagination component renders navigation buttons 1`] =
buttontextclasses=""
category="primary"
class="js-next-design"
- icon="angle-right"
+ icon="chevron-lg-right"
size="medium"
title="Go to next design"
variant="default"
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
index 6dfd57906d8..3c4aa0f4d3c 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
@@ -56,7 +56,7 @@ exports[`Design management toolbar component renders design and updated data 1`]
buttonclass=""
buttonicon="archive"
buttonsize="medium"
- buttonvariant="warning"
+ buttonvariant="default"
class="gl-ml-3"
hasselecteddesigns="true"
title="Archive design"
diff --git a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
index 243cc9d891d..be736184e60 100644
--- a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
@@ -45,7 +45,7 @@ exports[`Design management index page designs renders loading icon 1`] = `
<gl-loading-icon-stub
color="dark"
label="Loading"
- size="md"
+ size="lg"
/>
</div>
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index 8f12dc8fb06..0f2857821ea 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -99,7 +99,7 @@ exports[`Design management design index page renders design index 1`] = `
variant="link"
>
Resolved Comments (1)
-
+
</gl-button-stub>
<gl-popover-stub
@@ -112,8 +112,8 @@ exports[`Design management design index page renders design index 1`] = `
>
<p>
- Comments you resolve can be viewed and unresolved by going to the "Resolved Comments" section below
-
+ Comments you resolve can be viewed and unresolved by going to the "Resolved Comments" section below
+
</p>
<a
@@ -144,19 +144,6 @@ exports[`Design management design index page renders design index 1`] = `
</div>
`;
-exports[`Design management design index page sets loading state 1`] = `
-<div
- class="design-detail js-design-detail fixed-top gl-w-full gl-bottom-0 gl-display-flex gl-justify-content-center gl-flex-direction-column gl-lg-flex-direction-row"
->
- <gl-loading-icon-stub
- class="gl-align-self-center"
- color="dark"
- label="Loading"
- size="xl"
- />
-</div>
-`;
-
exports[`Design management design index page with error GlAlert is rendered in correct position with correct content 1`] = `
<div
class="design-detail js-design-detail fixed-top gl-w-full gl-bottom-0 gl-display-flex gl-justify-content-center gl-flex-direction-column gl-lg-flex-direction-row"
@@ -185,8 +172,8 @@ exports[`Design management design index page with error GlAlert is rendered in c
variant="danger"
>
- woops
-
+ woops
+
</gl-alert-stub>
</div>
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index 55d0fabe402..17a299c5de1 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -91,7 +91,12 @@ describe('Design management design index page', () => {
function createComponent(
{ loading = false } = {},
- { data = {}, intialRouteOptions = {}, provide = {} } = {},
+ {
+ data = {},
+ intialRouteOptions = {},
+ provide = {},
+ stubs = { ApolloMutation, DesignSidebar, DesignReplyForm },
+ } = {},
) {
const $apollo = {
queries: {
@@ -109,11 +114,7 @@ describe('Design management design index page', () => {
wrapper = shallowMount(DesignIndex, {
propsData: { id: '1' },
mocks: { $apollo },
- stubs: {
- ApolloMutation,
- DesignSidebar,
- DesignReplyForm,
- },
+ stubs,
provide: {
issueIid: '1',
projectPath: 'project-path',
@@ -139,7 +140,7 @@ describe('Design management design index page', () => {
describe('when navigating to component', () => {
it('applies fullscreen layout class', () => {
jest.spyOn(utils, 'getPageLayoutElement').mockReturnValue(mockPageLayoutElement);
- createComponent({ loading: true });
+ createComponent({}, { stubs: {} });
expect(mockPageLayoutElement.classList.add).toHaveBeenCalledTimes(1);
expect(mockPageLayoutElement.classList.add).toHaveBeenCalledWith(
@@ -151,7 +152,7 @@ describe('Design management design index page', () => {
describe('when navigating within the component', () => {
it('`scale` prop of DesignPresentation component is 1', async () => {
jest.spyOn(utils, 'getPageLayoutElement').mockReturnValue(mockPageLayoutElement);
- createComponent({ loading: false }, { data: { design, scale: 2 } });
+ createComponent({}, { data: { design, scale: 2 } });
await nextTick();
expect(findDesignPresentation().props('scale')).toBe(2);
@@ -180,7 +181,8 @@ describe('Design management design index page', () => {
it('sets loading state', () => {
createComponent({ loading: true });
- expect(wrapper.element).toMatchSnapshot();
+ expect(wrapper.find(DesignPresentation).props('isLoading')).toBe(true);
+ expect(wrapper.find(DesignSidebar).props('isLoading')).toBe(true);
});
it('renders design index', () => {
@@ -197,6 +199,7 @@ describe('Design management design index page', () => {
design,
markdownPreviewPath: '/project-path/preview_markdown?target_type=Issue',
resolvedDiscussionsExpanded: false,
+ isLoading: false,
});
});
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 87531e8b645..087655d10f7 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -4,6 +4,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo, { ApolloMutation } from 'vue-apollo';
import VueRouter from 'vue-router';
+import { GlBreakpointInstance as breakpointInstance } from '@gitlab/ui/dist/utils';
import VueDraggable from 'vuedraggable';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -762,6 +763,25 @@ describe('Design management index page', () => {
expect(findDesigns().at(0).props('id')).toBe('2');
});
+ it.each`
+ breakpoint | reorderDisabled
+ ${'xs'} | ${true}
+ ${'sm'} | ${false}
+ ${'md'} | ${false}
+ ${'lg'} | ${false}
+ ${'xl'} | ${false}
+ `(
+ 'sets draggable disabled value to $reorderDisabled when breakpoint is $breakpoint',
+ async ({ breakpoint, reorderDisabled }) => {
+ jest.spyOn(breakpointInstance, 'getBreakpointSize').mockReturnValue(breakpoint);
+
+ createComponentWithApollo({});
+ await waitForPromises();
+
+ expect(draggableAttributes().disabled).toBe(reorderDisabled);
+ },
+ );
+
it('prevents reordering when reorderDesigns mutation is in progress', async () => {
createComponentWithApollo({});
await moveDesigns(wrapper);
diff --git a/spec/frontend/design_management/router_spec.js b/spec/frontend/design_management/router_spec.js
index 03ab79712a4..b9c62334223 100644
--- a/spec/frontend/design_management/router_spec.js
+++ b/spec/frontend/design_management/router_spec.js
@@ -20,6 +20,8 @@ function factory(routeArg) {
return mount(App, {
router,
+ provide: { issueIid: '1' },
+ stubs: { Toolbar: true },
mocks: {
$apollo: {
queries: {
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index eee17e118a0..e52c5abbc7b 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -6,8 +6,6 @@ import Component from '~/diffs/components/commit_item.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
-jest.mock('~/user_popovers');
-
const TEST_AUTHOR_NAME = 'test';
const TEST_AUTHOR_EMAIL = 'test+test@gitlab.com';
const TEST_AUTHOR_GRAVATAR = `${TEST_HOST}/avatar/test?s=40`;
diff --git a/spec/frontend/diffs/components/diff_expansion_cell_spec.js b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
index bd538996349..5ff0728b358 100644
--- a/spec/frontend/diffs/components/diff_expansion_cell_spec.js
+++ b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
@@ -1,4 +1,3 @@
-import { getByText } from '@testing-library/dom';
import { mount } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
@@ -81,7 +80,7 @@ describe('DiffExpansionCell', () => {
const findExpandUp = (wrapper) => wrapper.find(EXPAND_UP_CLASS);
const findExpandDown = (wrapper) => wrapper.find(EXPAND_DOWN_CLASS);
- const findExpandAll = ({ element }) => getByText(element, 'Show all unchanged lines');
+ const findExpandAll = (wrapper) => wrapper.find('.js-unfold-all');
describe('top row', () => {
it('should have "expand up" and "show all" option', () => {
@@ -90,9 +89,7 @@ describe('DiffExpansionCell', () => {
});
expect(findExpandUp(wrapper).exists()).toBe(true);
- expect(findExpandDown(wrapper).exists()).toBe(true);
expect(findExpandUp(wrapper).attributes('disabled')).not.toBeDefined();
- expect(findExpandDown(wrapper).attributes('disabled')).toBeDefined();
expect(findExpandAll(wrapper)).not.toBe(null);
});
});
@@ -114,9 +111,7 @@ describe('DiffExpansionCell', () => {
});
expect(findExpandDown(wrapper).exists()).toBe(true);
- expect(findExpandUp(wrapper).exists()).toBe(true);
expect(findExpandDown(wrapper).attributes('disabled')).not.toBeDefined();
- expect(findExpandUp(wrapper).attributes('disabled')).toBeDefined();
expect(findExpandAll(wrapper)).not.toBe(null);
});
});
@@ -144,9 +139,9 @@ describe('DiffExpansionCell', () => {
newLineNumber,
});
- const wrapper = createComponent({ file });
+ const wrapper = createComponent({ file, lineCountBetween: 10 });
- findExpandAll(wrapper).click();
+ findExpandAll(wrapper).trigger('click');
expect(store.dispatch).toHaveBeenCalledWith(
'diffs/loadMoreLines',
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index f22bd312a6d..d90afeb6b82 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -14,7 +14,6 @@ import { scrollToElement } from '~/lib/utils/common_utils';
import { truncateSha } from '~/lib/utils/text_utility';
import { __, sprintf } from '~/locale';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
-import FileIcon from '~/vue_shared/components/file_icon.vue';
import testAction from '../../__helpers__/vuex_action_helper';
import diffDiscussionsMockData from '../mock_data/diff_discussions';
@@ -224,16 +223,6 @@ describe('DiffFileHeader component', () => {
});
expect(findFileActions().exists()).toBe(false);
});
-
- it('renders submodule icon', () => {
- createComponent({
- props: {
- diffFile: submoduleDiffFile,
- },
- });
-
- expect(wrapper.find(FileIcon).props('submodule')).toBe(true);
- });
});
describe('for any file', () => {
diff --git a/spec/frontend/diffs/components/diff_line_note_form_spec.js b/spec/frontend/diffs/components/diff_line_note_form_spec.js
index fb9dc22ce25..b59043168b8 100644
--- a/spec/frontend/diffs/components/diff_line_note_form_spec.js
+++ b/spec/frontend/diffs/components/diff_line_note_form_spec.js
@@ -64,6 +64,16 @@ describe('DiffLineNoteForm', () => {
expect(confirmAction).toHaveBeenCalled();
});
+ it('should only ask for confirmation once', () => {
+ // Never resolve so we can test what happens when triggered while "confirmAction" is loading
+ confirmAction.mockImplementation(() => new Promise(() => {}));
+
+ findNoteForm().vm.$emit('cancelForm', true, true);
+ findNoteForm().vm.$emit('cancelForm', true, true);
+
+ expect(confirmAction).toHaveBeenCalledTimes(1);
+ });
+
  it('should not ask for confirmation when one of the params is false', () => {
confirmAction.mockResolvedValueOnce(false);
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index f982749d1de..dfbe30e460b 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -49,22 +49,6 @@ describe('DiffView', () => {
return shallowMount(DiffView, { propsData, store, stubs });
};
- it('renders a match line', () => {
- const wrapper = createWrapper({
- diffLines: [
- {
- isMatchLineLeft: true,
- left: {
- rich_text: '@@ -4,12 +4,12 @@ import createFlash from &#39;~/flash&#39;;',
- lineDraft: {},
- },
- },
- ],
- });
- expect(wrapper.find(DiffExpansionCell).exists()).toBe(true);
- expect(wrapper.text()).toContain("@@ -4,12 +4,12 @@ import createFlash from '~/flash';");
- });
-
it.each`
type | side | container | sides | total
${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true }, right: { lineDraft: {}, renderDiscussion: true } }} | ${2}
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 8ae51a58819..6f55f76d7b5 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -30,13 +30,6 @@ describe('DiffsStoreUtils', () => {
});
});
- describe('getReversePosition', () => {
- it('should return correct line position name', () => {
- expect(utils.getReversePosition(LINE_POSITION_RIGHT)).toEqual(LINE_POSITION_LEFT);
- expect(utils.getReversePosition(LINE_POSITION_LEFT)).toEqual(LINE_POSITION_RIGHT);
- });
- });
-
describe('findIndexInInlineLines', () => {
const expectSet = (method, lines, invalidLines) => {
expect(method(lines, { oldLineNumber: 3, newLineNumber: 5 })).toEqual(4);
diff --git a/spec/frontend/editor/helpers.js b/spec/frontend/editor/helpers.js
index 252d783ad6d..48d83a87a6e 100644
--- a/spec/frontend/editor/helpers.js
+++ b/spec/frontend/editor/helpers.js
@@ -49,6 +49,12 @@ export const SEConstExt = () => {
};
};
+export const SEExtWithoutAPI = () => {
+ return {
+ extensionName: 'SEExtWithoutAPI',
+ };
+};
+
export class SEWithSetupExt {
static get extensionName() {
return 'SEWithSetupExt';
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index 628c34a27c1..c59806a5d60 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -38,6 +38,7 @@ const ajv = new Ajv({
strictTuples: false,
allowMatchingProperties: true,
});
+ajv.addKeyword('markdownDescription');
AjvFormats(ajv);
const schema = ajv.compile(CiSchema);
diff --git a/spec/frontend/editor/source_editor_extension_spec.js b/spec/frontend/editor/source_editor_extension_spec.js
index c5fa795f3b7..78453aaa491 100644
--- a/spec/frontend/editor/source_editor_extension_spec.js
+++ b/spec/frontend/editor/source_editor_extension_spec.js
@@ -54,6 +54,7 @@ describe('Editor Extension', () => {
${helpers.SEClassExtension} | ${['shared', 'classExtMethod']}
${helpers.SEFnExtension} | ${['fnExtMethod']}
${helpers.SEConstExt} | ${['constExtMethod']}
+ ${helpers.SEExtWithoutAPI} | ${[]}
`('correctly returns API for $definition', ({ definition, expectedKeys }) => {
const extension = new EditorExtension({ definition });
const expectedApi = Object.fromEntries(
diff --git a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
index 1926f3e268e..fe20c23e4d7 100644
--- a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
+++ b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
@@ -1,4 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
+import { Emitter } from 'monaco-editor';
+import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import {
@@ -64,7 +66,6 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
afterEach(() => {
instance.dispose();
- editorEl.remove();
mockAxios.restore();
resetHTMLFixture();
});
@@ -75,11 +76,47 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
actions: expect.any(Object),
shown: false,
modelChangeListener: undefined,
+ layoutChangeListener: {
+ dispose: expect.anything(),
+ },
path: previewMarkdownPath,
actionShowPreviewCondition: expect.any(Object),
});
});
+ describe('onDidLayoutChange', () => {
+ const emitter = new Emitter();
+ let layoutSpy;
+
+ useFakeRequestAnimationFrame();
+
+ beforeEach(() => {
+ instance.unuse(extension);
+ instance.onDidLayoutChange = emitter.event;
+ extension = instance.use({
+ definition: EditorMarkdownPreviewExtension,
+ setupOptions: { previewMarkdownPath },
+ });
+ layoutSpy = jest.spyOn(instance, 'layout');
+ });
+
+ it('does not trigger the layout when the preview is not active [default]', async () => {
+ expect(instance.markdownPreview.shown).toBe(false);
+ expect(layoutSpy).not.toHaveBeenCalled();
+ await emitter.fire();
+ expect(layoutSpy).not.toHaveBeenCalled();
+ });
+
+ it('triggers the layout if the preview panel is opened', async () => {
+ expect(layoutSpy).not.toHaveBeenCalled();
+ instance.togglePreview();
+ layoutSpy.mockReset();
+
+ await emitter.fire();
+ expect(layoutSpy).toHaveBeenCalledTimes(1);
+ });
+ });
+
describe('model change listener', () => {
let cleanupSpy;
let actionSpy;
@@ -111,6 +148,9 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
mockAxios.onPost().reply(200, { body: responseData });
await togglePreview();
});
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
it('removes the registered buttons from the toolbar', () => {
expect(instance.toolbar.removeItems).not.toHaveBeenCalled();
@@ -175,6 +215,31 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
instance.unuse(extension);
expect(newWidth === width / EXTENSION_MARKDOWN_PREVIEW_PANEL_WIDTH).toBe(true);
});
+
+ it('disposes the layoutChange listener and does not re-layout on layout changes', () => {
+ expect(instance.markdownPreview.layoutChangeListener).toBeDefined();
+ instance.unuse(extension);
+
+ expect(instance.markdownPreview?.layoutChangeListener).toBeUndefined();
+ });
+
+ it('does not trigger the re-layout after instance is unused', async () => {
+ const emitter = new Emitter();
+
+ instance.unuse(extension);
+ instance.onDidLayoutChange = emitter.event;
+
+ // we have to re-use the extension to pick up the emitter
+ extension = instance.use({
+ definition: EditorMarkdownPreviewExtension,
+ setupOptions: { previewMarkdownPath },
+ });
+ instance.unuse(extension);
+ const layoutSpy = jest.spyOn(instance, 'layout');
+
+ await emitter.fire();
+ expect(layoutSpy).not.toHaveBeenCalled();
+ });
});
describe('fetchPreview', () => {
diff --git a/spec/frontend/editor/source_editor_spec.js b/spec/frontend/editor/source_editor_spec.js
index b3d914e6755..74aae7b899b 100644
--- a/spec/frontend/editor/source_editor_spec.js
+++ b/spec/frontend/editor/source_editor_spec.js
@@ -92,7 +92,7 @@ describe('Base editor', () => {
expect(monacoEditor.createModel).toHaveBeenCalledWith(
blobContent,
- undefined,
+ 'markdown',
expect.objectContaining({
path: uriFilePath,
}),
@@ -117,7 +117,7 @@ describe('Base editor', () => {
expect(modelSpy).toHaveBeenCalledWith(
blobContent,
- undefined,
+ 'markdown',
expect.objectContaining({
path: uriFilePath,
}),
@@ -177,6 +177,29 @@ describe('Base editor', () => {
expect(layoutSpy).toHaveBeenCalled();
});
+
+ it.each`
+ params | expectedLanguage
+ ${{}} | ${'markdown'}
+ ${{ blobPath: undefined }} | ${'plaintext'}
+ ${{ blobPath: undefined, language: 'ruby' }} | ${'ruby'}
+ ${{ language: 'go' }} | ${'go'}
+ ${{ blobPath: undefined, language: undefined }} | ${'plaintext'}
+ `(
+ 'correctly sets $expectedLanguage on the model when $params are passed',
+ ({ params, expectedLanguage }) => {
+ jest.spyOn(monacoEditor, 'createModel');
+ editor.createInstance({
+ ...defaultArguments,
+ ...params,
+ });
+ expect(monacoEditor.createModel).toHaveBeenCalledWith(
+ expect.anything(),
+ expectedLanguage,
+ expect.anything(),
+ );
+ },
+ );
});
describe('instance of the Diff Editor', () => {
@@ -210,7 +233,7 @@ describe('Base editor', () => {
expect(modelSpy).toHaveBeenCalledTimes(2);
expect(modelSpy.mock.calls[0]).toEqual([
blobContent,
- undefined,
+ 'markdown',
expect.objectContaining({
path: uriFilePath,
}),
diff --git a/spec/frontend/editor/source_editor_webide_ext_spec.js b/spec/frontend/editor/source_editor_webide_ext_spec.js
new file mode 100644
index 00000000000..096b6b1646f
--- /dev/null
+++ b/spec/frontend/editor/source_editor_webide_ext_spec.js
@@ -0,0 +1,55 @@
+import { Emitter } from 'monaco-editor';
+import { setHTMLFixture } from 'helpers/fixtures';
+import { EditorWebIdeExtension } from '~/editor/extensions/source_editor_webide_ext';
+import SourceEditor from '~/editor/source_editor';
+
+describe('Source Editor Web IDE Extension', () => {
+ let editorEl;
+ let editor;
+ let instance;
+
+ beforeEach(() => {
+ setHTMLFixture('<div id="editor" data-editor-loading></div>');
+ editorEl = document.getElementById('editor');
+ editor = new SourceEditor();
+ });
+ afterEach(() => {});
+
+ describe('onSetup', () => {
+ it.each`
+ width | renderSideBySide
+ ${'0'} | ${false}
+ ${'699px'} | ${false}
+ ${'700px'} | ${true}
+ `(
+ "correctly renders the Diff Editor when the parent element's width is $width",
+ ({ width, renderSideBySide }) => {
+ editorEl.style.width = width;
+ instance = editor.createDiffInstance({ el: editorEl });
+
+ const sideBySideSpy = jest.spyOn(instance, 'updateOptions');
+ instance.use({ definition: EditorWebIdeExtension });
+
+ expect(sideBySideSpy).toBeCalledWith({ renderSideBySide });
+ },
+ );
+
+ it('re-renders the Diff Editor when layout of the modified editor is changed', async () => {
+ const emitter = new Emitter();
+ editorEl.style.width = '700px';
+
+ instance = editor.createDiffInstance({ el: editorEl });
+ instance.getModifiedEditor().onDidLayoutChange = emitter.event;
+ instance.use({ definition: EditorWebIdeExtension });
+
+ const sideBySideSpy = jest.spyOn(instance, 'updateOptions');
+ await emitter.fire();
+
+ expect(sideBySideSpy).toBeCalledWith({ renderSideBySide: true });
+
+ editorEl.style.width = '0px';
+ await emitter.fire();
+ expect(sideBySideSpy).toBeCalledWith({ renderSideBySide: false });
+ });
+ });
+});
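
The new spec pins down the Web IDE extension's side-by-side heuristic: render the diff editor side by side only when the host element is at least 700px wide, and re-evaluate whenever the modified editor's layout changes. A sketch of that logic under those assumptions (the threshold name and helper are illustrative, not the real extension code):

  // Sketch only; relies on the Monaco diff editor API used in the spec above
  // (updateOptions, getModifiedEditor, onDidLayoutChange).
  const MIN_SIDE_BY_SIDE_WIDTH = 700;

  function syncSideBySideLayout(diffInstance, editorEl) {
    const applyLayout = () =>
      diffInstance.updateOptions({
        renderSideBySide: editorEl.offsetWidth >= MIN_SIDE_BY_SIDE_WIDTH,
      });

    applyLayout();
    diffInstance.getModifiedEditor().onDidLayoutChange(applyLayout);
  }
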
diff --git a/spec/frontend/emoji/index_spec.js b/spec/frontend/emoji/index_spec.js
index cc037586496..dc8f50e0e4b 100644
--- a/spec/frontend/emoji/index_spec.js
+++ b/spec/frontend/emoji/index_spec.js
@@ -24,6 +24,7 @@ import isEmojiUnicodeSupported, {
isHorceRacingSkinToneComboEmoji,
isPersonZwjEmoji,
} from '~/emoji/support/is_emoji_unicode_supported';
+import { NEUTRAL_INTENT_MULTIPLIER } from '~/emoji/constants';
const emptySupportMap = {
personZwj: false,
@@ -436,14 +437,28 @@ describe('emoji', () => {
it.each([undefined, null, ''])("should return all emoji when the input is '%s'", (input) => {
const search = searchEmoji(input);
- const expected = Object.keys(validEmoji).map((name) => {
- return {
- emoji: mockEmojiData[name],
- field: 'd',
- fieldValue: mockEmojiData[name].d,
- score: 0,
- };
- });
+ const expected = Object.keys(validEmoji)
+ .map((name) => {
+ let score = NEUTRAL_INTENT_MULTIPLIER;
+
+ // Positive intent value retrieved from ~/emoji/intents.json
+ if (name === 'thumbsup') {
+ score = 0.5;
+ }
+
+ // Negative intent value retrieved from ~/emoji/intents.json
+ if (name === 'thumbsdown') {
+ score = 1.5;
+ }
+
+ return {
+ emoji: mockEmojiData[name],
+ field: 'd',
+ fieldValue: mockEmojiData[name].d,
+ score,
+ };
+ })
+ .sort(sortEmoji);
expect(search).toEqual(expected);
});
@@ -457,7 +472,7 @@ describe('emoji', () => {
name: 'atom',
field: 'e',
fieldValue: 'atom',
- score: 0,
+ score: NEUTRAL_INTENT_MULTIPLIER,
},
],
],
@@ -469,7 +484,7 @@ describe('emoji', () => {
name: 'atom',
field: 'alias',
fieldValue: 'atom_symbol',
- score: 4,
+ score: 16,
},
],
],
@@ -481,7 +496,7 @@ describe('emoji', () => {
name: 'atom',
field: 'alias',
fieldValue: 'atom_symbol',
- score: 0,
+ score: NEUTRAL_INTENT_MULTIPLIER,
},
],
],
@@ -509,7 +524,7 @@ describe('emoji', () => {
{
name: 'atom',
field: 'd',
- score: 0,
+ score: NEUTRAL_INTENT_MULTIPLIER,
},
],
],
@@ -521,7 +536,7 @@ describe('emoji', () => {
{
name: 'atom',
field: 'd',
- score: 0,
+ score: NEUTRAL_INTENT_MULTIPLIER,
},
],
],
@@ -533,7 +548,7 @@ describe('emoji', () => {
{
name: 'grey_question',
field: 'name',
- score: 5,
+ score: 32,
},
],
],
@@ -544,7 +559,7 @@ describe('emoji', () => {
{
name: 'grey_question',
field: 'd',
- score: 24,
+ score: 16777216,
},
],
],
@@ -553,14 +568,14 @@ describe('emoji', () => {
'heart',
[
{
- name: 'black_heart',
- field: 'd',
- score: 6,
- },
- {
name: 'heart',
field: 'name',
- score: 0,
+ score: NEUTRAL_INTENT_MULTIPLIER,
+ },
+ {
+ name: 'black_heart',
+ field: 'd',
+ score: 64,
},
],
],
@@ -569,14 +584,14 @@ describe('emoji', () => {
'HEART',
[
{
- name: 'black_heart',
- field: 'd',
- score: 6,
- },
- {
name: 'heart',
field: 'name',
- score: 0,
+ score: NEUTRAL_INTENT_MULTIPLIER,
+ },
+ {
+ name: 'black_heart',
+ field: 'd',
+ score: 64,
},
],
],
@@ -585,14 +600,30 @@ describe('emoji', () => {
'star',
[
{
+ name: 'star',
+ field: 'name',
+ score: NEUTRAL_INTENT_MULTIPLIER,
+ },
+ {
name: 'custard',
field: 'd',
- score: 2,
+ score: 4,
+ },
+ ],
+ ],
+ [
+ 'searching for emoji with intentions assigned',
+ 'thumbs',
+ [
+ {
+ name: 'thumbsup',
+ field: 'd',
+ score: 0.5,
},
{
- name: 'star',
- field: 'name',
- score: 0,
+ name: 'thumbsdown',
+ field: 'd',
+ score: 1.5,
},
],
],
@@ -619,10 +650,10 @@ describe('emoji', () => {
[
{ score: 10, fieldValue: '', emoji: { name: 'a' } },
{ score: 5, fieldValue: '', emoji: { name: 'b' } },
- { score: 0, fieldValue: '', emoji: { name: 'c' } },
+ { score: 1, fieldValue: '', emoji: { name: 'c' } },
],
[
- { score: 0, fieldValue: '', emoji: { name: 'c' } },
+ { score: 1, fieldValue: '', emoji: { name: 'c' } },
{ score: 5, fieldValue: '', emoji: { name: 'b' } },
{ score: 10, fieldValue: '', emoji: { name: 'a' } },
],
@@ -630,25 +661,25 @@ describe('emoji', () => {
[
'should correctly sort by fieldValue',
[
- { score: 0, fieldValue: 'y', emoji: { name: 'b' } },
- { score: 0, fieldValue: 'x', emoji: { name: 'a' } },
- { score: 0, fieldValue: 'z', emoji: { name: 'c' } },
+ { score: 1, fieldValue: 'y', emoji: { name: 'b' } },
+ { score: 1, fieldValue: 'x', emoji: { name: 'a' } },
+ { score: 1, fieldValue: 'z', emoji: { name: 'c' } },
],
[
- { score: 0, fieldValue: 'x', emoji: { name: 'a' } },
- { score: 0, fieldValue: 'y', emoji: { name: 'b' } },
- { score: 0, fieldValue: 'z', emoji: { name: 'c' } },
+ { score: 1, fieldValue: 'x', emoji: { name: 'a' } },
+ { score: 1, fieldValue: 'y', emoji: { name: 'b' } },
+ { score: 1, fieldValue: 'z', emoji: { name: 'c' } },
],
],
[
'should correctly sort by score and then by fieldValue (in order)',
[
{ score: 5, fieldValue: 'y', emoji: { name: 'c' } },
- { score: 0, fieldValue: 'z', emoji: { name: 'a' } },
+ { score: 1, fieldValue: 'z', emoji: { name: 'a' } },
{ score: 5, fieldValue: 'x', emoji: { name: 'b' } },
],
[
- { score: 0, fieldValue: 'z', emoji: { name: 'a' } },
+ { score: 1, fieldValue: 'z', emoji: { name: 'a' } },
{ score: 5, fieldValue: 'x', emoji: { name: 'b' } },
{ score: 5, fieldValue: 'y', emoji: { name: 'c' } },
],
@@ -656,7 +687,7 @@ describe('emoji', () => {
];
it.each(testCases)('%s', (_, scoredItems, expected) => {
- expect(sortEmoji(scoredItems)).toEqual(expected);
+ expect(scoredItems.sort(sortEmoji)).toEqual(expected);
});
});
});
diff --git a/spec/frontend/emoji/utils_spec.js b/spec/frontend/emoji/utils_spec.js
new file mode 100644
index 00000000000..397388ca0ae
--- /dev/null
+++ b/spec/frontend/emoji/utils_spec.js
@@ -0,0 +1,15 @@
+import { getEmojiScoreWithIntent } from '~/emoji/utils';
+
+describe('Utils', () => {
+ describe('getEmojiScoreWithIntent', () => {
+ it.each`
+ emojiName | baseScore | finalScore
+ ${'thumbsup'} | ${1} | ${1}
+ ${'thumbsdown'} | ${1} | ${3}
+ ${'neutralemoji'} | ${1} | ${2}
+ ${'zerobaseemoji'} | ${0} | ${1}
+ `('returns the correct score for $emojiName', ({ emojiName, baseScore, finalScore }) => {
+ expect(getEmojiScoreWithIntent(emojiName, baseScore)).toBe(finalScore);
+ });
+ });
+});
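
The expected values in this new spec are consistent with an exponential base score scaled by a per-emoji intent multiplier (0.5 for positive, 1.5 for negative, 1 for neutral, the values quoted from ~/emoji/intents.json in index_spec.js above). A sketch under those assumptions:

  // Sketch only; the multiplier table stands in for ~/emoji/intents.json.
  const INTENT_MULTIPLIERS = { thumbsup: 0.5, thumbsdown: 1.5 };
  const NEUTRAL_INTENT_MULTIPLIER = 1;

  function getEmojiScoreWithIntent(emojiName, baseScore) {
    const multiplier = INTENT_MULTIPLIERS[emojiName] ?? NEUTRAL_INTENT_MULTIPLIER;
    // 2 ** 0 * 1 === 1, matching the 'zerobaseemoji' row above.
    return 2 ** baseScore * multiplier;
  }
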
diff --git a/spec/frontend/environments/deploy_board_wrapper_spec.js b/spec/frontend/environments/deploy_board_wrapper_spec.js
index c8e6df4d324..49eed68fa11 100644
--- a/spec/frontend/environments/deploy_board_wrapper_spec.js
+++ b/spec/frontend/environments/deploy_board_wrapper_spec.js
@@ -57,7 +57,7 @@ describe('~/environments/components/deploy_board_wrapper.vue', () => {
it('is collapsed by default', () => {
expect(collapse.attributes('visible')).toBeUndefined();
- expect(icon.props('name')).toBe('angle-right');
+ expect(icon.props('name')).toBe('chevron-lg-right');
});
it('opens on click', async () => {
@@ -65,7 +65,7 @@ describe('~/environments/components/deploy_board_wrapper.vue', () => {
expect(button.attributes('aria-label')).toBe(__('Collapse'));
expect(collapse.attributes('visible')).toBe('visible');
- expect(icon.props('name')).toBe('angle-down');
+ expect(icon.props('name')).toBe('chevron-lg-down');
const deployBoard = findDeployBoard();
expect(deployBoard.exists()).toBe(true);
diff --git a/spec/frontend/environments/environment_folder_spec.js b/spec/frontend/environments/environment_folder_spec.js
index 37b897bf65d..48624f2324b 100644
--- a/spec/frontend/environments/environment_folder_spec.js
+++ b/spec/frontend/environments/environment_folder_spec.js
@@ -82,7 +82,7 @@ describe('~/environments/components/environments_folder.vue', () => {
expect(collapse.attributes('visible')).toBeUndefined();
const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
- expect(iconNames).toEqual(['angle-right', 'folder-o']);
+ expect(iconNames).toEqual(['chevron-lg-right', 'folder-o']);
expect(folderName.classes('gl-font-weight-bold')).toBe(false);
expect(link.exists()).toBe(false);
});
@@ -95,7 +95,7 @@ describe('~/environments/components/environments_folder.vue', () => {
expect(button.attributes('aria-label')).toBe(__('Collapse'));
expect(collapse.attributes('visible')).toBe('visible');
const iconNames = icons.wrappers.map((i) => i.props('name')).slice(0, 2);
- expect(iconNames).toEqual(['angle-down', 'folder-open']);
+ expect(iconNames).toEqual(['chevron-lg-down', 'folder-open']);
expect(folderName.classes('gl-font-weight-bold')).toBe(true);
expect(link.attributes('href')).toBe(nestedEnvironment.latest.folderPath);
diff --git a/spec/frontend/environments/new_environment_item_spec.js b/spec/frontend/environments/new_environment_item_spec.js
index cf0c8a7e7ca..a151595bf64 100644
--- a/spec/frontend/environments/new_environment_item_spec.js
+++ b/spec/frontend/environments/new_environment_item_spec.js
@@ -374,7 +374,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
it('is collapsed by default', () => {
expect(collapse.attributes('visible')).toBeUndefined();
- expect(icon.props('name')).toEqual('angle-right');
+ expect(icon.props('name')).toBe('chevron-lg-right');
expect(environmentName.classes('gl-font-weight-bold')).toBe(false);
});
@@ -385,7 +385,7 @@ describe('~/environments/components/new_environment_item.vue', () => {
expect(button.attributes('aria-label')).toBe(__('Collapse'));
expect(collapse.attributes('visible')).toBe('visible');
- expect(icon.props('name')).toEqual('angle-down');
+ expect(icon.props('name')).toBe('chevron-lg-down');
expect(environmentName.classes('gl-font-weight-bold')).toBe(true);
expect(findDeployment().isVisible()).toBe(true);
});
diff --git a/spec/frontend/error_tracking_settings/components/app_spec.js b/spec/frontend/error_tracking_settings/components/app_spec.js
index 4a0bbb1acbe..c660c9c4a99 100644
--- a/spec/frontend/error_tracking_settings/components/app_spec.js
+++ b/spec/frontend/error_tracking_settings/components/app_spec.js
@@ -177,7 +177,7 @@ describe('error tracking settings app', () => {
const clipBoardButton = findDsnSettings().findComponent(ClipboardButton);
expect(clipBoardInput.props('value')).toBe(TEST_GITLAB_DSN);
- expect(clipBoardInput.attributes('readonly')).toBeTruthy();
+ expect(clipBoardInput.attributes('readonly')).toBe('');
expect(clipBoardButton.props('text')).toBe(TEST_GITLAB_DSN);
});
});
diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
index 9c1657bc0d2..688ba54f919 100644
--- a/spec/frontend/feature_flags/components/new_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
@@ -61,7 +61,7 @@ describe('New feature flag form', () => {
});
it('renders form title', () => {
- expect(wrapper.find('h3').text()).toEqual('New feature flag');
+ expect(wrapper.text()).toContain('New feature flag');
});
it('should render feature flag form', () => {
diff --git a/spec/frontend/fixtures/services.rb b/spec/frontend/fixtures/integrations.rb
index f0bb8fb962f..1bafb0bfe78 100644
--- a/spec/frontend/fixtures/services.rb
+++ b/spec/frontend/fixtures/integrations.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :controller do
+RSpec.describe Projects::Settings::IntegrationsController, '(JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
- let(:project) { create(:project_empty_repo, namespace: namespace, path: 'services-project') }
+ let(:project) { create(:project_empty_repo, namespace: namespace, path: 'integrations-project') }
let!(:service) { create(:custom_issue_tracker_integration, project: project) }
let(:user) { project.first_owner }
@@ -20,7 +20,7 @@ RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :con
remove_repository(project)
end
- it 'services/edit_service.html' do
+ it 'settings/integrations/edit.html' do
get :edit, params: {
namespace_id: namespace,
project_id: project,
diff --git a/spec/frontend/fixtures/prometheus_service.rb b/spec/frontend/fixtures/prometheus_integration.rb
index aed73dc1096..883dbb929a2 100644
--- a/spec/frontend/fixtures/prometheus_service.rb
+++ b/spec/frontend/fixtures/prometheus_integration.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :controller do
+RSpec.describe Projects::Settings::IntegrationsController, '(JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
- let(:project) { create(:project_empty_repo, namespace: namespace, path: 'services-project') }
+ let(:project) { create(:project_empty_repo, namespace: namespace, path: 'integrations-project') }
let!(:integration) { create(:prometheus_integration, project: project) }
let(:user) { project.first_owner }
@@ -20,7 +20,7 @@ RSpec.describe Projects::ServicesController, '(JavaScript fixtures)', type: :con
remove_repository(project)
end
- it 'services/prometheus/prometheus_service.html' do
+ it 'integrations/prometheus/prometheus_integration.html' do
get :edit, params: {
namespace_id: namespace,
project_id: project,
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index e17e73a93c4..a79982fa647 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -26,6 +26,12 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
remove_repository(project)
end
+ before do
+ allow(Gitlab::Ci::RunnerUpgradeCheck.instance)
+ .to receive(:check_runner_upgrade_status)
+ .and_return(:not_available)
+ end
+
describe do
before do
sign_in(admin)
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
index 8220ea16342..eef5dc86c1a 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
@@ -117,7 +117,7 @@ describe('FrequentItemsListItemComponent', () => {
link.vm.$emit('click');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_link', {
- label: 'projects_dropdown_frequent_items_list_item',
+ label: 'projects_dropdown_frequent_items_list_item_git_lab_community_edition',
});
});
});
diff --git a/spec/frontend/google_tag_manager/index_spec.js b/spec/frontend/google_tag_manager/index_spec.js
index 6412fe8bb33..50811f43fc3 100644
--- a/spec/frontend/google_tag_manager/index_spec.js
+++ b/spec/frontend/google_tag_manager/index_spec.js
@@ -13,6 +13,7 @@ import {
trackCheckout,
trackTransaction,
trackAddToCartUsageTab,
+ getNamespaceId,
} from '~/google_tag_manager';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { logError } from '~/lib/logger';
@@ -401,6 +402,7 @@ describe('~/google_tag_manager/index', () => {
{
brand: 'GitLab',
category: 'DevOps',
+ dimension36: 'not available',
id,
name,
price: revenue.toString(),
@@ -478,4 +480,26 @@ describe('~/google_tag_manager/index', () => {
resetHTMLFixture();
});
});
+
+ describe('when getting the namespace_id from Snowplow standard context', () => {
+ describe('when window.gl.snowplowStandardContext.data.namespace_id has a value', () => {
+ beforeEach(() => {
+ window.gl = { snowplowStandardContext: { data: { namespace_id: '321' } } };
+ });
+
+ it('returns the value', () => {
+ expect(getNamespaceId()).toBe('321');
+ });
+ });
+
+ describe('when window.gl.snowplowStandardContext.data.namespace_id is undefined', () => {
+ beforeEach(() => {
+ window.gl = {};
+ });
+
+ it('returns a placeholder value', () => {
+ expect(getNamespaceId()).toBe('not available');
+ });
+ });
+ });
});
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 848e50c86ba..9e4666ffc70 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -10,8 +10,10 @@ import groupItemComponent from '~/groups/components/group_item.vue';
import eventHub from '~/groups/event_hub';
import GroupsService from '~/groups/service/groups_service';
import GroupsStore from '~/groups/store/groups_store';
+import EmptyState from '~/groups/components/empty_state.vue';
import axios from '~/lib/utils/axios_utils';
import * as urlUtilities from '~/lib/utils/url_utility';
+import setWindowLocation from 'helpers/set_window_location_helper';
import {
mockEndpoint,
@@ -38,17 +40,23 @@ describe('AppComponent', () => {
const store = new GroupsStore({ hideProjects: false });
const service = new GroupsService(mockEndpoint);
- const createShallowComponent = (hideProjects = false) => {
+ const createShallowComponent = ({ propsData = {}, provide = {} } = {}) => {
store.state.pageInfo = mockPageInfo;
wrapper = shallowMount(appComponent, {
propsData: {
store,
service,
- hideProjects,
+ hideProjects: false,
+ containerId: 'js-groups-tree',
+ ...propsData,
},
mocks: {
$toast,
},
+ provide: {
+ renderEmptyState: false,
+ ...provide,
+ },
});
vm = wrapper.vm;
};
@@ -64,6 +72,14 @@ describe('AppComponent', () => {
Vue.component('GroupFolder', groupFolderComponent);
Vue.component('GroupItem', groupItemComponent);
+ document.body.innerHTML = `
+ <div id="js-groups-tree">
+ <div class="empty-state hidden" data-testid="legacy-empty-state">
+ <p>There are no projects shared with this group yet</p>
+ </div>
+ </div>
+ `;
+
createShallowComponent();
getGroupsSpy = jest.spyOn(vm.service, 'getGroups');
await nextTick();
@@ -386,7 +402,10 @@ describe('AppComponent', () => {
expect(vm.store.setSearchedGroups).toHaveBeenCalledWith(mockGroups);
});
- it('should set `isSearchEmpty` prop based on groups count', () => {
+ it('should set `isSearchEmpty` prop based on groups count and `filter` query param', () => {
+ setWindowLocation('?filter=foobar');
+ createShallowComponent();
+
vm.updateGroups(mockGroups);
expect(vm.isSearchEmpty).toBe(false);
@@ -395,6 +414,47 @@ describe('AppComponent', () => {
expect(vm.isSearchEmpty).toBe(true);
});
+
+ describe.each`
+ action | groups | fromSearch | renderEmptyState | expected
+ ${'subgroups_and_projects'} | ${[]} | ${false} | ${true} | ${true}
+ ${''} | ${[]} | ${false} | ${true} | ${false}
+ ${'subgroups_and_projects'} | ${mockGroups} | ${false} | ${true} | ${false}
+ ${'subgroups_and_projects'} | ${[]} | ${true} | ${true} | ${false}
+ `(
+ 'when `action` is $action, `groups` is $groups, `fromSearch` is $fromSearch, and `renderEmptyState` is $renderEmptyState',
+ ({ action, groups, fromSearch, renderEmptyState, expected }) => {
+ it(expected ? 'renders empty state' : 'does not render empty state', async () => {
+ createShallowComponent({
+ propsData: { action },
+ provide: { renderEmptyState },
+ });
+
+ vm.updateGroups(groups, fromSearch);
+
+ await nextTick();
+
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(expected);
+ });
+ },
+ );
+ });
+
+ describe('when `action` is subgroups_and_projects, `groups` is [], `fromSearch` is `false`, and `renderEmptyState` is `false`', () => {
+ it('renders legacy empty state', async () => {
+ createShallowComponent({
+ propsData: { action: 'subgroups_and_projects' },
+ provide: { renderEmptyState: false },
+ });
+
+ vm.updateGroups([], false);
+
+ await nextTick();
+
+ expect(
+ document.querySelector('[data-testid="legacy-empty-state"]').classList.contains('hidden'),
+ ).toBe(false);
+ });
});
});
@@ -419,7 +479,7 @@ describe('AppComponent', () => {
});
it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `true`', async () => {
- createShallowComponent(true);
+ createShallowComponent({ propsData: { hideProjects: true } });
await nextTick();
expect(vm.searchEmptyMessage).toBe('No groups matched your search');
});
diff --git a/spec/frontend/groups/components/empty_state_spec.js b/spec/frontend/groups/components/empty_state_spec.js
new file mode 100644
index 00000000000..c0e71e814d0
--- /dev/null
+++ b/spec/frontend/groups/components/empty_state_spec.js
@@ -0,0 +1,78 @@
+import { GlEmptyState } from '@gitlab/ui';
+
+import { mountExtended } from 'jest/__helpers__/vue_test_utils_helper';
+import EmptyState from '~/groups/components/empty_state.vue';
+
+let wrapper;
+
+const defaultProvide = {
+ newProjectIllustration: '/assets/illustrations/project-create-new-sm.svg',
+ newProjectPath: '/projects/new?namespace_id=231',
+ newSubgroupIllustration: '/assets/illustrations/group-new.svg',
+ newSubgroupPath: '/groups/new?parent_id=231',
+ emptySubgroupIllustration: '/assets/illustrations/empty-state/empty-subgroup-md.svg',
+ canCreateSubgroups: true,
+ canCreateProjects: true,
+};
+
+const createComponent = ({ provide = {} } = {}) => {
+ wrapper = mountExtended(EmptyState, {
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
+ });
+};
+
+afterEach(() => {
+ wrapper.destroy();
+});
+
+const findNewSubgroupLink = () =>
+ wrapper.findByRole('link', {
+ name: new RegExp(EmptyState.i18n.withLinks.subgroup.title),
+ });
+const findNewProjectLink = () =>
+ wrapper.findByRole('link', {
+ name: new RegExp(EmptyState.i18n.withLinks.project.title),
+ });
+const findNewSubgroupIllustration = () =>
+ wrapper.findByRole('img', { name: EmptyState.i18n.withLinks.subgroup.title });
+const findNewProjectIllustration = () =>
+ wrapper.findByRole('img', { name: EmptyState.i18n.withLinks.project.title });
+
+describe('EmptyState', () => {
+ describe('when user has permission to create a subgroup', () => {
+ it('renders `Create new subgroup` link', () => {
+ createComponent();
+
+ expect(findNewSubgroupLink().attributes('href')).toBe(defaultProvide.newSubgroupPath);
+ expect(findNewSubgroupIllustration().attributes('src')).toBe(
+ defaultProvide.newSubgroupIllustration,
+ );
+ });
+ });
+
+ describe('when user has permission to create a project', () => {
+ it('renders `Create new project` link', () => {
+ createComponent();
+
+ expect(findNewProjectLink().attributes('href')).toBe(defaultProvide.newProjectPath);
+ expect(findNewProjectIllustration().attributes('src')).toBe(
+ defaultProvide.newProjectIllustration,
+ );
+ });
+ });
+
+ describe('when user does not have permissions to create a project or a subgroup', () => {
+ it('renders empty state', () => {
+ createComponent({ provide: { canCreateSubgroups: false, canCreateProjects: false } });
+
+ expect(wrapper.find(GlEmptyState).props()).toMatchObject({
+ title: EmptyState.i18n.withoutLinks.title,
+ description: EmptyState.i18n.withoutLinks.description,
+ svgPath: defaultProvide.emptySubgroupIllustration,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/groups/components/group_name_and_path_spec.js b/spec/frontend/groups/components/group_name_and_path_spec.js
new file mode 100644
index 00000000000..eaa0801ab50
--- /dev/null
+++ b/spec/frontend/groups/components/group_name_and_path_spec.js
@@ -0,0 +1,347 @@
+import { merge } from 'lodash';
+import { GlAlert } from '@gitlab/ui';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import GroupNameAndPath from '~/groups/components/group_name_and_path.vue';
+import { getGroupPathAvailability } from '~/rest_api';
+import { createAlert } from '~/flash';
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+jest.mock('~/flash');
+jest.mock('~/rest_api', () => ({
+ getGroupPathAvailability: jest.fn(),
+}));
+
+describe('GroupNameAndPath', () => {
+ let wrapper;
+
+ const mockGroupName = 'My awesome group';
+ const mockGroupUrl = 'my-awesome-group';
+ const mockGroupUrlSuggested = 'my-awesome-group1';
+
+ const defaultProvide = {
+ basePath: 'http://gitlab.com/',
+ fields: {
+ name: { name: 'group[name]', id: 'group_name', value: '' },
+ path: {
+ name: 'group[path]',
+ id: 'group_path',
+ value: '',
+ maxLength: 255,
+ pattern: '[a-zA-Z0-9_\\.][a-zA-Z0-9_\\-\\.]*[a-zA-Z0-9_\\-]|[a-zA-Z0-9_]',
+ },
+ parentId: { name: 'group[parent_id]', id: 'group_parent_id', value: '1' },
+ groupId: { name: 'group[id]', id: 'group_id', value: '' },
+ },
+ mattermostEnabled: false,
+ };
+
+ const createComponent = ({ provide = {} } = {}) => {
+ wrapper = mountExtended(GroupNameAndPath, { provide: merge({}, defaultProvide, provide) });
+ };
+ const createComponentEditGroup = ({ path = mockGroupUrl } = {}) => {
+ createComponent({
+ provide: { fields: { groupId: { value: '1' }, path: { value: path } } },
+ });
+ };
+
+ const findGroupNameField = () => wrapper.findByLabelText(GroupNameAndPath.i18n.inputs.name.label);
+ const findGroupUrlField = () => wrapper.findByLabelText(GroupNameAndPath.i18n.inputs.path.label);
+ const findAlert = () => extendedWrapper(wrapper.findComponent(GlAlert));
+
+ const apiMockAvailablePath = () => {
+ getGroupPathAvailability.mockResolvedValue({
+ data: { exists: false, suggests: [] },
+ });
+ };
+ const apiMockUnavailablePath = (suggests = [mockGroupUrlSuggested]) => {
+ getGroupPathAvailability.mockResolvedValue({
+ data: { exists: true, suggests },
+ });
+ };
+ const apiMockLoading = () => {
+ getGroupPathAvailability.mockImplementation(() => new Promise(() => {}));
+ };
+
+ const expectLoadingMessageExists = () => {
+ expect(wrapper.findByText(GroupNameAndPath.i18n.apiLoadingMessage).exists()).toBe(true);
+ };
+
+ describe('when user types in the `Group name` field', () => {
+ describe('when creating a new group', () => {
+ it('updates `Group URL` field as user types', async () => {
+ createComponent();
+
+ await findGroupNameField().setValue(mockGroupName);
+
+ expect(findGroupUrlField().element.value).toBe(mockGroupUrl);
+ });
+ });
+
+ describe('when editing a group', () => {
+ it('does not update `Group URL` field and does not call API', async () => {
+ const groupUrl = 'foo-bar';
+
+ createComponentEditGroup({ path: groupUrl });
+
+ await findGroupNameField().setValue(mockGroupName);
+
+ expect(findGroupUrlField().element.value).toBe(groupUrl);
+ expect(getGroupPathAvailability).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when `Group URL` field has been manually entered', () => {
+ it('does not update `Group URL` field and does not call API', async () => {
+ apiMockAvailablePath();
+
+ createComponent();
+
+ await findGroupUrlField().setValue(mockGroupUrl);
+ await waitForPromises();
+
+ getGroupPathAvailability.mockClear();
+
+ await findGroupNameField().setValue('Foo bar');
+
+ expect(findGroupUrlField().element.value).toBe(mockGroupUrl);
+ expect(getGroupPathAvailability).not.toHaveBeenCalled();
+ });
+ });
+
+ it('shows loading message', async () => {
+ apiMockLoading();
+
+ createComponent();
+
+ await findGroupNameField().setValue(mockGroupName);
+
+ expectLoadingMessageExists();
+ });
+
+ describe('when path is available', () => {
+ it('does not update `Group URL` field', async () => {
+ apiMockAvailablePath();
+
+ createComponent();
+
+ await findGroupNameField().setValue(mockGroupName);
+
+ expect(getGroupPathAvailability).toHaveBeenCalledWith(
+ mockGroupUrl,
+ defaultProvide.fields.parentId.value,
+ { signal: expect.any(AbortSignal) },
+ );
+
+ await waitForPromises();
+
+ expect(findGroupUrlField().element.value).toBe(mockGroupUrl);
+ });
+ });
+
+ describe('when path is not available', () => {
+ it('updates `Group URL` field', async () => {
+ apiMockUnavailablePath();
+
+ createComponent();
+
+ await findGroupNameField().setValue(mockGroupName);
+ await waitForPromises();
+
+ expect(findGroupUrlField().element.value).toBe(mockGroupUrlSuggested);
+ });
+ });
+
+ describe('when API returns no suggestions', () => {
+ it('calls `createAlert`', async () => {
+ apiMockUnavailablePath([]);
+
+ createComponent();
+
+ await findGroupNameField().setValue(mockGroupName);
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: GroupNameAndPath.i18n.apiErrorMessage,
+ });
+ });
+ });
+
+ describe('when API call fails', () => {
+ it('calls `createAlert`', async () => {
+ getGroupPathAvailability.mockRejectedValue({});
+
+ createComponent();
+
+ await findGroupNameField().setValue(mockGroupName);
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: GroupNameAndPath.i18n.apiErrorMessage,
+ });
+ });
+ });
+
+ describe('when multiple API calls are in-flight', () => {
+ it('aborts the first API call and resolves second API call', async () => {
+ apiMockLoading();
+ apiMockUnavailablePath();
+ const abortSpy = jest.spyOn(AbortController.prototype, 'abort');
+
+ createComponent();
+
+ await findGroupNameField().setValue('Foo');
+ await findGroupNameField().setValue(mockGroupName);
+ await waitForPromises();
+
+ expect(createAlert).not.toHaveBeenCalled();
+ expect(findGroupUrlField().element.value).toBe(mockGroupUrlSuggested);
+ expect(abortSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when `Group URL` is empty', () => {
+ it('does not call API', async () => {
+ createComponent({
+ provide: { fields: { name: { value: mockGroupName }, path: mockGroupUrl } },
+ });
+
+ await findGroupNameField().setValue('');
+
+ expect(getGroupPathAvailability).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('when `Group name` field is invalid', () => {
+ it('shows error message', async () => {
+ createComponent();
+
+ await findGroupNameField().trigger('invalid');
+
+ expect(wrapper.findByText(GroupNameAndPath.i18n.inputs.name.invalidFeedback).exists()).toBe(
+ true,
+ );
+ });
+ });
+
+ describe('when user types in `Group URL` field', () => {
+ it('shows loading message', async () => {
+ apiMockLoading();
+
+ createComponent();
+
+ await findGroupUrlField().setValue(mockGroupUrl);
+
+ expectLoadingMessageExists();
+ });
+
+ describe('when path is available', () => {
+ it('displays success message', async () => {
+ apiMockAvailablePath();
+
+ createComponent();
+
+ await findGroupUrlField().setValue(mockGroupUrl);
+ await waitForPromises();
+
+ expect(wrapper.findByText(GroupNameAndPath.i18n.inputs.path.validFeedback).exists()).toBe(
+ true,
+ );
+ });
+ });
+
+ describe('when path is not available', () => {
+ it('displays error message and updates `Group URL` field', async () => {
+ apiMockUnavailablePath();
+
+ createComponent();
+
+ await findGroupUrlField().setValue(mockGroupUrl);
+ await waitForPromises();
+
+ expect(
+ wrapper
+ .findByText(GroupNameAndPath.i18n.inputs.path.invalidFeedbackPathUnavailable)
+ .exists(),
+ ).toBe(true);
+ expect(findGroupUrlField().element.value).toBe(mockGroupUrlSuggested);
+ });
+ });
+
+ describe('when editing a group', () => {
+ it('calls API if `Group URL` does not equal the original `Group URL`', async () => {
+ const groupUrl = 'foo-bar';
+
+ apiMockAvailablePath();
+
+ createComponentEditGroup({ path: groupUrl });
+
+ await findGroupUrlField().setValue('foo-bar1');
+ await waitForPromises();
+
+ expect(getGroupPathAvailability).toHaveBeenCalled();
+ expect(wrapper.findByText(GroupNameAndPath.i18n.inputs.path.validFeedback).exists()).toBe(
+ true,
+ );
+
+ getGroupPathAvailability.mockClear();
+
+ await findGroupUrlField().setValue('foo-bar');
+
+ expect(getGroupPathAvailability).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('when `Group URL` field is invalid', () => {
+ it('shows error message', async () => {
+ createComponent();
+
+ await findGroupUrlField().trigger('invalid');
+
+ expect(
+ wrapper
+ .findByText(GroupNameAndPath.i18n.inputs.path.invalidFeedbackInvalidPattern)
+ .exists(),
+ ).toBe(true);
+ });
+ });
+
+ describe('mattermost', () => {
+ it('adds `data-bind-in` attribute when enabled', () => {
+ createComponent({ provide: { mattermostEnabled: true } });
+
+ expect(findGroupUrlField().attributes('data-bind-in')).toBe(
+ GroupNameAndPath.mattermostDataBindName,
+ );
+ });
+
+ it('does not add `data-bind-in` attribute when disabled', () => {
+ createComponent();
+
+ expect(findGroupUrlField().attributes('data-bind-in')).toBeUndefined();
+ });
+ });
+
+ describe('when editing a group', () => {
+ it('shows warning alert with `Learn more` link', () => {
+ createComponentEditGroup();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().findByRole('link', { name: 'Learn more' }).attributes('href')).toBe(
+ helpPagePath('user/group/index', {
+ anchor: 'change-a-groups-path',
+ }),
+ );
+ });
+
+ it('shows `Group ID` field', () => {
+ createComponentEditGroup();
+
+ expect(
+ wrapper.findByLabelText(GroupNameAndPath.i18n.inputs.groupId.label).element.value,
+ ).toBe('1');
+ });
+ });
+});
diff --git a/spec/frontend/groups/components/item_caret_spec.js b/spec/frontend/groups/components/item_caret_spec.js
index cbe1f21d6e2..4bf92bb5642 100644
--- a/spec/frontend/groups/components/item_caret_spec.js
+++ b/spec/frontend/groups/components/item_caret_spec.js
@@ -35,8 +35,8 @@ describe('ItemCaret', () => {
it.each`
isGroupOpen | icon
- ${true} | ${'angle-down'}
- ${false} | ${'angle-right'}
+ ${true} | ${'chevron-down'}
+ ${false} | ${'chevron-right'}
`('renders "$icon" icon when `isGroupOpen` is $isGroupOpen', ({ isGroupOpen, icon }) => {
createComponent({
isGroupOpen,
diff --git a/spec/frontend/helpers/startup_css_helper_spec.js b/spec/frontend/helpers/startup_css_helper_spec.js
index 2236b5aa261..05161437c22 100644
--- a/spec/frontend/helpers/startup_css_helper_spec.js
+++ b/spec/frontend/helpers/startup_css_helper_spec.js
@@ -59,9 +59,10 @@ describe('waitForCSSLoaded', () => {
<link href="two.css" data-startupcss="loading">
`);
const events = waitForCSSLoaded(mockedCallback);
- document
- .querySelectorAll('[data-startupcss="loading"]')
- .forEach((elem) => elem.setAttribute('data-startupcss', 'loaded'));
+ document.querySelectorAll('[data-startupcss="loading"]').forEach((elem) => {
+ // eslint-disable-next-line no-param-reassign
+ elem.dataset.startupcss = 'loaded';
+ });
document.dispatchEvent(new CustomEvent('CSSStartupLinkLoaded'));
await events;
diff --git a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
index 6e4c66cb780..d77e8e3d04c 100644
--- a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
@@ -68,7 +68,7 @@ describe('IDE commit editor header', () => {
it('calls discardFileChanges if dialog result is confirmed', () => {
expect(store.dispatch).not.toHaveBeenCalled();
- findDiscardModal().vm.$emit('ok');
+ findDiscardModal().vm.$emit('primary');
expect(store.dispatch).toHaveBeenCalledWith('discardFileChanges', TEST_FILE_PATH);
});
diff --git a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
index 64b53264b4d..2a455c9d7c1 100644
--- a/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/new_merge_request_option_spec.js
@@ -1,193 +1,97 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import { projectData, branches } from 'jest/ide/mock_data';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { GlFormCheckbox } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NewMergeRequestOption from '~/ide/components/commit_sidebar/new_merge_request_option.vue';
-import { PERMISSION_CREATE_MR } from '~/ide/constants';
import { createStore } from '~/ide/stores';
-import {
- COMMIT_TO_CURRENT_BRANCH,
- COMMIT_TO_NEW_BRANCH,
-} from '~/ide/stores/modules/commit/constants';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-describe('create new MR checkbox', () => {
- let store;
- let vm;
-
- const setMR = () => {
- vm.$store.state.currentMergeRequestId = '1';
- vm.$store.state.projects[store.state.currentProjectId].mergeRequests[
- store.state.currentMergeRequestId
- ] = { foo: 'bar' };
- };
-
- const setPermissions = (permissions) => {
- store.state.projects[store.state.currentProjectId].userPermissions = permissions;
- };
-
- const createComponent = ({ currentBranchId = 'main', createNewBranch = false } = {}) => {
- const Component = Vue.extend(NewMergeRequestOption);
-
- vm = createComponentWithStore(Component, store);
-
- vm.$store.state.commit.commitAction = createNewBranch
- ? COMMIT_TO_NEW_BRANCH
- : COMMIT_TO_CURRENT_BRANCH;
+Vue.use(Vuex);
- vm.$store.state.currentBranchId = currentBranchId;
-
- store.state.projects.abcproject.branches[currentBranchId] = branches.find(
- (branch) => branch.name === currentBranchId,
- );
-
- return vm.$mount();
- };
+describe('NewMergeRequestOption component', () => {
+ let store;
+ let wrapper;
- const findInput = () => vm.$el.querySelector('input[type="checkbox"]');
- const findLabel = () => vm.$el.querySelector('.js-ide-commit-new-mr');
+ const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findFieldset = () => wrapper.findByTestId('new-merge-request-fieldset');
+ const findTooltip = () => getBinding(findFieldset().element, 'gl-tooltip');
- beforeEach(() => {
+ const createComponent = ({
+ shouldHideNewMrOption = false,
+ shouldDisableNewMrOption = false,
+ shouldCreateMR = false,
+ } = {}) => {
store = createStore();
- store.state.currentProjectId = 'abcproject';
-
- const proj = JSON.parse(JSON.stringify(projectData));
- proj.userPermissions[PERMISSION_CREATE_MR] = true;
- Vue.set(store.state.projects, 'abcproject', proj);
- });
+ wrapper = shallowMountExtended(NewMergeRequestOption, {
+ store: {
+ ...store,
+ getters: {
+ 'commit/shouldHideNewMrOption': shouldHideNewMrOption,
+ 'commit/shouldDisableNewMrOption': shouldDisableNewMrOption,
+ 'commit/shouldCreateMR': shouldCreateMR,
+ },
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- describe('for default branch', () => {
- describe('is rendered when pushing to a new branch', () => {
- beforeEach(() => {
- createComponent({
- currentBranchId: 'main',
- createNewBranch: true,
- });
- });
-
- it('has NO new MR', () => {
- expect(vm.$el.textContent).not.toBe('');
- });
-
- it('has new MR', async () => {
- setMR();
-
- await nextTick();
- expect(vm.$el.textContent).not.toBe('');
- });
+ describe('when the `shouldHideNewMrOption` getter returns false', () => {
+ beforeEach(() => {
+ createComponent();
+ jest.spyOn(store, 'dispatch').mockImplementation();
});
- describe('is NOT rendered when pushing to the same branch', () => {
- beforeEach(() => {
- createComponent({
- currentBranchId: 'main',
- createNewBranch: false,
- });
- });
-
- it('has NO new MR', () => {
- expect(vm.$el.textContent).toBe('');
- });
-
- it('has new MR', async () => {
- setMR();
-
- await nextTick();
- expect(vm.$el.textContent).toBe('');
- });
+ it('renders an enabled new MR checkbox', () => {
+ expect(findCheckbox().attributes('disabled')).toBeUndefined();
});
- });
- describe('for protected branch', () => {
- describe('when user does not have the write access', () => {
- beforeEach(() => {
- createComponent({
- currentBranchId: 'protected/no-access',
- });
- });
-
- it('is rendered if MR does not exists', () => {
- expect(vm.$el.textContent).not.toBe('');
- });
+ it("doesn't add `is-disabled` class to the fieldset", () => {
+ expect(findFieldset().classes()).not.toContain('is-disabled');
+ });
- it('is rendered if MR exists', async () => {
- setMR();
+ it('dispatches toggleShouldCreateMR when clicking checkbox', () => {
+ findCheckbox().vm.$emit('change');
- await nextTick();
- expect(vm.$el.textContent).not.toBe('');
- });
+ expect(store.dispatch).toHaveBeenCalledWith('commit/toggleShouldCreateMR', undefined);
});
- describe('when user has the write access', () => {
+ describe('when user cannot create an MR', () => {
beforeEach(() => {
createComponent({
- currentBranchId: 'protected/access',
+ shouldDisableNewMrOption: true,
});
});
- it('is rendered if MR does not exist', () => {
- expect(vm.$el.textContent).not.toBe('');
+ it('disables the new MR checkbox', () => {
+ expect(findCheckbox().attributes('disabled')).toBe('true');
});
- it('is hidden if MR exists', async () => {
- setMR();
+ it('adds `is-disabled` class to the fieldset', () => {
+ expect(findFieldset().classes()).toContain('is-disabled');
+ });
- await nextTick();
- expect(vm.$el.textContent).toBe('');
+ it('shows a tooltip', () => {
+ expect(findTooltip().value).toBe(wrapper.vm.$options.i18n.tooltipText);
});
});
});
- describe('for regular branch', () => {
+ describe('when the `shouldHideNewMrOption` getter returns true', () => {
beforeEach(() => {
createComponent({
- currentBranchId: 'regular',
+ shouldHideNewMrOption: true,
});
});
- it('is rendered if no MR exists', () => {
- expect(vm.$el.textContent).not.toBe('');
- });
-
- it('is hidden if MR exists', async () => {
- setMR();
-
- await nextTick();
- expect(vm.$el.textContent).toBe('');
- });
-
- it('shows enablded checkbox', () => {
- expect(findLabel().classList.contains('is-disabled')).toBe(false);
- expect(findInput().disabled).toBe(false);
+ it("doesn't render the new MR checkbox", () => {
+ expect(findCheckbox().exists()).toBe(false);
});
});
-
- describe('when user cannot create MR', () => {
- beforeEach(() => {
- setPermissions({ [PERMISSION_CREATE_MR]: false });
-
- createComponent({ currentBranchId: 'regular' });
- });
-
- it('disabled checkbox', () => {
- expect(findLabel().classList.contains('is-disabled')).toBe(true);
- expect(findInput().disabled).toBe(true);
- });
- });
-
- it('dispatches toggleShouldCreateMR when clicking checkbox', () => {
- createComponent({
- currentBranchId: 'regular',
- });
- const el = vm.$el.querySelector('input[type="checkbox"]');
- jest.spyOn(vm.$store, 'dispatch').mockImplementation(() => {});
- el.dispatchEvent(new Event('change'));
-
- expect(vm.$store.dispatch.mock.calls).toEqual(
- expect.arrayContaining([['commit/toggleShouldCreateMR', expect.any(Object)]]),
- );
- });
});
diff --git a/spec/frontend/ide/components/ide_side_bar_spec.js b/spec/frontend/ide/components/ide_side_bar_spec.js
index ace8988b8c9..4469c3fc901 100644
--- a/spec/frontend/ide/components/ide_side_bar_spec.js
+++ b/spec/frontend/ide/components/ide_side_bar_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlSkeletonLoader } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
@@ -47,7 +47,7 @@ describe('IdeSidebar', () => {
await nextTick();
- expect(wrapper.findAll(GlSkeletonLoading)).toHaveLength(3);
+ expect(wrapper.findAll(GlSkeletonLoader)).toHaveLength(3);
});
describe('deferred rendering components', () => {
diff --git a/spec/frontend/ide/components/ide_status_bar_spec.js b/spec/frontend/ide/components/ide_status_bar_spec.js
index 00ef75fcf3a..17a5aa17b1f 100644
--- a/spec/frontend/ide/components/ide_status_bar_spec.js
+++ b/spec/frontend/ide/components/ide_status_bar_spec.js
@@ -11,6 +11,8 @@ const TEST_PROJECT_ID = 'abcproject';
const TEST_MERGE_REQUEST_ID = '9001';
const TEST_MERGE_REQUEST_URL = `${TEST_HOST}merge-requests/${TEST_MERGE_REQUEST_ID}`;
+jest.mock('~/lib/utils/poll');
+
describe('ideStatusBar', () => {
let store;
let vm;
diff --git a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
index d1cf9f2e248..45444166a50 100644
--- a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
+++ b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
@@ -35,7 +35,7 @@ exports[`IDE pipeline stage renders stage details & icon 1`] = `
<gl-icon-stub
class="ide-stage-collapse-icon"
- name="angle-down"
+ name="chevron-lg-down"
size="16"
/>
</div>
diff --git a/spec/frontend/ide/components/jobs/detail/description_spec.js b/spec/frontend/ide/components/jobs/detail/description_spec.js
index 786a7661d97..128ccff6568 100644
--- a/spec/frontend/ide/components/jobs/detail/description_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/description_spec.js
@@ -28,6 +28,12 @@ describe('IDE job description', () => {
).not.toBe(null);
});
+ it('renders a borderless CI icon', () => {
+ expect(
+ vm.$el.querySelector('.borderless [data-testid="status_success_borderless-icon"]'),
+ ).not.toBe(null);
+ });
+
it('renders bridge job details without the job link', () => {
vm = mountComponent(Component, {
job: { ...jobs[0], path: undefined },
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 9a30fd5f5c3..b44651481e9 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -1,8 +1,8 @@
-import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { editor as monacoEditor, Range } from 'monaco-editor';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
import '~/behaviors/markdown/render_gfm';
import waitForPromises from 'helpers/wait_for_promises';
import { exampleConfigs, exampleFiles } from 'jest/ide/lib/editorconfig/mock_data';
@@ -11,57 +11,54 @@ import { EditorMarkdownExtension } from '~/editor/extensions/source_editor_markd
import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_editor_markdown_livepreview_ext';
import SourceEditor from '~/editor/source_editor';
import RepoEditor from '~/ide/components/repo_editor.vue';
-import {
- leftSidebarViews,
- FILE_VIEW_MODE_EDITOR,
- FILE_VIEW_MODE_PREVIEW,
- viewerTypes,
-} from '~/ide/constants';
+import { leftSidebarViews, FILE_VIEW_MODE_PREVIEW, viewerTypes } from '~/ide/constants';
import ModelManager from '~/ide/lib/common/model_manager';
import service from '~/ide/services';
import { createStoreOptions } from '~/ide/stores';
import axios from '~/lib/utils/axios_utils';
import ContentViewer from '~/vue_shared/components/content_viewer/content_viewer.vue';
import SourceEditorInstance from '~/editor/source_editor_instance';
-import { spyOnApi } from 'jest/editor/helpers';
import { file } from '../helpers';
const PREVIEW_MARKDOWN_PATH = '/foo/bar/preview_markdown';
const CURRENT_PROJECT_ID = 'gitlab-org/gitlab';
-const defaultFileProps = {
- ...file('file.txt'),
- content: 'hello world',
- active: true,
- tempFile: true,
+const dummyFile = {
+ text: {
+ ...file('file.txt'),
+ content: 'hello world',
+ active: true,
+ tempFile: true,
+ },
+ markdown: {
+ ...file('sample.md'),
+ projectId: 'namespace/project',
+ path: 'sample.md',
+ content: 'hello world',
+ tempFile: true,
+ active: true,
+ },
+ binary: {
+ ...file('file.dat'),
+    content: '🐱', // non-ascii binary content,
+ tempFile: true,
+ active: true,
+ },
+ empty: {
+ ...file('empty'),
+ tempFile: false,
+ content: '',
+ raw: '',
+ },
};
+
const createActiveFile = (props) => {
return {
- ...defaultFileProps,
+ ...dummyFile.text,
...props,
};
};
-const dummyFile = {
- markdown: (() =>
- createActiveFile({
- projectId: 'namespace/project',
- path: 'sample.md',
- name: 'sample.md',
- }))(),
- binary: (() =>
- createActiveFile({
- name: 'file.dat',
-      content: '🐱', // non-ascii binary content,
- }))(),
- empty: (() =>
- createActiveFile({
- tempFile: false,
- content: '',
- raw: '',
- }))(),
-};
-
const prepareStore = (state, activeFile) => {
const localState = {
openFiles: [activeFile],
@@ -109,7 +106,7 @@ describe('RepoEditor', () => {
vm.$once('editorSetup', resolve);
});
- const createComponent = async ({ state = {}, activeFile = defaultFileProps } = {}) => {
+ const createComponent = async ({ state = {}, activeFile = dummyFile.text } = {}) => {
const store = prepareStore(state, activeFile);
wrapper = shallowMount(RepoEditor, {
store,
@@ -187,7 +184,7 @@ describe('RepoEditor', () => {
mock = new MockAdapter(axios);
mock.onPost(/(.*)\/preview_markdown/).reply(200, {
- body: `<p>${defaultFileProps.content}</p>`,
+ body: `<p>${dummyFile.text.content}</p>`,
});
});
@@ -196,11 +193,8 @@ describe('RepoEditor', () => {
});
describe('when files is markdown', () => {
- let layoutSpy;
-
beforeEach(async () => {
await createComponent({ activeFile });
- layoutSpy = jest.spyOn(wrapper.vm.editor, 'layout');
});
it('renders an Edit and a Preview Tab', () => {
@@ -214,11 +208,7 @@ describe('RepoEditor', () => {
it('renders markdown for tempFile', async () => {
findPreviewTab().trigger('click');
await waitForPromises();
- expect(wrapper.find(ContentViewer).html()).toContain(defaultFileProps.content);
- });
-
- it('should not trigger layout', async () => {
- expect(layoutSpy).not.toHaveBeenCalled();
+ expect(wrapper.find(ContentViewer).html()).toContain(dummyFile.text.content);
});
describe('when file changes to non-markdown file', () => {
@@ -229,10 +219,6 @@ describe('RepoEditor', () => {
it('should hide tabs', () => {
expect(findTabs()).toHaveLength(0);
});
-
- it('should trigger refresh dimensions', async () => {
- expect(layoutSpy).toHaveBeenCalledTimes(1);
- });
});
});
@@ -292,55 +278,20 @@ describe('RepoEditor', () => {
expect(vm.editor.methods[fn]).toBe('EditorWebIde');
});
});
-
- it.each`
- prefix | activeFile | viewer | shouldHaveMarkdownExtension
- ${'Should not'} | ${createActiveFile()} | ${viewerTypes.edit} | ${false}
- ${'Should'} | ${dummyFile.markdown} | ${viewerTypes.edit} | ${true}
- ${'Should not'} | ${dummyFile.empty} | ${viewerTypes.edit} | ${false}
- ${'Should not'} | ${createActiveFile()} | ${viewerTypes.diff} | ${false}
- ${'Should not'} | ${dummyFile.markdown} | ${viewerTypes.diff} | ${false}
- ${'Should not'} | ${dummyFile.empty} | ${viewerTypes.diff} | ${false}
- ${'Should not'} | ${createActiveFile()} | ${viewerTypes.mr} | ${false}
- ${'Should not'} | ${dummyFile.markdown} | ${viewerTypes.mr} | ${false}
- ${'Should not'} | ${dummyFile.empty} | ${viewerTypes.mr} | ${false}
- `(
- '$prefix install markdown extension for $activeFile.name in $viewer viewer',
- async ({ activeFile, viewer, shouldHaveMarkdownExtension } = {}) => {
- await createComponent({ state: { viewer }, activeFile });
-
- if (shouldHaveMarkdownExtension) {
- expect(applyExtensionSpy).toHaveBeenCalledWith({
- definition: EditorMarkdownPreviewExtension,
- setupOptions: { previewMarkdownPath: PREVIEW_MARKDOWN_PATH },
- });
- // TODO: spying on extensions causes Jest to blow up, so we have to assert on
- // the public property the extension adds, as opposed to the args passed to the ctor
- expect(wrapper.vm.editor.markdownPreview.path).toBe(PREVIEW_MARKDOWN_PATH);
- } else {
- expect(applyExtensionSpy).not.toHaveBeenCalledWith(
- wrapper.vm.editor,
- expect.any(EditorMarkdownExtension),
- );
- }
- },
- );
});
describe('setupEditor', () => {
- beforeEach(async () => {
+ it('creates new model on load', async () => {
await createComponent();
- });
-
- it('creates new model on load', () => {
// We always create two models per file to be able to build a diff of changes
expect(createModelSpy).toHaveBeenCalledTimes(2);
// The model with the most recent changes is the last one
const [content] = createModelSpy.mock.calls[1];
- expect(content).toBe(defaultFileProps.content);
+ expect(content).toBe(dummyFile.text.content);
});
- it('does not create a new model on subsequent calls to setupEditor and re-uses the already-existing model', () => {
+ it('does not create a new model on subsequent calls to setupEditor and re-uses the already-existing model', async () => {
+ await createComponent();
const existingModel = vm.model;
createModelSpy.mockClear();
@@ -350,7 +301,8 @@ describe('RepoEditor', () => {
expect(vm.model).toBe(existingModel);
});
- it('updates state with the value of the model', () => {
+ it('updates state with the value of the model', async () => {
+ await createComponent();
const newContent = 'As Gregor Samsa\n awoke one morning\n';
vm.model.setValue(newContent);
@@ -359,7 +311,8 @@ describe('RepoEditor', () => {
expect(vm.file.content).toBe(newContent);
});
- it('sets head model as staged file', () => {
+ it('sets head model as staged file', async () => {
+ await createComponent();
vm.modelManager.dispose();
const addModelSpy = jest.spyOn(ModelManager.prototype, 'addModel');
@@ -371,52 +324,54 @@ describe('RepoEditor', () => {
expect(addModelSpy).toHaveBeenCalledWith(vm.file, vm.$store.state.stagedFiles[0]);
});
- });
-
- describe('editor updateDimensions', () => {
- let updateDimensionsSpy;
- beforeEach(async () => {
- await createComponent();
- const ext = extensionsStore.get('EditorWebIde');
- updateDimensionsSpy = jest.fn();
- spyOnApi(ext, {
- updateDimensions: updateDimensionsSpy,
- });
- });
- it('calls updateDimensions only when panelResizing is false', async () => {
- expect(updateDimensionsSpy).not.toHaveBeenCalled();
- expect(vm.$store.state.panelResizing).toBe(false); // default value
-
- vm.$store.state.panelResizing = true;
- await nextTick();
-
- expect(updateDimensionsSpy).not.toHaveBeenCalled();
-
- vm.$store.state.panelResizing = false;
- await nextTick();
-
- expect(updateDimensionsSpy).toHaveBeenCalledTimes(1);
-
- vm.$store.state.panelResizing = true;
- await nextTick();
-
- expect(updateDimensionsSpy).toHaveBeenCalledTimes(1);
- });
-
- it('calls updateDimensions when rightPane is toggled', async () => {
- expect(updateDimensionsSpy).not.toHaveBeenCalled();
- expect(vm.$store.state.rightPane.isOpen).toBe(false); // default value
-
- vm.$store.state.rightPane.isOpen = true;
- await nextTick();
+ it.each`
+ prefix | activeFile | viewer | shouldHaveMarkdownExtension
+ ${'Should not'} | ${dummyFile.text} | ${viewerTypes.edit} | ${false}
+ ${'Should'} | ${dummyFile.markdown} | ${viewerTypes.edit} | ${true}
+ ${'Should not'} | ${dummyFile.empty} | ${viewerTypes.edit} | ${false}
+ ${'Should not'} | ${dummyFile.text} | ${viewerTypes.diff} | ${false}
+ ${'Should not'} | ${dummyFile.markdown} | ${viewerTypes.diff} | ${false}
+ ${'Should not'} | ${dummyFile.empty} | ${viewerTypes.diff} | ${false}
+ ${'Should not'} | ${dummyFile.text} | ${viewerTypes.mr} | ${false}
+ ${'Should not'} | ${dummyFile.markdown} | ${viewerTypes.mr} | ${false}
+ ${'Should not'} | ${dummyFile.empty} | ${viewerTypes.mr} | ${false}
+ `(
+ '$prefix install markdown extension for $activeFile.name in $viewer viewer',
+ async ({ activeFile, viewer, shouldHaveMarkdownExtension } = {}) => {
+ await createComponent({ state: { viewer }, activeFile });
- expect(updateDimensionsSpy).toHaveBeenCalledTimes(1);
+ if (shouldHaveMarkdownExtension) {
+ expect(applyExtensionSpy).toHaveBeenCalledWith({
+ definition: EditorMarkdownPreviewExtension,
+ setupOptions: { previewMarkdownPath: PREVIEW_MARKDOWN_PATH },
+ });
+ // TODO: spying on extensions causes Jest to blow up, so we have to assert on
+ // the public property the extension adds, as opposed to the args passed to the ctor
+ expect(wrapper.vm.editor.markdownPreview.path).toBe(PREVIEW_MARKDOWN_PATH);
+ } else {
+ expect(applyExtensionSpy).not.toHaveBeenCalledWith(
+ wrapper.vm.editor,
+ expect.any(EditorMarkdownExtension),
+ );
+ }
+ },
+ );
- vm.$store.state.rightPane.isOpen = false;
- await nextTick();
+ it('fetches the live preview extension even if markdown is not the first opened file', async () => {
+ const textFile = dummyFile.text;
+ const mdFile = dummyFile.markdown;
+ const previewExtConfig = {
+ definition: EditorMarkdownPreviewExtension,
+ setupOptions: { previewMarkdownPath: PREVIEW_MARKDOWN_PATH },
+ };
+ await createComponent({ activeFile: textFile });
+ applyExtensionSpy.mockClear();
+
+ await wrapper.setProps({ file: mdFile });
+ await waitForPromises();
- expect(updateDimensionsSpy).toHaveBeenCalledTimes(2);
+ expect(applyExtensionSpy).toHaveBeenCalledWith(previewExtConfig);
});
});
@@ -439,7 +394,6 @@ describe('RepoEditor', () => {
});
describe('files in preview mode', () => {
- let updateDimensionsSpy;
const changeViewMode = (viewMode) =>
vm.$store.dispatch('editor/updateFileEditor', {
path: vm.file.path,
@@ -451,12 +405,6 @@ describe('RepoEditor', () => {
activeFile: dummyFile.markdown,
});
- const ext = extensionsStore.get('EditorWebIde');
- updateDimensionsSpy = jest.fn();
- spyOnApi(ext, {
- updateDimensions: updateDimensionsSpy,
- });
-
changeViewMode(FILE_VIEW_MODE_PREVIEW);
await nextTick();
});
@@ -465,15 +413,6 @@ describe('RepoEditor', () => {
expect(vm.showEditor).toBe(false);
expect(findEditor().isVisible()).toBe(false);
});
-
- it('updates dimensions when switching view back to edit', async () => {
- expect(updateDimensionsSpy).not.toHaveBeenCalled();
-
- changeViewMode(FILE_VIEW_MODE_EDITOR);
- await nextTick();
-
- expect(updateDimensionsSpy).toHaveBeenCalled();
- });
});
describe('initEditor', () => {
@@ -487,7 +426,7 @@ describe('RepoEditor', () => {
it('does not fetch file information for temp entries', async () => {
await createComponent({
- activeFile: createActiveFile(),
+ activeFile: dummyFile.text,
});
expect(vm.getFileData).not.toHaveBeenCalled();
@@ -506,7 +445,7 @@ describe('RepoEditor', () => {
it('does not initialize editor for files already with content when shouldHideEditor is `true`', async () => {
await createComponent({
- activeFile: createActiveFile(),
+ activeFile: dummyFile.text,
});
await hideEditorAndRunFn();
@@ -677,9 +616,6 @@ describe('RepoEditor', () => {
activeFile: setFileName('bar.md'),
});
- vm.setupEditor();
-
- await waitForPromises();
// set cursor to line 2, column 1
vm.editor.setSelection(new Range(2, 1, 2, 1));
vm.editor.focus();
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 1939e43e5dc..0279ad454d2 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -122,7 +122,7 @@ describe('import table', () => {
});
await waitForPromises();
- expect(wrapper.find(GlEmptyState).props().title).toBe('You have no groups to import');
+ expect(wrapper.find(GlEmptyState).props().title).toBe(i18n.NO_GROUPS_FOUND);
});
});
@@ -297,7 +297,7 @@ describe('import table', () => {
wrapper.find(PaginationLinks).props().change(REQUESTED_PAGE);
await waitForPromises();
- expect(wrapper.text()).toContain('Showing 21-21 of 38 groups from');
+ expect(wrapper.text()).toContain('Showing 21-21 of 38 groups that you own from');
});
});
@@ -349,7 +349,9 @@ describe('import table', () => {
await setFilter(FILTER_VALUE);
await waitForPromises();
- expect(wrapper.text()).toContain('Showing 1-1 of 40 groups matching filter "foo" from');
+ expect(wrapper.text()).toContain(
+ 'Showing 1-1 of 40 groups that you own matching filter "foo" from',
+ );
});
it('properly resets filter in graphql query when search box is cleared', async () => {
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
index a556f3c17f3..356480f931e 100644
--- a/spec/frontend/incidents/components/incidents_list_spec.js
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -85,7 +85,6 @@ describe('Incidents List', () => {
assigneeUsernameQuery: '',
slaFeatureAvailable: true,
canCreateIncident: true,
- incidentEscalationsAvailable: true,
...provide,
},
stubs: {
@@ -211,20 +210,6 @@ describe('Incidents List', () => {
expect(status.classes('gl-text-truncate')).toBe(true);
});
});
-
- describe('when feature is disabled', () => {
- beforeEach(() => {
- mountComponent({
- data: { incidents: { list: mockIncidents }, incidentsCount },
- provide: { incidentEscalationsAvailable: false },
- loading: false,
- });
- });
-
- it('is absent if feature flag is disabled', () => {
- expect(findEscalationStatus().length).toBe(0);
- });
- });
});
it('contains a link to the incident details page', async () => {
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
index 7e24aa439d4..fae93196d2c 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
@@ -57,12 +57,12 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
</gl-button-stub>
<gl-modal-stub
+ actioncancel="[object Object]"
+ actionprimary="[object Object]"
arialabel=""
dismisslabel="Close"
modalclass=""
modalid="resetWebhookModal"
- ok-title="Reset webhook URL"
- ok-variant="danger"
size="md"
title="Reset webhook URL"
titletag="h4"
diff --git a/spec/frontend/incidents_settings/components/pagerduty_form_spec.js b/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
index d2b591d427d..521a861829b 100644
--- a/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
+++ b/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
@@ -47,7 +47,7 @@ describe('Alert integration settings form', () => {
resetWebhookUrl.mockResolvedValueOnce({
data: { pagerduty_webhook_url: newWebhookUrl },
});
- findModal().vm.$emit('ok');
+ findModal().vm.$emit('primary');
await waitForPromises();
expect(resetWebhookUrl).toHaveBeenCalled();
expect(findWebhookInput().attributes('value')).toBe(newWebhookUrl);
@@ -56,7 +56,7 @@ describe('Alert integration settings form', () => {
it('should show error message and NOT reset webhook url', async () => {
resetWebhookUrl.mockRejectedValueOnce();
- findModal().vm.$emit('ok');
+ findModal().vm.$emit('primary');
await waitForPromises();
expect(findAlert().attributes('variant')).toBe('danger');
});
diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
index b4c5d4f9957..fa91f8de45a 100644
--- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
@@ -11,7 +11,6 @@ describe('JiraIssuesFields', () => {
const defaultProps = {
showJiraVulnerabilitiesIntegration: true,
- upgradePlanPath: 'https://gitlab.com',
};
const createComponent = ({
diff --git a/spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js b/spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js
deleted file mode 100644
index e90e9a5d2ac..00000000000
--- a/spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js
+++ /dev/null
@@ -1,31 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import JiraUpgradeCta from '~/integrations/edit/components/jira_upgrade_cta.vue';
-
-describe('JiraUpgradeCta', () => {
- let wrapper;
-
- const contentMessage = 'Upgrade your plan to enable this feature of the Jira Integration.';
-
- const createComponent = (propsData) => {
- wrapper = shallowMount(JiraUpgradeCta, {
- propsData,
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('displays the correct message for premium and lower users', () => {
- createComponent({ showPremiumMessage: true });
- expect(wrapper.text()).toContain('This is a Premium feature');
- expect(wrapper.text()).toContain(contentMessage);
- });
-
- it('displays the correct message for ultimate and lower users', () => {
- createComponent({ showUltimateMessage: true });
- expect(wrapper.text()).toContain('This is an Ultimate feature');
- expect(wrapper.text()).toContain(contentMessage);
- });
-});
diff --git a/spec/frontend/integrations/edit/components/sections/configuration_spec.js b/spec/frontend/integrations/edit/components/sections/configuration_spec.js
new file mode 100644
index 00000000000..e697212ea0b
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/sections/configuration_spec.js
@@ -0,0 +1,57 @@
+import { shallowMount } from '@vue/test-utils';
+
+import IntegrationSectionConfiguration from '~/integrations/edit/components/sections/configuration.vue';
+import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
+import { createStore } from '~/integrations/edit/store';
+
+import { mockIntegrationProps } from '../../mock_data';
+
+describe('IntegrationSectionConfiguration', () => {
+ let wrapper;
+
+ const createComponent = ({ customStateProps = {}, props = {} } = {}) => {
+ const store = createStore({
+ customState: { ...mockIntegrationProps, ...customStateProps },
+ });
+    wrapper = shallowMount(IntegrationSectionConfiguration, {
+ propsData: { ...props },
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findAllDynamicFields = () => wrapper.findAllComponents(DynamicField);
+
+ describe('template', () => {
+ describe('DynamicField', () => {
+ it('renders DynamicField for each field', () => {
+ const fields = [
+ { name: 'username', type: 'text' },
+ { name: 'API token', type: 'password' },
+ ];
+
+ createComponent({
+ props: {
+ fields,
+ },
+ });
+
+ const dynamicFields = findAllDynamicFields();
+
+ expect(dynamicFields).toHaveLength(2);
+ dynamicFields.wrappers.forEach((field, index) => {
+ expect(field.props()).toMatchObject(fields[index]);
+ });
+ });
+
+ it('does not render DynamicField when field is empty', () => {
+ createComponent();
+
+ expect(findAllDynamicFields()).toHaveLength(0);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/sections/trigger_spec.js b/spec/frontend/integrations/edit/components/sections/trigger_spec.js
new file mode 100644
index 00000000000..883f5c7bf79
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/sections/trigger_spec.js
@@ -0,0 +1,38 @@
+import { shallowMount } from '@vue/test-utils';
+
+import IntegrationSectionTrigger from '~/integrations/edit/components/sections/trigger.vue';
+import TriggerField from '~/integrations/edit/components/trigger_field.vue';
+import { createStore } from '~/integrations/edit/store';
+
+import { mockIntegrationProps } from '../../mock_data';
+
+describe('IntegrationSectionTrigger', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ const store = createStore({
+ customState: { ...mockIntegrationProps },
+ });
+ wrapper = shallowMount(IntegrationSectionTrigger, {
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findAllTriggerFields = () => wrapper.findAllComponents(TriggerField);
+
+ describe('template', () => {
+ it('renders correct number of TriggerField components', () => {
+ createComponent();
+
+ const fields = findAllTriggerFields();
+ expect(fields.length).toBe(mockIntegrationProps.triggerEvents.length);
+ fields.wrappers.forEach((field, index) => {
+ expect(field.props('event')).toBe(mockIntegrationProps.triggerEvents[index]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/trigger_field_spec.js b/spec/frontend/integrations/edit/components/trigger_field_spec.js
new file mode 100644
index 00000000000..6a68337813e
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/trigger_field_spec.js
@@ -0,0 +1,71 @@
+import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlFormCheckbox } from '@gitlab/ui';
+
+import TriggerField from '~/integrations/edit/components/trigger_field.vue';
+import { integrationTriggerEventTitles } from '~/integrations/constants';
+
+describe('TriggerField', () => {
+ let wrapper;
+
+ const defaultProps = {
+ event: { name: 'push_events' },
+ };
+
+ const createComponent = ({ props = {}, isInheriting = false } = {}) => {
+ wrapper = shallowMount(TriggerField, {
+ propsData: { ...defaultProps, ...props },
+ computed: {
+ isInheriting: () => isInheriting,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findGlFormCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findHiddenInput = () => wrapper.find('input[type="hidden"]');
+
+ describe('template', () => {
+ it('renders enabled GlFormCheckbox', () => {
+ createComponent();
+
+ expect(findGlFormCheckbox().attributes('disabled')).toBeUndefined();
+ });
+
+ it('when isInheriting is true, renders disabled GlFormCheckbox', () => {
+ createComponent({ isInheriting: true });
+
+ expect(findGlFormCheckbox().attributes('disabled')).toBe('true');
+ });
+
+ it('renders correct title', () => {
+ createComponent();
+
+ expect(findGlFormCheckbox().text()).toMatchInterpolatedText(
+ integrationTriggerEventTitles[defaultProps.event.name],
+ );
+ });
+
+ it('sets default value for hidden input', () => {
+ createComponent();
+
+ expect(findHiddenInput().attributes('value')).toBe('false');
+ });
+
+ it('toggles value of hidden input on checkbox input', async () => {
+ createComponent({
+ props: { event: { name: 'push_events', value: true } },
+ });
+      await nextTick();
+
+ expect(findHiddenInput().attributes('value')).toBe('true');
+
+ await findGlFormCheckbox().vm.$emit('input', false);
+
+ expect(findHiddenInput().attributes('value')).toBe('false');
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/mock_data.js b/spec/frontend/integrations/edit/mock_data.js
index ac0c7d244e3..c276d2e7364 100644
--- a/spec/frontend/integrations/edit/mock_data.js
+++ b/spec/frontend/integrations/edit/mock_data.js
@@ -9,7 +9,10 @@ export const mockIntegrationProps = {
initialEnableComments: false,
},
jiraIssuesProps: {},
- triggerEvents: [],
+ triggerEvents: [
+ { name: 'push_events', title: 'Push', value: true },
+ { name: 'issues_events', title: 'Issue', value: true },
+ ],
sections: [],
fields: [],
type: '',
diff --git a/spec/frontend/invite_members/components/invite_members_trigger_spec.js b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
index 28402c8331c..c522abe63c5 100644
--- a/spec/frontend/invite_members/components/invite_members_trigger_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
@@ -2,7 +2,11 @@ import { GlButton, GlLink, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
import eventHub from '~/invite_members/event_hub';
-import { TRIGGER_ELEMENT_BUTTON, TRIGGER_ELEMENT_SIDE_NAV } from '~/invite_members/constants';
+import {
+ TRIGGER_ELEMENT_BUTTON,
+ TRIGGER_ELEMENT_SIDE_NAV,
+ TRIGGER_DEFAULT_QA_SELECTOR,
+} from '~/invite_members/constants';
jest.mock('~/experimentation/experiment_tracking');
@@ -50,12 +54,24 @@ describe.each(triggerItems)('with triggerElement as %s', (triggerItem) => {
wrapper.destroy();
});
- describe('displayText', () => {
+ describe('configurable attributes', () => {
it('includes the correct displayText for the button', () => {
createComponent();
expect(findButton().text()).toBe(displayText);
});
+
+ it('uses the default qa selector value', () => {
+ createComponent();
+
+ expect(findButton().attributes('data-qa-selector')).toBe(TRIGGER_DEFAULT_QA_SELECTOR);
+ });
+
+ it('sets the qa selector value', () => {
+ createComponent({ qaSelector: '_qaSelector_' });
+
+ expect(findButton().attributes('data-qa-selector')).toBe('_qaSelector_');
+ });
});
describe('clicking the link', () => {
diff --git a/spec/frontend/invite_members/components/invite_modal_base_spec.js b/spec/frontend/invite_members/components/invite_modal_base_spec.js
index 010f7b999fc..cc19e90a5fa 100644
--- a/spec/frontend/invite_members/components/invite_modal_base_spec.js
+++ b/spec/frontend/invite_members/components/invite_modal_base_spec.js
@@ -200,6 +200,30 @@ describe('InviteModalBase', () => {
});
});
+ describe('when user limit is close on a personal namespace', () => {
+ beforeEach(() => {
+ createComponent(
+ {
+ closeToLimit: true,
+ reachedLimit: false,
+ usersLimitDataset: { membersPath, userNamespace: true },
+ },
+ { GlModal, GlFormGroup },
+ );
+ });
+
+ it('renders correct buttons', () => {
+ const cancelButton = findCancelButton();
+ const actionButton = findActionButton();
+
+ expect(cancelButton.text()).toBe(INVITE_BUTTON_TEXT_DISABLED);
+ expect(cancelButton.attributes('href')).toBe(membersPath);
+
+ expect(actionButton.text()).toBe(INVITE_BUTTON_TEXT);
+ expect(actionButton.attributes('href')).toBeUndefined(); // default submit button
+ });
+ });
+
describe('when users limit is not reached', () => {
const textRegex = /Select a role.+Read more about role permissions Access expiration date \(optional\)/;
diff --git a/spec/frontend/invite_members/components/user_limit_notification_spec.js b/spec/frontend/invite_members/components/user_limit_notification_spec.js
index 4c9adbfcc44..bbc17932a49 100644
--- a/spec/frontend/invite_members/components/user_limit_notification_spec.js
+++ b/spec/frontend/invite_members/components/user_limit_notification_spec.js
@@ -14,9 +14,15 @@ describe('UserLimitNotification', () => {
const findAlert = () => wrapper.findComponent(GlAlert);
- const createComponent = (reachedLimit = false, usersLimitDataset = {}) => {
+ const createComponent = (
+ closeToLimit = false,
+ reachedLimit = false,
+ usersLimitDataset = {},
+ props = {},
+ ) => {
wrapper = shallowMountExtended(UserLimitNotification, {
propsData: {
+ closeToLimit,
reachedLimit,
usersLimitDataset: {
freeUsersLimit,
@@ -25,6 +31,7 @@ describe('UserLimitNotification', () => {
purchasePath: 'purchasePath',
...usersLimitDataset,
},
+ ...props,
},
provide: { name: 'my group' },
stubs: { GlSprintf },
@@ -43,9 +50,26 @@ describe('UserLimitNotification', () => {
});
});
+ describe('when close to limit with a personal namespace', () => {
+ beforeEach(() => {
+ createComponent(true, false, { membersCount: 3, userNamespace: true });
+ });
+
+ it('renders the limit for a personal namespace', () => {
+ const alert = findAlert();
+
+ expect(alert.attributes('title')).toEqual(
+ 'You only have space for 2 more members in your personal projects',
+ );
+ expect(alert.text()).toEqual(
+ 'To make more space, you can remove members who no longer need access.',
+ );
+ });
+ });
+
describe('when close to limit', () => {
it("renders user's limit notification", () => {
- createComponent(false, { membersCount: 3 });
+ createComponent(true, false, { membersCount: 3 });
const alert = findAlert();
@@ -61,7 +85,7 @@ describe('UserLimitNotification', () => {
describe('when limit is reached', () => {
it("renders user's limit notification", () => {
- createComponent(true);
+ createComponent(true, true);
const alert = findAlert();
@@ -71,12 +95,12 @@ describe('UserLimitNotification', () => {
describe('when free user namespace', () => {
it("renders user's limit notification", () => {
- createComponent(true, { userNamespace: true });
+ createComponent(true, true, { userNamespace: true });
const alert = findAlert();
expect(alert.attributes('title')).toEqual(
- "You've reached your 5 members limit for my group",
+ "You've reached your 5 members limit for your personal projects",
);
expect(alert.text()).toEqual(REACHED_LIMIT_MESSAGE);
diff --git a/spec/frontend/issuable/components/csv_export_modal_spec.js b/spec/frontend/issuable/components/csv_export_modal_spec.js
index ad4abda6912..f798f87b6b2 100644
--- a/spec/frontend/issuable/components/csv_export_modal_spec.js
+++ b/spec/frontend/issuable/components/csv_export_modal_spec.js
@@ -1,7 +1,8 @@
-import { GlModal, GlIcon, GlButton } from '@gitlab/ui';
+import { GlModal, GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import CsvExportModal from '~/issuable/components/csv_export_modal.vue';
+import { __ } from '~/locale';
describe('CsvExportModal', () => {
let wrapper;
@@ -34,7 +35,6 @@ describe('CsvExportModal', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findIcon = () => wrapper.findComponent(GlIcon);
- const findButton = () => wrapper.findComponent(GlButton);
describe('template', () => {
describe.each`
@@ -47,11 +47,25 @@ describe('CsvExportModal', () => {
});
it('displays the modal title "$modalTitle"', () => {
- expect(findModal().text()).toContain(modalTitle);
+ expect(findModal().props('title')).toBe(modalTitle);
});
- it('displays the button with title "$modalTitle"', () => {
- expect(findButton().text()).toBe(modalTitle);
+ it('displays the primary button with title "$modalTitle" and href', () => {
+ expect(findModal().props('actionPrimary')).toMatchObject({
+ text: modalTitle,
+ attributes: {
+ href: 'export/csv/path',
+ variant: 'confirm',
+ 'data-method': 'post',
+ 'data-qa-selector': `export_${issuableType}_button`,
+ 'data-track-action': 'click_button',
+ 'data-track-label': `export_${issuableType}_csv`,
+ },
+ });
+ });
+
+ it('displays the cancel button', () => {
+ expect(findModal().props('actionCancel')).toEqual({ text: __('Cancel') });
});
});
@@ -72,13 +86,5 @@ describe('CsvExportModal', () => {
);
});
});
-
- describe('primary button', () => {
- it('passes the exportCsvPath to the button', () => {
- const exportCsvPath = '/gitlab-org/gitlab-test/-/issues/export_csv';
- wrapper = createComponent({ props: { exportCsvPath } });
- expect(findButton().attributes('href')).toBe(exportCsvPath);
- });
- });
});
});
diff --git a/spec/frontend/issuable/components/csv_import_modal_spec.js b/spec/frontend/issuable/components/csv_import_modal_spec.js
index f4636fd7e6a..6e954c91f46 100644
--- a/spec/frontend/issuable/components/csv_import_modal_spec.js
+++ b/spec/frontend/issuable/components/csv_import_modal_spec.js
@@ -76,6 +76,10 @@ describe('CsvImportModal', () => {
expect(formSubmitSpy).toHaveBeenCalled();
});
+
+ it('displays the cancel button', () => {
+ expect(findModal().props('actionCancel')).toEqual({ text: __('Cancel') });
+ });
});
});
});
diff --git a/spec/frontend/issuable/popover/components/issue_popover_spec.js b/spec/frontend/issuable/popover/components/issue_popover_spec.js
new file mode 100644
index 00000000000..3e77e750f3a
--- /dev/null
+++ b/spec/frontend/issuable/popover/components/issue_popover_spec.js
@@ -0,0 +1,81 @@
+import { GlSkeletonLoader } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import StatusBox from '~/issuable/components/status_box.vue';
+import IssuePopover from '~/issuable/popover/components/issue_popover.vue';
+import issueQuery from '~/issuable/popover/queries/issue.query.graphql';
+
+describe('Issue Popover', () => {
+ let wrapper;
+
+ Vue.use(VueApollo);
+
+ const issueQueryResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: '1',
+ issue: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/1',
+ createdAt: '2020-07-01T04:08:01Z',
+ state: 'opened',
+ title: 'Issue title',
+ },
+ },
+ },
+ };
+
+ const mountComponent = ({
+ queryResponse = jest.fn().mockResolvedValue(issueQueryResponse),
+ } = {}) => {
+ wrapper = shallowMount(IssuePopover, {
+ apolloProvider: createMockApollo([[issueQuery, queryResponse]]),
+ propsData: {
+ target: document.createElement('a'),
+ projectPath: 'foo/bar',
+ iid: '1',
+ cachedTitle: 'Cached title',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows skeleton-loader while apollo is loading', () => {
+ mountComponent();
+
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
+ });
+
+ describe('when loaded', () => {
+ beforeEach(() => {
+ mountComponent();
+ return waitForPromises();
+ });
+
+ it('shows status badge', () => {
+ expect(wrapper.findComponent(StatusBox).props()).toEqual({
+ issuableType: 'issue',
+ initialState: issueQueryResponse.data.project.issue.state,
+ });
+ });
+
+ it('shows opened time', () => {
+ expect(wrapper.text()).toContain('Opened 4 days ago');
+ });
+
+ it('shows title', () => {
+ expect(wrapper.find('h5').text()).toBe(issueQueryResponse.data.project.issue.title);
+ });
+
+ it('shows reference', () => {
+ expect(wrapper.text()).toContain('foo/bar#1');
+ });
+ });
+});
diff --git a/spec/frontend/issuable/popover/components/mr_popover_spec.js b/spec/frontend/issuable/popover/components/mr_popover_spec.js
new file mode 100644
index 00000000000..5fdd1e6e8fc
--- /dev/null
+++ b/spec/frontend/issuable/popover/components/mr_popover_spec.js
@@ -0,0 +1,119 @@
+import { GlSkeletonLoader } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import MRPopover from '~/issuable/popover/components/mr_popover.vue';
+import mergeRequestQuery from '~/issuable/popover/queries/merge_request.query.graphql';
+import CiIcon from '~/vue_shared/components/ci_icon.vue';
+
+describe('MR Popover', () => {
+ let wrapper;
+
+ Vue.use(VueApollo);
+
+ const mrQueryResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: '1',
+ mergeRequest: {
+ __typename: 'Merge Request',
+ id: 'gid://gitlab/Merge_Request/1',
+ createdAt: '2020-07-01T04:08:01Z',
+ state: 'opened',
+ title: 'MR title',
+ headPipeline: {
+ id: '1',
+ detailedStatus: {
+ id: '1',
+ icon: 'status_success',
+ group: 'success',
+ },
+ },
+ },
+ },
+ },
+ };
+
+ const mrQueryResponseWithoutDetailedStatus = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: '1',
+ mergeRequest: {
+ __typename: 'Merge Request',
+ id: 'gid://gitlab/Merge_Request/1',
+ createdAt: '2020-07-01T04:08:01Z',
+ state: 'opened',
+ title: 'MR title',
+ headPipeline: {
+ id: '1',
+ detailedStatus: null,
+ },
+ },
+ },
+ },
+ };
+
+ const mountComponent = ({
+ queryResponse = jest.fn().mockResolvedValue(mrQueryResponse),
+ } = {}) => {
+ wrapper = shallowMount(MRPopover, {
+ apolloProvider: createMockApollo([[mergeRequestQuery, queryResponse]]),
+ propsData: {
+ target: document.createElement('a'),
+ projectPath: 'foo/bar',
+ iid: '1',
+ cachedTitle: 'Cached Title',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows skeleton-loader while apollo is loading', () => {
+ mountComponent();
+
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
+ });
+
+ describe('when loaded', () => {
+ beforeEach(() => {
+ mountComponent();
+ return waitForPromises();
+ });
+
+ it('shows opened time', () => {
+ expect(wrapper.text()).toContain('Opened 4 days ago');
+ });
+
+ it('shows title', () => {
+ expect(wrapper.find('h5').text()).toBe(mrQueryResponse.data.project.mergeRequest.title);
+ });
+
+ it('shows reference', () => {
+ expect(wrapper.text()).toContain('foo/bar!1');
+ });
+
+ it('shows CI Icon if there is pipeline data', () => {
+ expect(wrapper.findComponent(CiIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('without detailed status', () => {
+ beforeEach(() => {
+ mountComponent({
+ queryResponse: jest.fn().mockResolvedValue(mrQueryResponseWithoutDetailedStatus),
+ });
+ return waitForPromises();
+ });
+
+ it('does not show CI icon if there is no pipeline data', () => {
+ expect(wrapper.findComponent(CiIcon).exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/mr_popover/index_spec.js b/spec/frontend/issuable/popover/index_spec.js
index fd8ced17aea..b1aa7f0f0b0 100644
--- a/spec/frontend/mr_popover/index_spec.js
+++ b/spec/frontend/issuable/popover/index_spec.js
@@ -1,45 +1,52 @@
import { setHTMLFixture } from 'helpers/fixtures';
import * as createDefaultClient from '~/lib/graphql';
-import initMRPopovers from '~/mr_popover/index';
+import initIssuablePopovers from '~/issuable/popover/index';
createDefaultClient.default = jest.fn();
-describe('initMRPopovers', () => {
+describe('initIssuablePopovers', () => {
let mr1;
let mr2;
let mr3;
+ let issue1;
beforeEach(() => {
setHTMLFixture(`
- <div id="one" class="gfm-merge_request" data-mr-title="title" data-iid="1" data-project-path="group/project">
+ <div id="one" class="gfm-merge_request" data-mr-title="title" data-iid="1" data-project-path="group/project" data-reference-type="merge_request">
MR1
</div>
- <div id="two" class="gfm-merge_request" data-mr-title="title" data-iid="1" data-project-path="group/project">
+ <div id="two" class="gfm-merge_request" title="title" data-iid="1" data-project-path="group/project" data-reference-type="merge_request">
MR2
</div>
<div id="three" class="gfm-merge_request">
MR3
</div>
+ <div id="four" class="gfm-issue" title="title" data-iid="1" data-project-path="group/project" data-reference-type="issue">
+ Issue1
+ </div>
`);
mr1 = document.querySelector('#one');
mr2 = document.querySelector('#two');
mr3 = document.querySelector('#three');
+ issue1 = document.querySelector('#four');
mr1.addEventListener = jest.fn();
mr2.addEventListener = jest.fn();
mr3.addEventListener = jest.fn();
+ issue1.addEventListener = jest.fn();
});
it('does not add the same event listener twice', () => {
- initMRPopovers([mr1, mr1, mr2]);
+ initIssuablePopovers([mr1, mr1, mr2, issue1]);
expect(mr1.addEventListener).toHaveBeenCalledTimes(1);
expect(mr2.addEventListener).toHaveBeenCalledTimes(1);
+ expect(issue1.addEventListener).toHaveBeenCalledTimes(1);
});
it('does not add listener if it does not have the necessary data attributes', () => {
- initMRPopovers([mr1, mr2, mr3]);
+ initIssuablePopovers([mr1, mr2, mr3]);
expect(mr3.addEventListener).not.toHaveBeenCalled();
});
diff --git a/spec/frontend/issues/create_merge_request_dropdown_spec.js b/spec/frontend/issues/create_merge_request_dropdown_spec.js
index 20b26f5abba..cb7173c56a8 100644
--- a/spec/frontend/issues/create_merge_request_dropdown_spec.js
+++ b/spec/frontend/issues/create_merge_request_dropdown_spec.js
@@ -84,7 +84,7 @@ describe('CreateMergeRequestDropdown', () => {
});
it('enables when can create confidential issue', () => {
- document.querySelector('.js-create-mr').setAttribute('data-is-confidential', 'true');
+ document.querySelector('.js-create-mr').dataset.isConfidential = 'true';
confidentialState.selectedProject = { name: 'test' };
dropdown.enable();
@@ -93,7 +93,7 @@ describe('CreateMergeRequestDropdown', () => {
});
it('does not enable when can not create confidential issue', () => {
- document.querySelector('.js-create-mr').setAttribute('data-is-confidential', 'true');
+ document.querySelector('.js-create-mr').dataset.isConfidential = 'true';
dropdown.enable();
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index d92ba527b5c..3f2c3c3ec5f 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -8,8 +8,6 @@ import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
import getIssuesQuery from 'ee_else_ce/issues/list/queries/get_issues.query.graphql';
import getIssuesCountsQuery from 'ee_else_ce/issues/list/queries/get_issues_counts.query.graphql';
-import getIssuesWithoutCrmQuery from 'ee_else_ce/issues/list/queries/get_issues_without_crm.query.graphql';
-import getIssuesCountsWithoutCrmQuery from 'ee_else_ce/issues/list/queries/get_issues_counts_without_crm.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
@@ -38,9 +36,11 @@ import {
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
TOKEN_TYPE_CONFIDENTIAL,
+ TOKEN_TYPE_CONTACT,
TOKEN_TYPE_LABEL,
TOKEN_TYPE_MILESTONE,
TOKEN_TYPE_MY_REACTION,
+ TOKEN_TYPE_ORGANIZATION,
TOKEN_TYPE_RELEASE,
TOKEN_TYPE_TYPE,
urlSortParams,
@@ -67,6 +67,9 @@ describe('CE IssuesListApp component', () => {
autocompleteAwardEmojisPath: 'autocomplete/award/emojis/path',
calendarPath: 'calendar/path',
canBulkUpdate: false,
+ canCreateProjects: false,
+ canReadCrmContact: false,
+ canReadCrmOrganization: false,
emptyStateSvgPath: 'empty-state.svg',
exportCsvPath: 'export/csv/path',
fullPath: 'path/to/project',
@@ -77,6 +80,7 @@ describe('CE IssuesListApp component', () => {
hasIssueWeightsFeature: true,
hasIterationsFeature: true,
hasMultipleIssueAssigneesFeature: true,
+ hasScopedLabelsFeature: true,
initialEmail: 'email@example.com',
initialSort: CREATED_DESC,
isAnonymousSearchDisabled: false,
@@ -86,6 +90,7 @@ describe('CE IssuesListApp component', () => {
isSignedIn: true,
jiraIntegrationPath: 'jira/integration/path',
newIssuePath: 'new/issue/path',
+ newProjectPath: 'new/project/path',
releasesPath: 'releases/path',
rssPath: 'rss/path',
showNewIssueLink: true,
@@ -100,6 +105,9 @@ describe('CE IssuesListApp component', () => {
defaultQueryResponse.data.project.issues.nodes[0].weight = 5;
}
+ const mockIssuesQueryResponse = jest.fn().mockResolvedValue(defaultQueryResponse);
+ const mockIssuesCountsQueryResponse = jest.fn().mockResolvedValue(getIssuesCountsQueryResponse);
+
const findCsvImportExportButtons = () => wrapper.findComponent(CsvImportExportButtons);
const findIssuableByEmail = () => wrapper.findComponent(IssuableByEmail);
const findGlButton = () => wrapper.findComponent(GlButton);
@@ -113,16 +121,15 @@ describe('CE IssuesListApp component', () => {
const mountComponent = ({
provide = {},
data = {},
- issuesQueryResponse = jest.fn().mockResolvedValue(defaultQueryResponse),
- issuesCountsQueryResponse = jest.fn().mockResolvedValue(getIssuesCountsQueryResponse),
+ issuesQueryResponse = mockIssuesQueryResponse,
+ issuesCountsQueryResponse = mockIssuesCountsQueryResponse,
sortPreferenceMutationResponse = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse),
+ stubs = {},
mountFn = shallowMount,
} = {}) => {
const requestHandlers = [
[getIssuesQuery, issuesQueryResponse],
[getIssuesCountsQuery, issuesCountsQueryResponse],
- [getIssuesWithoutCrmQuery, issuesQueryResponse],
- [getIssuesCountsWithoutCrmQuery, issuesCountsQueryResponse],
[setSortPreferenceMutation, sortPreferenceMutationResponse],
];
@@ -136,6 +143,7 @@ describe('CE IssuesListApp component', () => {
data() {
return data;
},
+ stubs,
});
};
@@ -156,6 +164,22 @@ describe('CE IssuesListApp component', () => {
return waitForPromises();
});
+ it('queries list with types `ISSUE`, `INCIDENT`, and `TEST_CASE`', () => {
+ const expectedTypes = ['ISSUE', 'INCIDENT', 'TEST_CASE'];
+
+ expect(mockIssuesQueryResponse).toHaveBeenCalledWith(
+ expect.objectContaining({
+ types: expectedTypes,
+ }),
+ );
+
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
+ expect.objectContaining({
+ types: expectedTypes,
+ }),
+ );
+ });
+
it('renders', () => {
expect(findIssuableList().props()).toMatchObject({
namespace: defaultProvide.fullPath,
@@ -301,17 +325,23 @@ describe('CE IssuesListApp component', () => {
describe('initial url params', () => {
describe('page', () => {
it('page_after is set from the url params', () => {
- setWindowLocation('?page_after=randomCursorString');
+ setWindowLocation('?page_after=randomCursorString&first_page_size=20');
wrapper = mountComponent();
- expect(wrapper.vm.$route.query).toMatchObject({ page_after: 'randomCursorString' });
+ expect(wrapper.vm.$route.query).toMatchObject({
+ page_after: 'randomCursorString',
+ first_page_size: '20',
+ });
});
it('page_before is set from the url params', () => {
- setWindowLocation('?page_before=anotherRandomCursorString');
+ setWindowLocation('?page_before=anotherRandomCursorString&last_page_size=20');
wrapper = mountComponent();
- expect(wrapper.vm.$route.query).toMatchObject({ page_before: 'anotherRandomCursorString' });
+ expect(wrapper.vm.$route.query).toMatchObject({
+ page_before: 'anotherRandomCursorString',
+ last_page_size: '20',
+ });
});
});
@@ -515,10 +545,12 @@ describe('CE IssuesListApp component', () => {
it('shows empty state', () => {
expect(findGlEmptyState().props()).toMatchObject({
- description: IssuesListApp.i18n.noIssuesSignedInDescription,
title: IssuesListApp.i18n.noIssuesSignedInTitle,
svgPath: defaultProvide.emptyStateSvgPath,
});
+ expect(findGlEmptyState().text()).toContain(
+ IssuesListApp.i18n.noIssuesSignedInDescription,
+ );
});
it('shows "New issue" and import/export buttons', () => {
@@ -532,11 +564,11 @@ describe('CE IssuesListApp component', () => {
it('shows Jira integration information', () => {
const paragraphs = wrapper.findAll('p');
- expect(paragraphs.at(1).text()).toContain(IssuesListApp.i18n.jiraIntegrationTitle);
- expect(paragraphs.at(2).text()).toContain(
+ expect(paragraphs.at(2).text()).toContain(IssuesListApp.i18n.jiraIntegrationTitle);
+ expect(paragraphs.at(3).text()).toContain(
'Enable the Jira integration to view your Jira issues in GitLab.',
);
- expect(paragraphs.at(3).text()).toContain(
+ expect(paragraphs.at(4).text()).toContain(
IssuesListApp.i18n.jiraIntegrationSecondaryMessage,
);
expect(findGlLink().text()).toBe('Enable the Jira integration');
@@ -544,6 +576,29 @@ describe('CE IssuesListApp component', () => {
});
});
+ describe('when user is logged in and can create projects', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ provide: { canCreateProjects: true, hasAnyIssues: false, isSignedIn: true },
+ stubs: { GlEmptyState },
+ });
+ });
+
+ it('shows empty state with additional description about creating projects', () => {
+ expect(findGlEmptyState().text()).toContain(
+ IssuesListApp.i18n.noIssuesSignedInDescription,
+ );
+ expect(findGlEmptyState().text()).toContain(
+ IssuesListApp.i18n.noGroupIssuesSignedInDescription,
+ );
+ });
+
+ it('shows "New project" button', () => {
+ expect(findGlButton().text()).toBe(IssuesListApp.i18n.newProjectLabel);
+ expect(findGlButton().attributes('href')).toBe(defaultProvide.newProjectPath);
+ });
+ });
+
describe('when user is logged out', () => {
beforeEach(() => {
wrapper = mountComponent({
@@ -587,6 +642,21 @@ describe('CE IssuesListApp component', () => {
});
});
+ describe('when user does not have CRM enabled', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ provide: { canReadCrmContact: false, canReadCrmOrganization: false },
+ });
+ });
+
+ it('does not render Contact or Organization tokens', () => {
+ expect(findIssuableList().props('searchTokens')).not.toMatchObject([
+ { type: TOKEN_TYPE_CONTACT },
+ { type: TOKEN_TYPE_ORGANIZATION },
+ ]);
+ });
+ });
+
describe('when all tokens are available', () => {
const originalGon = window.gon;
@@ -599,7 +669,13 @@ describe('CE IssuesListApp component', () => {
current_user_avatar_url: mockCurrentUser.avatar_url,
};
- wrapper = mountComponent({ provide: { isSignedIn: true } });
+ wrapper = mountComponent({
+ provide: {
+ canReadCrmContact: true,
+ canReadCrmOrganization: true,
+ isSignedIn: true,
+ },
+ });
});
afterEach(() => {
@@ -615,9 +691,11 @@ describe('CE IssuesListApp component', () => {
{ type: TOKEN_TYPE_ASSIGNEE, preloadedAuthors },
{ type: TOKEN_TYPE_AUTHOR, preloadedAuthors },
{ type: TOKEN_TYPE_CONFIDENTIAL },
+ { type: TOKEN_TYPE_CONTACT },
{ type: TOKEN_TYPE_LABEL },
{ type: TOKEN_TYPE_MILESTONE },
{ type: TOKEN_TYPE_MY_REACTION },
+ { type: TOKEN_TYPE_ORGANIZATION },
{ type: TOKEN_TYPE_RELEASE },
{ type: TOKEN_TYPE_TYPE },
]);
@@ -675,10 +753,10 @@ describe('CE IssuesListApp component', () => {
});
describe.each`
- event | paramName | paramValue
- ${'next-page'} | ${'page_after'} | ${'endCursor'}
- ${'previous-page'} | ${'page_before'} | ${'startCursor'}
- `('when "$event" event is emitted by IssuableList', ({ event, paramName, paramValue }) => {
+ event | params
+ ${'next-page'} | ${{ page_after: 'endCursor', page_before: undefined, first_page_size: 20, last_page_size: undefined }}
+ ${'previous-page'} | ${{ page_after: undefined, page_before: 'startCursor', first_page_size: undefined, last_page_size: 20 }}
+ `('when "$event" event is emitted by IssuableList', ({ event, params }) => {
beforeEach(() => {
wrapper = mountComponent({
data: {
@@ -697,9 +775,9 @@ describe('CE IssuesListApp component', () => {
expect(scrollUp).toHaveBeenCalled();
});
- it(`updates url with "${paramName}" param`, () => {
+ it(`updates url`, () => {
expect(wrapper.vm.$router.push).toHaveBeenCalledWith({
- query: expect.objectContaining({ [paramName]: paramValue }),
+ query: expect.objectContaining(params),
});
});
});
diff --git a/spec/frontend/issues/list/utils_spec.js b/spec/frontend/issues/list/utils_spec.js
index ce0477883d7..e8ffba9bc80 100644
--- a/spec/frontend/issues/list/utils_spec.js
+++ b/spec/frontend/issues/list/utils_spec.js
@@ -42,27 +42,37 @@ describe('getInitialPageParams', () => {
'returns the correct page params for sort key %s with afterCursor',
(sortKey) => {
const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
+ const lastPageSize = undefined;
const afterCursor = 'randomCursorString';
const beforeCursor = undefined;
-
- expect(getInitialPageParams(sortKey, afterCursor, beforeCursor)).toEqual({
+ const pageParams = getInitialPageParams(
+ sortKey,
firstPageSize,
+ lastPageSize,
afterCursor,
- });
+ beforeCursor,
+ );
+
+ expect(pageParams).toEqual({ firstPageSize, afterCursor });
},
);
it.each(Object.keys(urlSortParams))(
'returns the correct page params for sort key %s with beforeCursor',
(sortKey) => {
- const firstPageSize = sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE;
+ const firstPageSize = undefined;
+ const lastPageSize = PAGE_SIZE;
const afterCursor = undefined;
const beforeCursor = 'anotherRandomCursorString';
-
- expect(getInitialPageParams(sortKey, afterCursor, beforeCursor)).toEqual({
+ const pageParams = getInitialPageParams(
+ sortKey,
firstPageSize,
+ lastPageSize,
+ afterCursor,
beforeCursor,
- });
+ );
+
+ expect(pageParams).toEqual({ lastPageSize, beforeCursor });
},
);
});
diff --git a/spec/frontend/issues/show/components/description_spec.js b/spec/frontend/issues/show/components/description_spec.js
index 1ae04531a6b..2cc27309e59 100644
--- a/spec/frontend/issues/show/components/description_spec.js
+++ b/spec/frontend/issues/show/components/description_spec.js
@@ -17,6 +17,7 @@ import { updateHistory } from '~/lib/utils/url_utility';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import TaskList from '~/task_list';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
+import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import CreateWorkItem from '~/work_items/pages/create_work_item.vue';
import {
descriptionProps as initialProps,
@@ -370,10 +371,10 @@ describe('Description component', () => {
await findTaskLink().trigger('click');
expect(trackingSpy).toHaveBeenCalledWith(
- 'workItems:show',
+ TRACKING_CATEGORY_SHOW,
'viewed_work_item_from_modal',
{
- category: 'workItems:show',
+ category: TRACKING_CATEGORY_SHOW,
label: 'work_item_view',
property: 'type_task',
},
diff --git a/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
index 35acca60de7..8e090645be2 100644
--- a/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
+++ b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
@@ -5,6 +5,7 @@ import { trackIncidentDetailsViewsOptions } from '~/incidents/constants';
import DescriptionComponent from '~/issues/show/components/description.vue';
import HighlightBar from '~/issues/show/components/incidents/highlight_bar.vue';
import IncidentTabs from '~/issues/show/components/incidents/incident_tabs.vue';
+import TimelineTab from '~/issues/show/components/incidents/timeline_events_tab.vue';
import INVALID_URL from '~/lib/utils/invalid_url';
import Tracking from '~/tracking';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
@@ -35,8 +36,9 @@ describe('Incident Tabs component', () => {
fullPath: '',
iid: '',
projectId: '',
+ issuableId: '',
uploadMetricsFeatureAvailable: true,
- glFeatures: { incidentTimeline: true, incidentTimelineEvents: true },
+ glFeatures: { incidentTimeline: true },
},
data() {
return { alert: mockAlert, ...data };
@@ -47,6 +49,9 @@ describe('Incident Tabs component', () => {
alert: {
loading: true,
},
+ timelineEvents: {
+ loading: false,
+ },
},
},
},
@@ -62,6 +67,7 @@ describe('Incident Tabs component', () => {
const findAlertDetailsComponent = () => wrapper.find(AlertDetailsTable);
const findDescriptionComponent = () => wrapper.find(DescriptionComponent);
const findHighlightBarComponent = () => wrapper.find(HighlightBar);
+ const findTimelineTab = () => wrapper.findComponent(TimelineTab);
describe('empty state', () => {
beforeEach(() => {
@@ -122,4 +128,20 @@ describe('Incident Tabs component', () => {
expect(Tracking.event).toHaveBeenCalledWith(category, action);
});
});
+
+ describe('incident timeline tab', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders the timeline tab when feature flag is enabled', () => {
+ expect(findTimelineTab().exists()).toBe(true);
+ });
+
+ it('does not render timeline tab when feature flag is disabled', () => {
+ mountComponent({}, { provide: { glFeatures: { incidentTimeline: false } } });
+
+ expect(findTimelineTab().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/issues/show/components/incidents/mock_data.js b/spec/frontend/issues/show/components/incidents/mock_data.js
new file mode 100644
index 00000000000..b5346a6089a
--- /dev/null
+++ b/spec/frontend/issues/show/components/incidents/mock_data.js
@@ -0,0 +1,72 @@
+export const mockEvents = [
+ {
+ action: 'comment',
+ author: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ },
+ createdAt: '2022-03-22T15:59:08Z',
+ id: 'gid://gitlab/IncidentManagement::TimelineEvent/132',
+ note: 'Dummy event 1',
+ noteHtml: '<p>Dummy event 1</p>',
+ occurredAt: '2022-03-22T15:59:00Z',
+ updatedAt: '2022-03-22T15:59:08Z',
+ __typename: 'TimelineEventType',
+ },
+ {
+ action: 'comment',
+ author: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ },
+ createdAt: '2022-03-23T14:57:08Z',
+ id: 'gid://gitlab/IncidentManagement::TimelineEvent/131',
+ note: 'Dummy event 2',
+ noteHtml: '<p>Dummy event 2</p>',
+ occurredAt: '2022-03-23T14:57:00Z',
+ updatedAt: '2022-03-23T14:57:08Z',
+ __typename: 'TimelineEventType',
+ },
+ {
+ action: 'comment',
+ author: {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ },
+ createdAt: '2022-03-23T15:59:08Z',
+ id: 'gid://gitlab/IncidentManagement::TimelineEvent/132',
+ note: 'Dummy event 3',
+ noteHtml: '<p>Dummy event 3</p>',
+ occurredAt: '2022-03-23T15:59:00Z',
+ updatedAt: '2022-03-23T15:59:08Z',
+ __typename: 'TimelineEventType',
+ },
+];
+
+export const timelineEventsQueryListResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/8',
+ incidentManagementTimelineEvents: {
+ nodes: mockEvents,
+ },
+ },
+ },
+};
+
+export const timelineEventsQueryEmptyResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/8',
+ incidentManagementTimelineEvents: {
+ nodes: [],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js
new file mode 100644
index 00000000000..7e51219ffa7
--- /dev/null
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js
@@ -0,0 +1,87 @@
+import timezoneMock from 'timezone-mock';
+import merge from 'lodash/merge';
+import { GlIcon } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import IncidentTimelineEventListItem from '~/issues/show/components/incidents/timeline_events_list_item.vue';
+import { mockEvents } from './mock_data';
+
+describe('IncidentTimelineEventListItem', () => {
+ let wrapper;
+
+ const mountComponent = (propsData) => {
+ const { action, noteHtml, occurredAt } = mockEvents[0];
+ wrapper = mountExtended(
+ IncidentTimelineEventListItem,
+ merge({
+ propsData: {
+ action,
+ noteHtml,
+ occurredAt,
+ isLastItem: false,
+ ...propsData,
+ },
+ }),
+ );
+ };
+
+ const findCommentIcon = () => wrapper.findComponent(GlIcon);
+ const findTextContainer = () => wrapper.findByTestId('event-text-container');
+ const findEventTime = () => wrapper.findByTestId('event-time');
+
+ describe('template', () => {
+ it('shows comment icon', () => {
+ mountComponent();
+
+ expect(findCommentIcon().exists()).toBe(true);
+ });
+
+ it('sets correct props for icon', () => {
+ mountComponent();
+
+ expect(findCommentIcon().props('name')).toBe(mockEvents[0].action);
+ });
+
+ it('displays the correct time', () => {
+ mountComponent();
+
+ expect(findEventTime().text()).toBe('15:59 UTC');
+ });
+
+ describe('last item in list', () => {
+ it('shows a bottom border when not the last item', () => {
+ mountComponent();
+
+ expect(findTextContainer().classes()).toContain('gl-border-1');
+ });
+
+ it('does not show a bottom border when the last item', () => {
+ mountComponent({ isLastItem: true });
+
+ expect(findTextContainer().classes()).not.toContain('gl-border-1');
+ });
+ });
+
+ describe.each`
+ timezone
+ ${'Europe/London'}
+ ${'US/Pacific'}
+ ${'Australia/Adelaide'}
+ `('when viewing in timezone', ({ timezone }) => {
+ describe(timezone, () => {
+ beforeEach(() => {
+ timezoneMock.register(timezone);
+
+ mountComponent();
+ });
+
+ afterEach(() => {
+ timezoneMock.unregister();
+ });
+
+ it('displays the correct time', () => {
+ expect(findEventTime().text()).toBe('15:59 UTC');
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
new file mode 100644
index 00000000000..6610ea0b832
--- /dev/null
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
@@ -0,0 +1,87 @@
+import timezoneMock from 'timezone-mock';
+import merge from 'lodash/merge';
+import { shallowMountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+import IncidentTimelineEventList from '~/issues/show/components/incidents/timeline_events_list.vue';
+import { mockEvents } from './mock_data';
+
+describe('IncidentTimelineEventList', () => {
+ let wrapper;
+
+ const mountComponent = () => {
+ wrapper = shallowMountExtended(
+ IncidentTimelineEventList,
+ merge({
+ provide: {
+ fullPath: 'group/project',
+ issuableId: '1',
+ },
+ propsData: {
+ timelineEvents: mockEvents,
+ },
+ }),
+ );
+ };
+
+ const findGroups = () => wrapper.findAllByTestId('timeline-group');
+ const findItems = (base = wrapper) => base.findAllByTestId('timeline-event');
+ const findFirstGroup = () => extendedWrapper(findGroups().at(0));
+ const findSecondGroup = () => extendedWrapper(findGroups().at(1));
+ const findDates = () => wrapper.findAllByTestId('event-date');
+
+ describe('template', () => {
+ it('groups items correctly', () => {
+ mountComponent();
+
+ expect(findGroups()).toHaveLength(2);
+
+ expect(findItems(findFirstGroup())).toHaveLength(1);
+ expect(findItems(findSecondGroup())).toHaveLength(2);
+ });
+
+ it('sets the isLastItem prop correctly', () => {
+ mountComponent();
+
+ expect(findItems().at(0).props('isLastItem')).toBe(false);
+ expect(findItems().at(1).props('isLastItem')).toBe(false);
+ expect(findItems().at(2).props('isLastItem')).toBe(true);
+ });
+
+ it('sets the event props correctly', () => {
+ mountComponent();
+
+ expect(findItems().at(1).props('occurredAt')).toBe(mockEvents[1].occurredAt);
+ expect(findItems().at(1).props('action')).toBe(mockEvents[1].action);
+ expect(findItems().at(1).props('noteHtml')).toBe(mockEvents[1].noteHtml);
+ });
+
+ it('formats dates correctly', () => {
+ mountComponent();
+
+ expect(findDates().at(0).text()).toBe('2022-03-22');
+ expect(findDates().at(1).text()).toBe('2022-03-23');
+ });
+
+ describe.each`
+ timezone
+ ${'Europe/London'}
+ ${'US/Pacific'}
+ ${'Australia/Adelaide'}
+ `('when viewing in timezone', ({ timezone }) => {
+ describe(timezone, () => {
+ beforeEach(() => {
+ timezoneMock.register(timezone);
+
+ mountComponent();
+ });
+
+ afterEach(() => {
+ timezoneMock.unregister();
+ });
+
+ it('displays the correct time', () => {
+ expect(findDates().at(0).text()).toBe('2022-03-22');
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
new file mode 100644
index 00000000000..cf81f4cdf66
--- /dev/null
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
@@ -0,0 +1,105 @@
+import { GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import TimelineEventsTab from '~/issues/show/components/incidents/timeline_events_tab.vue';
+import IncidentTimelineEventsList from '~/issues/show/components/incidents/timeline_events_list.vue';
+import timelineEventsQuery from '~/issues/show/components/incidents/graphql/queries/get_timeline_events.query.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createAlert } from '~/flash';
+import { timelineEventsQueryListResponse, timelineEventsQueryEmptyResponse } from './mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/flash');
+
+const graphQLError = new Error('GraphQL error');
+const listResponse = jest.fn().mockResolvedValue(timelineEventsQueryListResponse);
+const emptyResponse = jest.fn().mockResolvedValue(timelineEventsQueryEmptyResponse);
+const errorResponse = jest.fn().mockRejectedValue(graphQLError);
+
+function createMockApolloProvider(response = listResponse) {
+ const requestHandlers = [[timelineEventsQuery, response]];
+ return createMockApollo(requestHandlers);
+}
+
+describe('TimelineEventsTab', () => {
+ let wrapper;
+
+ const mountComponent = (options = {}) => {
+ const { mockApollo, mountMethod = shallowMountExtended } = options;
+
+ wrapper = mountMethod(TimelineEventsTab, {
+ provide: {
+ fullPath: 'group/project',
+ issuableId: '1',
+ },
+ apolloProvider: mockApollo,
+ });
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ const findLoadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findTimelineEventsList = () => wrapper.findComponent(IncidentTimelineEventsList);
+
+ describe('Timeline events tab', () => {
+ describe('empty state', () => {
+ let mockApollo;
+
+ it('should show the empty state', async () => {
+ mockApollo = createMockApolloProvider(emptyResponse);
+ mountComponent({ mockApollo });
+ await waitForPromises();
+
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ });
+
+ describe('error state', () => {
+ let mockApollo;
+
+ it('should show an error state', async () => {
+ mockApollo = createMockApolloProvider(errorResponse);
+ mountComponent({ mockApollo });
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ captureError: true,
+ error: graphQLError,
+ message: 'Something went wrong while fetching incident timeline events.',
+ });
+ });
+ });
+ });
+
+ describe('timelineEventsQuery', () => {
+ let mockApollo;
+
+ beforeEach(() => {
+ mockApollo = createMockApolloProvider();
+ mountComponent({ mockApollo });
+ });
+
+ it('should request data', () => {
+ expect(listResponse).toHaveBeenCalled();
+ });
+
+ it('should show the loading state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findLoadingSpinner().exists()).toBe(true);
+ });
+
+ it('should render the list', async () => {
+ await waitForPromises();
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTimelineEventsList().props('timelineEvents')).toHaveLength(3);
+ });
+ });
+});
diff --git a/spec/frontend/issues/show/components/incidents/utils_spec.js b/spec/frontend/issues/show/components/incidents/utils_spec.js
new file mode 100644
index 00000000000..e6f7082d280
--- /dev/null
+++ b/spec/frontend/issues/show/components/incidents/utils_spec.js
@@ -0,0 +1,31 @@
+import { displayAndLogError, getEventIcon } from '~/issues/show/components/incidents/utils';
+import { createAlert } from '~/flash';
+
+jest.mock('~/flash');
+
+describe('incident utils', () => {
+ describe('display and log error', () => {
+ it('displays and logs an error', () => {
+ const error = new Error('test');
+ displayAndLogError(error);
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'Something went wrong while fetching incident timeline events.',
+ captureError: true,
+ error,
+ });
+ });
+ });
+
+ describe('get event icon', () => {
+ it('should display a matching event icon name', () => {
+ const name = 'comment';
+
+ expect(getEventIcon(name)).toBe(name);
+ });
+
+ it('should return a default icon name', () => {
+ expect(getEventIcon('non-existent-icon-name')).toBe('comment');
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
index 8730e124ae7..8f79c74368f 100644
--- a/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
@@ -35,7 +35,7 @@ describe('SignInOauthButton', () => {
let mockAxios;
let store;
- const createComponent = ({ slots } = {}) => {
+ const createComponent = ({ slots, props } = {}) => {
store = createStore();
jest.spyOn(store, 'dispatch').mockImplementation();
jest.spyOn(store, 'commit').mockImplementation();
@@ -46,6 +46,7 @@ describe('SignInOauthButton', () => {
provide: {
oauthMetadata: mockOauthMetadata,
},
+ propsData: props,
});
};
@@ -65,6 +66,7 @@ describe('SignInOauthButton', () => {
expect(findButton().exists()).toBe(true);
expect(findButton().text()).toBe(I18N_DEFAULT_SIGN_IN_BUTTON_TEXT);
+ expect(findButton().props('category')).toBe('primary');
});
it.each`
@@ -208,4 +210,11 @@ describe('SignInOauthButton', () => {
});
});
});
+
+ describe('when `category` prop is set', () => {
+ it('sets the `category` prop on the GlButton', () => {
+ createComponent({ props: { category: 'tertiary' } });
+ expect(findButton().props('category')).toBe('tertiary');
+ });
+ });
});
diff --git a/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js b/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js
index 2f5e47d1ae4..e16121243a0 100644
--- a/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/user_link_spec.js
@@ -1,5 +1,7 @@
import { GlSprintf } from '@gitlab/ui';
import UserLink from '~/jira_connect/subscriptions/components/user_link.vue';
+import SignInOauthButton from '~/jira_connect/subscriptions/components/sign_in_oauth_button.vue';
+
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
@@ -16,6 +18,7 @@ describe('UserLink', () => {
provide,
stubs: {
GlSprintf,
+ SignInOauthButton,
},
});
};
@@ -23,28 +26,48 @@ describe('UserLink', () => {
const findSignInLink = () => wrapper.findByTestId('sign-in-link');
const findGitlabUserLink = () => wrapper.findByTestId('gitlab-user-link');
const findSprintf = () => wrapper.findComponent(GlSprintf);
+ const findOauthButton = () => wrapper.findComponent(SignInOauthButton);
afterEach(() => {
wrapper.destroy();
});
describe.each`
- userSignedIn | hasSubscriptions | expectGlSprintf | expectGlLink
- ${true} | ${false} | ${true} | ${false}
- ${false} | ${true} | ${false} | ${true}
- ${true} | ${true} | ${true} | ${false}
- ${false} | ${false} | ${false} | ${false}
+ userSignedIn | hasSubscriptions | expectGlSprintf | expectGlLink | expectOauthButton | jiraConnectOauthEnabled
+ ${true} | ${false} | ${true} | ${false} | ${false} | ${false}
+ ${false} | ${true} | ${false} | ${true} | ${false} | ${false}
+ ${true} | ${true} | ${true} | ${false} | ${false} | ${false}
+ ${false} | ${false} | ${false} | ${false} | ${false} | ${false}
+ ${false} | ${true} | ${false} | ${false} | ${true} | ${true}
`(
- 'when `userSignedIn` is $userSignedIn and `hasSubscriptions` is $hasSubscriptions',
- ({ userSignedIn, hasSubscriptions, expectGlSprintf, expectGlLink }) => {
+ 'when `userSignedIn` is $userSignedIn, `hasSubscriptions` is $hasSubscriptions, `jiraConnectOauthEnabled` is $jiraConnectOauthEnabled',
+ ({
+ userSignedIn,
+ hasSubscriptions,
+ expectGlSprintf,
+ expectGlLink,
+ expectOauthButton,
+ jiraConnectOauthEnabled,
+ }) => {
it('renders template correctly', () => {
- createComponent({
- userSignedIn,
- hasSubscriptions,
- });
+ createComponent(
+ {
+ userSignedIn,
+ hasSubscriptions,
+ },
+ {
+ provide: {
+ glFeatures: {
+ jiraConnectOauth: jiraConnectOauthEnabled,
+ },
+ oauthMetadata: {},
+ },
+ },
+ );
expect(findSprintf().exists()).toBe(expectGlSprintf);
expect(findSignInLink().exists()).toBe(expectGlLink);
+ expect(findOauthButton().exists()).toBe(expectOauthButton);
});
},
);
diff --git a/spec/frontend/jobs/components/log/collapsible_section_spec.js b/spec/frontend/jobs/components/log/collapsible_section_spec.js
index 22ddc8b1c2d..2ab7f5fe22d 100644
--- a/spec/frontend/jobs/components/log/collapsible_section_spec.js
+++ b/spec/frontend/jobs/components/log/collapsible_section_spec.js
@@ -45,7 +45,7 @@ describe('Job Log Collapsible Section', () => {
});
it('renders an icon with the closed state', () => {
- expect(findCollapsibleLineSvg().attributes('data-testid')).toBe('angle-right-icon');
+ expect(findCollapsibleLineSvg().attributes('data-testid')).toBe('chevron-lg-right-icon');
});
});
@@ -62,7 +62,7 @@ describe('Job Log Collapsible Section', () => {
});
it('renders an icon with the open state', () => {
- expect(findCollapsibleLineSvg().attributes('data-testid')).toBe('angle-down-icon');
+ expect(findCollapsibleLineSvg().attributes('data-testid')).toBe('chevron-lg-down-icon');
});
it('renders collapsible lines content', () => {
diff --git a/spec/frontend/jobs/components/log/line_header_spec.js b/spec/frontend/jobs/components/log/line_header_spec.js
index 8055fe64d95..bdc8ae0eef0 100644
--- a/spec/frontend/jobs/components/log/line_header_spec.js
+++ b/spec/frontend/jobs/components/log/line_header_spec.js
@@ -56,8 +56,8 @@ describe('Job Log Header Line', () => {
createComponent({ ...data, isClosed: true });
});
- it('sets icon name to be angle-right', () => {
- expect(wrapper.vm.iconName).toEqual('angle-right');
+ it('sets icon name to be chevron-lg-right', () => {
+ expect(wrapper.vm.iconName).toEqual('chevron-lg-right');
});
});
@@ -66,8 +66,8 @@ describe('Job Log Header Line', () => {
createComponent({ ...data, isClosed: false });
});
- it('sets icon name to be angle-down', () => {
- expect(wrapper.vm.iconName).toEqual('angle-down');
+ it('sets icon name to be chevron-lg-down', () => {
+ expect(wrapper.vm.iconName).toEqual('chevron-lg-down');
});
});
diff --git a/spec/frontend/jobs/components/log/log_spec.js b/spec/frontend/jobs/components/log/log_spec.js
index 7e11738f82e..9cc56cce9b3 100644
--- a/spec/frontend/jobs/components/log/log_spec.js
+++ b/spec/frontend/jobs/components/log/log_spec.js
@@ -68,7 +68,9 @@ describe('Job Log', () => {
});
it('renders an icon with the open state', () => {
- expect(findCollapsibleLine().find('[data-testid="angle-down-icon"]').exists()).toBe(true);
+ expect(findCollapsibleLine().find('[data-testid="chevron-lg-down-icon"]').exists()).toBe(
+ true,
+ );
});
describe('on click header section', () => {
@@ -146,7 +148,9 @@ describe('Job Log, infinitelyCollapsibleSections feature flag enabled', () => {
});
it('renders an icon with the open state', () => {
- expect(findCollapsibleLine().find('[data-testid="angle-down-icon"]').exists()).toBe(true);
+ expect(findCollapsibleLine().find('[data-testid="chevron-lg-down-icon"]').exists()).toBe(
+ true,
+ );
});
describe('on click header section', () => {
diff --git a/spec/frontend/labels/delete_label_modal_spec.js b/spec/frontend/labels/delete_label_modal_spec.js
index 98049538948..67220821fe0 100644
--- a/spec/frontend/labels/delete_label_modal_spec.js
+++ b/spec/frontend/labels/delete_label_modal_spec.js
@@ -25,11 +25,11 @@ describe('DeleteLabelModal', () => {
buttons.forEach((x) => {
const button = document.createElement('button');
button.setAttribute('class', 'js-delete-label-modal-button');
- button.setAttribute('data-label-name', x.labelName);
- button.setAttribute('data-destroy-path', x.destroyPath);
+ button.dataset.labelName = x.labelName;
+ button.dataset.destroyPath = x.destroyPath;
if (x.subjectName) {
- button.setAttribute('data-subject-name', x.subjectName);
+ button.dataset.subjectName = x.subjectName;
}
button.innerHTML = 'Action';
diff --git a/spec/frontend/lazy_loader_spec.js b/spec/frontend/lazy_loader_spec.js
index 3d8b0d9c307..e0b6c7119f9 100644
--- a/spec/frontend/lazy_loader_spec.js
+++ b/spec/frontend/lazy_loader_spec.js
@@ -27,7 +27,7 @@ describe('LazyLoader', () => {
const createLazyLoadImage = () => {
const newImg = document.createElement('img');
newImg.className = 'lazy';
- newImg.setAttribute('data-src', TEST_PATH);
+ newImg.dataset.src = TEST_PATH;
document.body.appendChild(newImg);
triggerChildMutation();
@@ -108,7 +108,7 @@ describe('LazyLoader', () => {
expect(LazyLoader.loadImage).toHaveBeenCalledWith(img);
expect(img.getAttribute('src')).toBe(TEST_PATH);
- expect(img.getAttribute('data-src')).toBe(null);
+ expect(img.dataset.src).toBeUndefined();
expect(img).toHaveClass('js-lazy-loaded');
});
diff --git a/spec/frontend/lib/gfm/index_spec.js b/spec/frontend/lib/gfm/index_spec.js
index c9a480e9943..7aab0072364 100644
--- a/spec/frontend/lib/gfm/index_spec.js
+++ b/spec/frontend/lib/gfm/index_spec.js
@@ -1,35 +1,48 @@
import { render } from '~/lib/gfm';
describe('gfm', () => {
+ const markdownToAST = async (markdown) => {
+ let result;
+
+ await render({
+ markdown,
+ renderer: (tree) => {
+ result = tree;
+ },
+ });
+
+ return result;
+ };
+
+ const expectInRoot = (result, ...nodes) => {
+ expect(result).toEqual(
+ expect.objectContaining({
+ children: expect.arrayContaining(nodes),
+ }),
+ );
+ };
+
describe('render', () => {
it('processes Commonmark and provides an ast to the renderer function', async () => {
- let result;
-
- await render({
- markdown: 'This is text',
- renderer: (tree) => {
- result = tree;
- },
- });
+ const result = await markdownToAST('This is text');
expect(result.type).toBe('root');
});
it('transforms raw HTML into individual nodes in the AST', async () => {
- let result;
-
- await render({
- markdown: '<strong>This is bold text</strong>',
- renderer: (tree) => {
- result = tree;
- },
- });
+ const result = await markdownToAST('<strong>This is bold text</strong>');
- expect(result.children[0].children[0]).toMatchObject({
- type: 'element',
- tagName: 'strong',
- properties: {},
- });
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ children: expect.arrayContaining([
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'strong',
+ }),
+ ]),
+ }),
+ );
});
it('returns the result of executing the renderer function', async () => {
@@ -44,5 +57,40 @@ describe('gfm', () => {
expect(result).toEqual(rendered);
});
+
+ it('transforms footnotes into footnotedefinition and footnotereference tags', async () => {
+ const result = await markdownToAST(
+ `footnote reference [^footnote]
+
+[^footnote]: Footnote definition`,
+ );
+
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ children: expect.arrayContaining([
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'footnotereference',
+ properties: {
+ identifier: 'footnote',
+ label: 'footnote',
+ },
+ }),
+ ]),
+ }),
+ );
+
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ tagName: 'footnotedefinition',
+ properties: {
+ identifier: 'footnote',
+ label: 'footnote',
+ },
+ }),
+ );
+ });
});
});
diff --git a/spec/frontend/lib/utils/dom_utils_spec.js b/spec/frontend/lib/utils/dom_utils_spec.js
index 88dac449527..b537e6b2bf8 100644
--- a/spec/frontend/lib/utils/dom_utils_spec.js
+++ b/spec/frontend/lib/utils/dom_utils_spec.js
@@ -5,7 +5,6 @@ import {
canScrollDown,
parseBooleanDataAttributes,
isElementVisible,
- isElementHidden,
getParents,
getParentByTagName,
setAttributes,
@@ -181,30 +180,21 @@ describe('DOM Utils', () => {
${1} | ${0} | ${0} | ${true}
${0} | ${1} | ${0} | ${true}
${0} | ${0} | ${1} | ${true}
- `(
- 'isElementVisible and isElementHidden',
- ({ offsetWidth, offsetHeight, clientRectsLength, visible }) => {
- const element = {
- offsetWidth,
- offsetHeight,
- getClientRects: () => new Array(clientRectsLength),
- };
-
- const paramDescription = `offsetWidth=${offsetWidth}, offsetHeight=${offsetHeight}, and getClientRects().length=${clientRectsLength}`;
-
- describe('isElementVisible', () => {
- it(`returns ${visible} when ${paramDescription}`, () => {
- expect(isElementVisible(element)).toBe(visible);
- });
+ `('isElementVisible', ({ offsetWidth, offsetHeight, clientRectsLength, visible }) => {
+ const element = {
+ offsetWidth,
+ offsetHeight,
+ getClientRects: () => new Array(clientRectsLength),
+ };
+
+ const paramDescription = `offsetWidth=${offsetWidth}, offsetHeight=${offsetHeight}, and getClientRects().length=${clientRectsLength}`;
+
+ describe('isElementVisible', () => {
+ it(`returns ${visible} when ${paramDescription}`, () => {
+ expect(isElementVisible(element)).toBe(visible);
});
-
- describe('isElementHidden', () => {
- it(`returns ${!visible} when ${paramDescription}`, () => {
- expect(isElementHidden(element)).toBe(!visible);
- });
- });
- },
- );
+ });
+ });
describe('getParents', () => {
it('gets all parents of an element', () => {
diff --git a/spec/frontend/lib/utils/forms_spec.js b/spec/frontend/lib/utils/forms_spec.js
index 123d36ac5d5..2f71b26b29a 100644
--- a/spec/frontend/lib/utils/forms_spec.js
+++ b/spec/frontend/lib/utils/forms_spec.js
@@ -157,7 +157,7 @@ describe('lib/utils/forms', () => {
mountEl.innerHTML = `
<input type="text" placeholder="Name" value="Administrator" name="user[name]" id="user_name" data-js-name="name">
<input type="text" placeholder="Email" value="foo@bar.com" name="user[contact_info][email]" id="user_contact_info_email" data-js-name="contactInfoEmail">
- <input type="text" placeholder="Phone" value="(123) 456-7890" name="user[contact_info][phone]" id="user_contact_info_phone" data-js-name="contact_info_phone">
+ <input type="text" placeholder="Phone" value="(123) 456-7890" name="user[contact_info][phone]" id="user_contact_info_phone" maxlength="12" pattern="mockPattern" data-js-name="contact_info_phone">
<input type="hidden" placeholder="Job title" value="" name="user[job_title]" id="user_job_title" data-js-name="jobTitle">
<textarea name="user[bio]" id="user_bio" data-js-name="bio">Foo bar</textarea>
<select name="user[timezone]" id="user_timezone" data-js-name="timezone">
@@ -192,6 +192,8 @@ describe('lib/utils/forms', () => {
id: 'user_contact_info_phone',
value: '(123) 456-7890',
placeholder: 'Phone',
+ maxLength: 12,
+ pattern: 'mockPattern',
},
jobTitle: {
name: 'user[job_title]',
diff --git a/spec/frontend/lib/utils/rails_ujs_spec.js b/spec/frontend/lib/utils/rails_ujs_spec.js
new file mode 100644
index 00000000000..00c29b72e73
--- /dev/null
+++ b/spec/frontend/lib/utils/rails_ujs_spec.js
@@ -0,0 +1,78 @@
+import { setHTMLFixture } from 'helpers/fixtures';
+import waitForPromises from 'helpers/wait_for_promises';
+
+beforeAll(async () => {
+ // @rails/ujs expects jQuery.ajaxPrefilter to exist if jQuery exists at
+ // import time. This is only a problem in tests, since we expose jQuery
+ // globally earlier than in production builds. Work around this by pretending
+ // that jQuery isn't available *before* we import @rails/ujs.
+ delete global.jQuery;
+
+ const { initRails } = await import('~/lib/utils/rails_ujs.js');
+ initRails();
+});
+
+function mockXHRResponse({ responseText, responseContentType } = {}) {
+ jest
+ .spyOn(global.XMLHttpRequest.prototype, 'getResponseHeader')
+ .mockReturnValue(responseContentType);
+
+ jest.spyOn(global.XMLHttpRequest.prototype, 'send').mockImplementation(function send() {
+ requestAnimationFrame(() => {
+ Object.defineProperties(this, {
+ readyState: { value: XMLHttpRequest.DONE },
+ status: { value: 200 },
+ response: { value: responseText },
+ });
+ this.onreadystatechange();
+ });
+ });
+}
+
+// This is a test to make sure that the patch-package patch correctly disables
+// script execution for data-remote attributes.
+it('does not perform script execution via data-remote', async () => {
+ global.scriptExecutionSpy = jest.fn();
+
+ mockXHRResponse({
+ responseText: 'scriptExecutionSpy();',
+ responseContentType: 'application/javascript',
+ });
+
+ setHTMLFixture(`
+ <a href="/foo/evil.js"
+ data-remote="true"
+ data-method="get"
+ data-type="script"
+ data-testid="evil-link"
+ >XSS</a>
+ `);
+
+ const link = document.querySelector('[data-testid="evil-link"]');
+ const ajaxSuccessSpy = jest.fn();
+ link.addEventListener('ajax:success', ajaxSuccessSpy);
+
+ link.click();
+
+ await waitForPromises();
+
+ // Make sure Rails ajax machinery finished working as expected to avoid false
+ // positives
+ expect(ajaxSuccessSpy).toHaveBeenCalledTimes(1);
+
+ // If @rails/ujs has been patched correctly, this next assertion should pass.
+ //
+ // Because it's asserting something didn't happen, it is possible for it to
+ // pass for the wrong reason. So, to verify that this test correctly fails
+ // when @rails/ujs has not been patched, run:
+ //
+ // yarn patch-package --reverse
+ //
+ // And then re-run this test. The spy should now be called, and correctly
+ // fail the test.
+ //
+ // To restore the patch(es), run:
+ //
+ // yarn install
+ expect(global.scriptExecutionSpy).not.toHaveBeenCalled();
+});
diff --git a/spec/frontend/lib/utils/table_utility_spec.js b/spec/frontend/lib/utils/table_utility_spec.js
index 0ceccbe4c74..df9006f4909 100644
--- a/spec/frontend/lib/utils/table_utility_spec.js
+++ b/spec/frontend/lib/utils/table_utility_spec.js
@@ -9,6 +9,13 @@ describe('table_utility', () => {
});
});
+ describe('thWidthPercent', () => {
+ it('returns the width class including default table header classes', () => {
+ const width = 50;
+ expect(tableUtils.thWidthPercent(width)).toBe(`gl-w-${width}p`);
+ });
+ });
+
describe('sortObjectToString', () => {
it('returns the expected sorting string ending in "DESC" when sortDesc is true', () => {
expect(tableUtils.sortObjectToString({ sortBy: 'mergedAt', sortDesc: true })).toBe(
diff --git a/spec/frontend/lib/utils/users_cache_spec.js b/spec/frontend/lib/utils/users_cache_spec.js
index d35ba20f570..5a55874b5fa 100644
--- a/spec/frontend/lib/utils/users_cache_spec.js
+++ b/spec/frontend/lib/utils/users_cache_spec.js
@@ -154,8 +154,8 @@ describe('UsersCache', () => {
};
const user = await UsersCache.retrieveById(dummyUserId);
- expect(user).toBe(dummyUser);
- expect(UsersCache.internalStorage[dummyUserId]).toBe(dummyUser);
+ expect(user).toEqual(dummyUser);
+ expect(UsersCache.internalStorage[dummyUserId]).toEqual(dummyUser);
});
it('returns undefined if Ajax call fails and cache is empty', async () => {
@@ -180,6 +180,29 @@ describe('UsersCache', () => {
const user = await UsersCache.retrieveById(dummyUserId);
expect(user).toBe(dummyUser);
});
+
+ it('does not clobber existing cached values', async () => {
+ UsersCache.internalStorage[dummyUserId] = {
+ status: dummyUserStatus,
+ };
+
+ apiSpy = (id) => {
+ expect(id).toBe(dummyUserId);
+
+ return Promise.resolve({
+ data: dummyUser,
+ });
+ };
+
+ const user = await UsersCache.retrieveById(dummyUserId);
+ const expectedUser = {
+ status: dummyUserStatus,
+ ...dummyUser,
+ };
+
+ expect(user).toEqual(expectedUser);
+ expect(UsersCache.internalStorage[dummyUserId]).toEqual(expectedUser);
+ });
});
describe('retrieveStatusById', () => {
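The two adjustments in this file hang together: toBe asserts referential identity while toEqual performs a deep structural comparison, which is what is needed once the cache merges freshly fetched user data into an existing entry instead of replacing the object. A standalone sketch of that merge-without-clobbering behaviour (names and data are illustrative, not the actual UsersCache implementation):

    // Illustrative cache that merges new data into an existing entry rather than
    // overwriting it, so previously stored fields (e.g. `status`) survive a fetch.
    const cache = Object.create(null);

    function storeUser(id, data) {
      // Spread order matters: existing fields first, fetched fields second, so
      // fetched values win on conflict but unrelated fields are preserved.
      cache[id] = { ...cache[id], ...data };
      return cache[id];
    }

    // Usage mirroring the new spec: a cached status is kept after fetching.
    storeUser(1, { status: { emoji: 'speech_balloon' } });
    const merged = storeUser(1, { username: 'root', name: 'Administrator' });
    // `merged` is the same object as cache[1] and still contains `status`
    // alongside the newly fetched fields, hence the switch to toEqual.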
diff --git a/spec/frontend/logs/utils_spec.js b/spec/frontend/logs/utils_spec.js
deleted file mode 100644
index 986fe320363..00000000000
--- a/spec/frontend/logs/utils_spec.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import { getTimeRange } from '~/logs/utils';
-
-describe('logs/utils', () => {
- describe('getTimeRange', () => {
- const nowTimestamp = 1577836800000;
- const nowString = '2020-01-01T00:00:00.000Z';
-
- beforeEach(() => {
- jest.spyOn(Date, 'now').mockImplementation(() => nowTimestamp);
- });
-
- afterEach(() => {
- Date.now.mockRestore();
- });
-
- it('returns the right values', () => {
- expect(getTimeRange(0)).toEqual({
- start: '2020-01-01T00:00:00.000Z',
- end: nowString,
- });
-
- expect(getTimeRange(60 * 30)).toEqual({
- start: '2019-12-31T23:30:00.000Z',
- end: nowString,
- });
-
- expect(getTimeRange(60 * 60 * 24 * 7 * 1)).toEqual({
- start: '2019-12-25T00:00:00.000Z',
- end: nowString,
- });
-
- expect(getTimeRange(60 * 60 * 24 * 7 * 4)).toEqual({
- start: '2019-12-04T00:00:00.000Z',
- end: nowString,
- });
- });
- });
-});
diff --git a/spec/frontend/members/components/members_tabs_spec.js b/spec/frontend/members/components/members_tabs_spec.js
index 1d882e5ef09..1354b938d77 100644
--- a/spec/frontend/members/components/members_tabs_spec.js
+++ b/spec/frontend/members/components/members_tabs_spec.js
@@ -9,6 +9,7 @@ import {
MEMBER_TYPES,
TAB_QUERY_PARAM_VALUES,
ACTIVE_TAB_QUERY_PARAM_NAME,
+ FILTERED_SEARCH_TOKEN_GROUPS_WITH_INHERITED_PERMISSIONS,
} from '~/members/constants';
import { pagination } from '../mock_data';
@@ -42,6 +43,7 @@ describe('MembersTabs', () => {
},
filteredSearchBar: {
searchParam: 'search_groups',
+ tokens: [FILTERED_SEARCH_TOKEN_GROUPS_WITH_INHERITED_PERMISSIONS.type],
},
},
},
@@ -163,6 +165,18 @@ describe('MembersTabs', () => {
expect(findTabByText('Groups')).not.toBeUndefined();
});
});
+
+ describe('when url param matches `filteredSearchBar.tokens`', () => {
+ beforeEach(() => {
+ setWindowLocation('?groups_with_inherited_permissions=exclude');
+ });
+
+ it('shows tab that corresponds to filtered search token', async () => {
+ await createComponent({ totalItems: 0 });
+
+ expect(findTabByText('Groups')).not.toBeUndefined();
+ });
+ });
});
describe('when `canManageMembers` is `false`', () => {
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 298a01e4f4d..08baa663bf0 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -16,12 +16,11 @@ import {
MEMBER_STATE_CREATED,
MEMBER_STATE_AWAITING,
MEMBER_STATE_ACTIVE,
- USER_STATE_BLOCKED_PENDING_APPROVAL,
- BADGE_LABELS_AWAITING_USER_SIGNUP,
- BADGE_LABELS_PENDING_OWNER_APPROVAL,
+ USER_STATE_BLOCKED,
+ BADGE_LABELS_AWAITING_SIGNUP,
+ BADGE_LABELS_PENDING,
TAB_QUERY_PARAM_VALUES,
} from '~/members/constants';
-import * as initUserPopovers from '~/user_popovers';
import {
member as memberMock,
directMember,
@@ -134,14 +133,14 @@ describe('MembersTable', () => {
describe('Invited column', () => {
describe.each`
- state | userState | expectedBadgeLabel
- ${MEMBER_STATE_CREATED} | ${null} | ${BADGE_LABELS_AWAITING_USER_SIGNUP}
- ${MEMBER_STATE_CREATED} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_APPROVAL}
- ${MEMBER_STATE_AWAITING} | ${''} | ${BADGE_LABELS_AWAITING_USER_SIGNUP}
- ${MEMBER_STATE_AWAITING} | ${USER_STATE_BLOCKED_PENDING_APPROVAL} | ${BADGE_LABELS_PENDING_OWNER_APPROVAL}
- ${MEMBER_STATE_AWAITING} | ${'something_else'} | ${BADGE_LABELS_PENDING_OWNER_APPROVAL}
- ${MEMBER_STATE_ACTIVE} | ${null} | ${''}
- ${MEMBER_STATE_ACTIVE} | ${'something_else'} | ${''}
+ state | userState | expectedBadgeLabel
+ ${MEMBER_STATE_CREATED} | ${null} | ${BADGE_LABELS_AWAITING_SIGNUP}
+ ${MEMBER_STATE_CREATED} | ${USER_STATE_BLOCKED} | ${BADGE_LABELS_PENDING}
+ ${MEMBER_STATE_AWAITING} | ${''} | ${BADGE_LABELS_AWAITING_SIGNUP}
+ ${MEMBER_STATE_AWAITING} | ${USER_STATE_BLOCKED} | ${BADGE_LABELS_PENDING}
+ ${MEMBER_STATE_AWAITING} | ${'something_else'} | ${BADGE_LABELS_PENDING}
+ ${MEMBER_STATE_ACTIVE} | ${null} | ${''}
+ ${MEMBER_STATE_ACTIVE} | ${'something_else'} | ${''}
`('Invited Badge', ({ state, userState, expectedBadgeLabel }) => {
it(`${
expectedBadgeLabel ? 'shows' : 'hides'
@@ -257,14 +256,6 @@ describe('MembersTable', () => {
});
});
- it('initializes user popovers when mounted', () => {
- const initUserPopoversMock = jest.spyOn(initUserPopovers, 'default');
-
- createComponent();
-
- expect(initUserPopoversMock).toHaveBeenCalled();
- });
-
it('adds QA selector to table', () => {
createComponent();
diff --git a/spec/frontend/members/index_spec.js b/spec/frontend/members/index_spec.js
index efabe54f238..251a8b0b774 100644
--- a/spec/frontend/members/index_spec.js
+++ b/spec/frontend/members/index_spec.js
@@ -24,7 +24,7 @@ describe('initMembersApp', () => {
beforeEach(() => {
el = document.createElement('div');
- el.setAttribute('data-members-data', dataAttribute);
+ el.dataset.membersData = dataAttribute;
window.gon = { current_user_id: 123 };
});
diff --git a/spec/frontend/members/utils_spec.js b/spec/frontend/members/utils_spec.js
index a157cfa1c1d..b0c9459ff4f 100644
--- a/spec/frontend/members/utils_spec.js
+++ b/spec/frontend/members/utils_spec.js
@@ -256,7 +256,7 @@ describe('Members Utils', () => {
beforeEach(() => {
el = document.createElement('div');
- el.setAttribute('data-members-data', dataAttribute);
+ el.dataset.membersData = dataAttribute;
});
afterEach(() => {
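The setAttribute('data-members-data', …) to el.dataset.membersData changes in this file and in index_spec.js are behaviourally equivalent: the dataset API maps a kebab-case data-* attribute to a camelCase property on the element. A small standalone illustration:

    // Attribute API and dataset API address the same underlying data-* attribute;
    // `data-members-data` surfaces as the camelCase `membersData` key and vice versa.
    const el = document.createElement('div');

    el.setAttribute('data-members-data', '{"source_id":1}');
    console.log(el.dataset.membersData); // '{"source_id":1}'

    el.dataset.membersData = '{"source_id":2}';
    console.log(el.getAttribute('data-members-data')); // '{"source_id":2}'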
diff --git a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
index 55e666609bd..4fdc4024e10 100644
--- a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
+++ b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
@@ -59,7 +59,7 @@ describe('Merge Conflict Resolver App', () => {
const title = findConflictsCount();
expect(title.exists()).toBe(true);
- expect(title.text().trim()).toBe('Showing 3 conflicts between test-conflicts and main');
+ expect(title.text().trim()).toBe('Showing 3 conflicts');
});
it('shows a loading spinner while loading', () => {
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index ccbc61ea658..f0f051cbc8b 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -325,6 +325,28 @@ describe('MergeRequestTabs', () => {
expect(window.scrollTo.mock.calls[0]).toEqual([0, 39]);
});
+ it.each`
+ tab | hides | hidesText
+ ${'show'} | ${false} | ${'shows'}
+ ${'diffs'} | ${true} | ${'hides'}
+ ${'commits'} | ${true} | ${'hides'}
+ `('it $hidesText expand button on $tab tab', ({ tab, hides }) => {
+ const expandButton = document.createElement('div');
+ expandButton.classList.add('js-expand-sidebar');
+
+ const tabsContainer = document.createElement('div');
+ tabsContainer.innerHTML =
+ '<div class="tab-content"><div id="diff-notes-app"></div><div class="commits tab-pane"></div></div>';
+ tabsContainer.classList.add('merge-request-tabs-container');
+ tabsContainer.appendChild(expandButton);
+ document.body.appendChild(tabsContainer);
+
+ testContext.class = new MergeRequestTabs({ stubLocation });
+ testContext.class.tabShown(tab, 'foobar');
+
+ expect(testContext.class.expandSidebar.classList.contains('gl-display-none!')).toBe(hides);
+ });
+
describe('when switching tabs', () => {
const SCROLL_TOP = 100;
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index a93035cc53a..a9f37f90561 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -151,7 +151,7 @@ exports[`Dashboard template matches the default snapshot 1`] = `
emptynodatasvgpath="/images/illustrations/monitoring/no_data.svg"
emptyunabletoconnectsvgpath="/images/illustrations/monitoring/unable_to_connect.svg"
selectedstate="gettingStarted"
- settingspath="/monitoring/monitor-project/-/integrations/prometheus/edit"
+ settingspath="/monitoring/monitor-project/-/settings/integrations/prometheus/edit"
/>
</div>
`;
diff --git a/spec/frontend/monitoring/components/graph_group_spec.js b/spec/frontend/monitoring/components/graph_group_spec.js
index c5b45564089..31f52f6627b 100644
--- a/spec/frontend/monitoring/components/graph_group_spec.js
+++ b/spec/frontend/monitoring/components/graph_group_spec.js
@@ -34,17 +34,17 @@ describe('Graph group component', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
- it('should show the angle-down caret icon', () => {
+ it('should show the chevron-lg-down caret icon', () => {
expect(findContent().isVisible()).toBe(true);
- expect(findCaretIcon().props('name')).toBe('angle-down');
+ expect(findCaretIcon().props('name')).toBe('chevron-lg-down');
});
- it('should show the angle-right caret icon when the user collapses the group', async () => {
+ it('should show the chevron-lg-right caret icon when the user collapses the group', async () => {
findToggleButton().trigger('click');
await nextTick();
expect(findContent().isVisible()).toBe(false);
- expect(findCaretIcon().props('name')).toBe('angle-right');
+ expect(findCaretIcon().props('name')).toBe('chevron-lg-right');
});
it('should contain a tab index for the collapse button', () => {
@@ -60,7 +60,7 @@ describe('Graph group component', () => {
await nextTick();
expect(findContent().isVisible()).toBe(true);
- expect(findCaretIcon().props('name')).toBe('angle-down');
+ expect(findCaretIcon().props('name')).toBe('chevron-lg-down');
});
});
@@ -72,15 +72,15 @@ describe('Graph group component', () => {
});
});
- it('should show the angle-down caret icon when collapseGroup is true', () => {
- expect(findCaretIcon().props('name')).toBe('angle-right');
+ it('should show the chevron-lg-down caret icon when collapseGroup is true', () => {
+ expect(findCaretIcon().props('name')).toBe('chevron-lg-right');
});
- it('should show the angle-right caret icon when collapseGroup is false', async () => {
+ it('should show the chevron-lg-right caret icon when collapseGroup is false', async () => {
findToggleButton().trigger('click');
await nextTick();
- expect(findCaretIcon().props('name')).toBe('angle-down');
+ expect(findCaretIcon().props('name')).toBe('chevron-lg-down');
});
it('should call collapse the graph group content when enter is pressed on the caret icon', () => {
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
index 6a19815883a..f4062adea81 100644
--- a/spec/frontend/monitoring/fixture_data.js
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -14,13 +14,12 @@ const datasetState = stateAndPropsFromDataset(
convertObjectPropsToCamelCase(metricsDashboardResponse.metrics_data),
);
-// new properties like addDashboardDocumentationPath prop and alertsEndpoint
+// new properties like addDashboardDocumentationPath prop
// was recently added to dashboard.vue component this needs to be
// added to fixtures data
// https://gitlab.com/gitlab-org/gitlab/-/issues/229256
export const dashboardProps = {
...datasetState.dataProps,
- alertsEndpoint: null,
};
export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
diff --git a/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap b/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap
deleted file mode 100644
index 5d84b4660c9..00000000000
--- a/spec/frontend/mr_popover/__snapshots__/mr_popover_spec.js.snap
+++ /dev/null
@@ -1,91 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`MR Popover loaded state matches the snapshot 1`] = `
-<gl-popover-stub
- boundary="viewport"
- cssclasses=""
- placement="top"
- show=""
- target=""
->
- <div
- class="mr-popover"
- >
- <div
- class="d-flex align-items-center justify-content-between"
- >
- <div
- class="d-inline-flex align-items-center"
- >
- <div
- class="issuable-status-box status-box status-box-open"
- >
-
- Open
-
- </div>
-
- <span
- class="gl-text-secondary"
- >
- Opened
- <time>
- just now
- </time>
- </span>
- </div>
-
- <ci-icon-stub
- cssclasses=""
- size="16"
- status="[object Object]"
- />
- </div>
-
- <h5
- class="my-2"
- >
- Updated Title
- </h5>
-
- <div
- class="gl-text-secondary"
- >
-
- foo/bar!1
-
- </div>
- </div>
-</gl-popover-stub>
-`;
-
-exports[`MR Popover shows skeleton-loader while apollo is loading 1`] = `
-<gl-popover-stub
- boundary="viewport"
- cssclasses=""
- placement="top"
- show=""
- target=""
->
- <div
- class="mr-popover"
- >
- <div>
- <gl-skeleton-loading-stub
- class="animation-container-small mt-1"
- lines="1"
- />
- </div>
-
- <!---->
-
- <div
- class="gl-text-secondary"
- >
-
- foo/bar!1
-
- </div>
- </div>
-</gl-popover-stub>
-`;
diff --git a/spec/frontend/mr_popover/mr_popover_spec.js b/spec/frontend/mr_popover/mr_popover_spec.js
deleted file mode 100644
index 23f97073e9e..00000000000
--- a/spec/frontend/mr_popover/mr_popover_spec.js
+++ /dev/null
@@ -1,80 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import MRPopover from '~/mr_popover/components/mr_popover.vue';
-import CiIcon from '~/vue_shared/components/ci_icon.vue';
-
-describe('MR Popover', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = shallowMount(MRPopover, {
- propsData: {
- target: document.createElement('a'),
- projectPath: 'foo/bar',
- mergeRequestIID: '1',
- mergeRequestTitle: 'MR Title',
- },
- mocks: {
- $apollo: {
- queries: {
- mergeRequest: {
- loading: false,
- },
- },
- },
- },
- });
- });
-
- it('shows skeleton-loader while apollo is loading', async () => {
- wrapper.vm.$apollo.queries.mergeRequest.loading = true;
-
- await nextTick();
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('loaded state', () => {
- it('matches the snapshot', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- mergeRequest: {
- title: 'Updated Title',
- state: 'opened',
- createdAt: new Date(),
- headPipeline: {
- detailedStatus: {
- group: 'success',
- status: 'status_success',
- },
- },
- },
- });
-
- await nextTick();
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('does not show CI Icon if there is no pipeline data', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- mergeRequest: {
- state: 'opened',
- headPipeline: null,
- stateHumanName: 'Open',
- title: 'Merge Request Title',
- createdAt: new Date(),
- },
- });
-
- await nextTick();
- expect(wrapper.find(CiIcon).exists()).toBe(false);
- });
-
- it('falls back to cached MR title when request fails', async () => {
- await nextTick();
- expect(wrapper.text()).toContain('MR Title');
- });
- });
-});
diff --git a/spec/frontend/nav/components/responsive_header_spec.js b/spec/frontend/nav/components/responsive_header_spec.js
index 937c44727c7..f87de0afb14 100644
--- a/spec/frontend/nav/components/responsive_header_spec.js
+++ b/spec/frontend/nav/components/responsive_header_spec.js
@@ -43,7 +43,7 @@ describe('~/nav/components/top_nav_menu_sections.vue', () => {
menuItem: {
id: 'home',
view: 'home',
- icon: 'angle-left',
+ icon: 'chevron-lg-left',
},
iconOnly: true,
});
@@ -60,7 +60,7 @@ describe('~/nav/components/top_nav_menu_sections.vue', () => {
it('emits menu-item-click', () => {
expect(wrapper.emitted()).toEqual({
- 'menu-item-click': [[{ id: 'home', view: 'home', icon: 'angle-left' }]],
+ 'menu-item-click': [[{ id: 'home', view: 'home', icon: 'chevron-lg-left' }]],
});
});
});
diff --git a/spec/frontend/notebook/cells/markdown_spec.js b/spec/frontend/notebook/cells/markdown_spec.js
index 7dc6f90d202..de415b5bfe0 100644
--- a/spec/frontend/notebook/cells/markdown_spec.js
+++ b/spec/frontend/notebook/cells/markdown_spec.js
@@ -78,8 +78,8 @@ describe('Markdown component', () => {
});
await nextTick();
- expect(findLink().getAttribute('data-remote')).toBe(null);
- expect(findLink().getAttribute('data-type')).toBe(null);
+ expect(findLink().dataset.remote).toBeUndefined();
+ expect(findLink().dataset.type).toBeUndefined();
});
describe('When parsing images', () => {
diff --git a/spec/frontend/notes/components/comment_field_layout_spec.js b/spec/frontend/notes/components/comment_field_layout_spec.js
index 90c989540b9..d69c2c4adfa 100644
--- a/spec/frontend/notes/components/comment_field_layout_spec.js
+++ b/spec/frontend/notes/components/comment_field_layout_spec.js
@@ -135,14 +135,14 @@ describe('Comment Field Layout Component', () => {
});
});
- describe('issue has email participants, but note is confidential', () => {
+ describe('issue has email participants, but note is internal', () => {
it('does not show EmailParticipantsWarning', () => {
createWrapper({
noteableData: {
...noteableDataMock,
issue_email_participants: [{ email: 'someone@gitlab.com' }],
},
- noteIsConfidential: true,
+ isInternalNote: true,
});
expect(findEmailParticipantsWarning().exists()).toBe(false);
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index ba5d4d27e55..116016ecae2 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -32,7 +32,7 @@ describe('issue_comment_form component', () => {
const findTextArea = () => wrapper.findByTestId('comment-field');
const findAddToReviewButton = () => wrapper.findByTestId('add-to-review-button');
const findAddCommentNowButton = () => wrapper.findByTestId('add-comment-now-button');
- const findConfidentialNoteCheckbox = () => wrapper.findByTestId('confidential-note-checkbox');
+ const findConfidentialNoteCheckbox = () => wrapper.findByTestId('internal-note-checkbox');
const findCommentTypeDropdown = () => wrapper.findComponent(CommentTypeDropdown);
const findCommentButton = () => findCommentTypeDropdown().find('button');
const findErrorAlerts = () => wrapper.findAllComponents(GlAlert).wrappers;
@@ -249,15 +249,15 @@ describe('issue_comment_form component', () => {
describe('textarea', () => {
describe('general', () => {
it.each`
- noteType | confidential | placeholder
- ${'comment'} | ${false} | ${'Write a comment or drag your files here…'}
- ${'internal note'} | ${true} | ${'Write an internal note or drag your files here…'}
+ noteType | noteIsInternal | placeholder
+ ${'comment'} | ${false} | ${'Write a comment or drag your files here…'}
+ ${'internal note'} | ${true} | ${'Write an internal note or drag your files here…'}
`(
'should render textarea with placeholder for $noteType',
- ({ confidential, placeholder }) => {
+ ({ noteIsInternal, placeholder }) => {
mountComponent({
mountFunction: mount,
- initialData: { noteIsConfidential: confidential },
+ initialData: { noteIsInternal },
});
expect(findTextArea().attributes('placeholder')).toBe(placeholder);
@@ -389,14 +389,14 @@ describe('issue_comment_form component', () => {
});
it.each`
- confidential | buttonText
- ${false} | ${'Comment'}
- ${true} | ${'Add internal note'}
- `('renders comment button with text "$buttonText"', ({ confidential, buttonText }) => {
+ noteIsInternal | buttonText
+ ${false} | ${'Comment'}
+ ${true} | ${'Add internal note'}
+ `('renders comment button with text "$buttonText"', ({ noteIsInternal, buttonText }) => {
mountComponent({
mountFunction: mount,
- noteableData: createNotableDataMock({ confidential }),
- initialData: { noteIsConfidential: confidential },
+ noteableData: createNotableDataMock({ confidential: noteIsInternal }),
+ initialData: { noteIsInternal },
});
expect(findCommentButton().text()).toBe(buttonText);
@@ -487,8 +487,8 @@ describe('issue_comment_form component', () => {
await findCloseReopenButton().trigger('click');
- await nextTick;
- await nextTick;
+ await nextTick();
+ await nextTick();
expect(createFlash).toHaveBeenCalledWith({
message: `Something went wrong while closing the ${type}. Please try again later.`,
@@ -523,8 +523,8 @@ describe('issue_comment_form component', () => {
await findCloseReopenButton().trigger('click');
- await nextTick;
- await nextTick;
+ await nextTick();
+ await nextTick();
expect(createFlash).toHaveBeenCalledWith({
message: `Something went wrong while reopening the ${type}. Please try again later.`,
diff --git a/spec/frontend/notes/components/note_body_spec.js b/spec/frontend/notes/components/note_body_spec.js
index 378dcb97fab..0f765a8da87 100644
--- a/spec/frontend/notes/components/note_body_spec.js
+++ b/spec/frontend/notes/components/note_body_spec.js
@@ -1,5 +1,5 @@
-import { shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { suggestionCommitMessage } from '~/diffs/store/getters';
import NoteBody from '~/notes/components/note_body.vue';
@@ -7,6 +7,7 @@ import NoteAwardsList from '~/notes/components/note_awards_list.vue';
import NoteForm from '~/notes/components/note_form.vue';
import createStore from '~/notes/stores';
import notes from '~/notes/stores/modules/index';
+import { INTERNAL_NOTE_CLASSES } from '~/notes/constants';
import Suggestions from '~/vue_shared/components/markdown/suggestions.vue';
@@ -27,7 +28,7 @@ const createComponent = ({
mockStore.dispatch('setNotesData', notesData);
}
- return shallowMount(NoteBody, {
+ return shallowMountExtended(NoteBody, {
store: mockStore || store,
propsData: {
note,
@@ -58,6 +59,24 @@ describe('issue_note_body component', () => {
expect(wrapper.findComponent(NoteAwardsList).exists()).toBe(true);
});
+ it('should not have internal note classes', () => {
+ expect(wrapper.findByTestId('note-internal-container').classes()).not.toEqual(
+ INTERNAL_NOTE_CLASSES,
+ );
+ });
+
+ describe('isInternalNote', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ props: { isInternalNote: true } });
+ });
+
+ it('should have internal note classes', () => {
+ expect(wrapper.findByTestId('note-internal-container').classes()).toEqual(
+ INTERNAL_NOTE_CLASSES,
+ );
+ });
+ });
+
describe('isEditing', () => {
beforeEach(() => {
wrapper = createComponent({ props: { isEditing: true } });
@@ -86,6 +105,18 @@ describe('issue_note_body component', () => {
// which is defined in `app/assets/javascripts/notes/mixins/autosave.js`
expect(wrapper.vm.autosave.key).toEqual(autosaveKey);
});
+
+ describe('isInternalNote', () => {
+ beforeEach(() => {
+ wrapper.setProps({ isInternalNote: true });
+ });
+
+ it('should not have internal note classes', () => {
+ expect(wrapper.findByTestId('note-internal-container').classes()).not.toEqual(
+ INTERNAL_NOTE_CLASSES,
+ );
+ });
+ });
});
describe('commitMessage', () => {
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index 310a470aa18..ad2cf1c5a35 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -21,7 +21,7 @@ describe('NoteHeader component', () => {
const findActionText = () => wrapper.find({ ref: 'actionText' });
const findTimestampLink = () => wrapper.find({ ref: 'noteTimestampLink' });
const findTimestamp = () => wrapper.find({ ref: 'noteTimestamp' });
- const findConfidentialIndicator = () => wrapper.findByTestId('internalNoteIndicator');
+ const findInternalNoteIndicator = () => wrapper.findByTestId('internalNoteIndicator');
const findSpinner = () => wrapper.find({ ref: 'spinner' });
const findAuthorStatus = () => wrapper.find({ ref: 'authorStatus' });
@@ -283,20 +283,20 @@ describe('NoteHeader component', () => {
});
});
- describe('with confidentiality indicator', () => {
+ describe('with internal note badge', () => {
it.each`
status | condition
${true} | ${'shows'}
${false} | ${'hides'}
- `('$condition icon indicator when isConfidential is $status', ({ status }) => {
- createComponent({ isConfidential: status });
- expect(findConfidentialIndicator().exists()).toBe(status);
+ `('$condition badge when isInternalNote is $status', ({ status }) => {
+ createComponent({ isInternalNote: status });
+ expect(findInternalNoteIndicator().exists()).toBe(status);
});
- it('shows confidential indicator tooltip for project context', () => {
- createComponent({ isConfidential: true, noteableType: 'issue' });
+ it('shows internal note badge tooltip for project context', () => {
+ createComponent({ isInternalNote: true, noteableType: 'issue' });
- expect(findConfidentialIndicator().attributes('title')).toBe(
+ expect(findInternalNoteIndicator().attributes('title')).toBe(
'This internal note will always remain confidential',
);
});
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index c46d3bbe5b2..ddfa77117ca 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -87,10 +87,27 @@ describe('noteable_discussion component', () => {
expect(noteFormProps.discussion).toBe(discussionMock);
expect(noteFormProps.line).toBe(null);
- expect(noteFormProps.saveButtonTitle).toBe('Comment');
expect(noteFormProps.autosaveKey).toBe(`Note/Issue/${discussionMock.id}/Reply`);
});
+ it.each`
+ noteType | isNoteInternal | saveButtonTitle
+ ${'public'} | ${false} | ${'Reply'}
+ ${'internal'} | ${true} | ${'Reply internally'}
+ `(
+ 'reply button on form should have title "$saveButtonTitle" when note is $noteType',
+ async ({ isNoteInternal, saveButtonTitle }) => {
+ wrapper.setProps({ discussion: { ...discussionMock, confidential: isNoteInternal } });
+ await nextTick();
+
+ const replyPlaceholder = wrapper.find(ReplyPlaceholder);
+ replyPlaceholder.vm.$emit('focus');
+ await nextTick();
+
+ expect(wrapper.find(NoteForm).props('saveButtonTitle')).toBe(saveButtonTitle);
+ },
+ );
+
it('should expand discussion', async () => {
const discussion = { ...discussionMock, expanded: false };
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index 413ee815906..f4eb69e0d49 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -19,8 +19,6 @@ import '~/behaviors/markdown/render_gfm';
import OrderedLayout from '~/vue_shared/components/ordered_layout.vue';
import * as mockData from '../mock_data';
-jest.mock('~/user_popovers', () => jest.fn());
-
setTestTimeout(1000);
const TYPE_COMMENT_FORM = 'comment-form';
@@ -224,7 +222,7 @@ describe('note_app', () => {
});
it('renders skeleton notes', () => {
- expect(wrapper.find('.animation-container').exists()).toBe(true);
+ expect(wrapper.find('.gl-skeleton-loader-default-container').exists()).toBe(true);
});
it('should render form', () => {
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
index c7a6ca5eae3..9fa7166474a 100644
--- a/spec/frontend/notes/mock_data.js
+++ b/spec/frontend/notes/mock_data.js
@@ -785,7 +785,7 @@ export const notesWithDescriptionChanges = [
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
- system_note_icon_name: 'pencil-square',
+ system_note_icon_name: 'pencil',
discussion_id: '7f1feda384083eb31763366e6392399fde6f3f31',
emoji_awardable: false,
report_abuse_path:
@@ -874,7 +874,7 @@ export const notesWithDescriptionChanges = [
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
- system_note_icon_name: 'pencil-square',
+ system_note_icon_name: 'pencil',
discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
emoji_awardable: false,
report_abuse_path:
@@ -918,7 +918,7 @@ export const notesWithDescriptionChanges = [
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
- system_note_icon_name: 'pencil-square',
+ system_note_icon_name: 'pencil',
discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
emoji_awardable: false,
report_abuse_path:
@@ -1105,7 +1105,7 @@ export const collapsedSystemNotes = [
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
- system_note_icon_name: 'pencil-square',
+ system_note_icon_name: 'pencil',
discussion_id: 'a21cf2e804acc3c60d07e37d75e395f5a9a4d044',
emoji_awardable: false,
report_abuse_path:
@@ -1149,7 +1149,7 @@ export const collapsedSystemNotes = [
current_user: { can_edit: false, can_award_emoji: true },
resolved: false,
resolved_by: null,
- system_note_icon_name: 'pencil-square',
+ system_note_icon_name: 'pencil',
discussion_id: '70411b08cdfc01f24187a06d77daa33464cb2620',
emoji_awardable: false,
report_abuse_path:
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index ecb213590ad..38f29ac2559 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -404,13 +404,13 @@ describe('Actions Notes Store', () => {
beforeEach(() => {
axiosMock.onDelete(endpoint).replyOnce(200, {});
- document.body.setAttribute('data-page', '');
+ document.body.dataset.page = '';
});
afterEach(() => {
axiosMock.restore();
- document.body.setAttribute('data-page', '');
+ document.body.dataset.page = '';
});
it('commits DELETE_NOTE and dispatches updateMergeRequestWidget', () => {
@@ -440,7 +440,7 @@ describe('Actions Notes Store', () => {
it('dispatches removeDiscussionsFromDiff on merge request page', () => {
const note = { path: endpoint, id: 1 };
- document.body.setAttribute('data-page', 'projects:merge_requests:show');
+ document.body.dataset.page = 'projects:merge_requests:show';
return testAction(
actions.removeNote,
@@ -473,13 +473,13 @@ describe('Actions Notes Store', () => {
beforeEach(() => {
axiosMock.onDelete(endpoint).replyOnce(200, {});
- document.body.setAttribute('data-page', '');
+ document.body.dataset.page = '';
});
afterEach(() => {
axiosMock.restore();
- document.body.setAttribute('data-page', '');
+ document.body.dataset.page = '';
});
it('dispatches removeNote', () => {
@@ -1382,6 +1382,29 @@ describe('Actions Notes Store', () => {
],
);
});
+
+ it('dispatches `fetchDiscussionsBatch` action if `paginatedMrDiscussions` feature flag is enabled', () => {
+ window.gon = { features: { paginatedMrDiscussions: true } };
+
+ return testAction(
+ actions.fetchDiscussions,
+ { path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
+ null,
+ [],
+ [
+ {
+ type: 'fetchDiscussionsBatch',
+ payload: {
+ config: {
+ params: { notes_filter: 'test-filter', persist_filter: 'test-persist-filter' },
+ },
+ path: 'test-path',
+ perPage: 20,
+ },
+ },
+ ],
+ );
+ });
});
describe('fetchDiscussionsBatch', () => {
@@ -1401,6 +1424,7 @@ describe('Actions Notes Store', () => {
null,
[
{ type: mutationTypes.ADD_OR_UPDATE_DISCUSSIONS, payload: { discussion } },
+ { type: mutationTypes.SET_DONE_FETCHING_BATCH_DISCUSSIONS, payload: true },
{ type: mutationTypes.SET_FETCHING_DISCUSSIONS, payload: false },
],
[{ type: 'updateResolvableDiscussionsCounts' }],
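The new fetchDiscussions case above checks that the action defers to fetchDiscussionsBatch (20 discussions per page) when the paginatedMrDiscussions flag in window.gon.features is enabled. A hedged sketch of what such a flag-gated dispatch typically looks like — only the names visible in the spec are reused; the body is illustrative, not the actual notes store code:

    // Illustrative shape of a feature-flag-gated Vuex action.
    export const fetchDiscussions = ({ dispatch }, { path, filter, persistFilter }) => {
      const config = {
        params: { notes_filter: filter, persist_filter: persistFilter },
      };

      if (window.gon?.features?.paginatedMrDiscussions) {
        // Hand off to the batched fetcher with the payload asserted in the spec.
        return dispatch('fetchDiscussionsBatch', { path, config, perPage: 20 });
      }

      // Non-paginated request path elided; the spec above only exercises the
      // flag-enabled branch.
      return null;
    };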
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index da1547ab6e7..e0a0fc43ffe 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -883,4 +883,16 @@ describe('Notes Store mutations', () => {
expect(state.discussions[0].position).toEqual(position);
});
});
+
+ describe('SET_DONE_FETCHING_BATCH_DISCUSSIONS', () => {
+ it('should set doneFetchingBatchDiscussions', () => {
+ const state = {
+ doneFetchingBatchDiscussions: false,
+ };
+
+ mutations.SET_DONE_FETCHING_BATCH_DISCUSSIONS(state, true);
+
+ expect(state.doneFetchingBatchDiscussions).toEqual(true);
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
index 057312828ff..84f01f10f21 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
@@ -10,6 +10,7 @@ import {
MISSING_MANIFEST_WARNING_TOOLTIP,
NOT_AVAILABLE_TEXT,
NOT_AVAILABLE_SIZE,
+ COPY_IMAGE_PATH_TITLE,
} from '~/packages_and_registries/container_registry/explorer/constants/index';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
@@ -150,7 +151,7 @@ describe('tags list row', () => {
expect(findClipboardButton().attributes()).toMatchObject({
text: tag.location,
- title: tag.location,
+ title: COPY_IMAGE_PATH_TITLE,
});
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
index af5723267f4..0581a40b6a2 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
@@ -1,4 +1,4 @@
-import { GlLink, GlPopover, GlSprintf } from '@gitlab/ui';
+import { GlIcon, GlLink, GlPopover, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { helpPagePath } from '~/helpers/help_page_helper';
import CleanupStatus from '~/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status.vue';
@@ -16,6 +16,7 @@ describe('cleanup_status', () => {
let wrapper;
const findMainIcon = () => wrapper.findByTestId('main-icon');
+ const findMainIconName = () => wrapper.findByTestId('main-icon').find(GlIcon);
const findExtraInfoIcon = () => wrapper.findByTestId('extra-info');
const findPopover = () => wrapper.findComponent(GlPopover);
@@ -61,6 +62,23 @@ describe('cleanup_status', () => {
expect(findMainIcon().exists()).toBe(true);
});
+
+ it.each`
+ status | visible | iconName
+ ${UNFINISHED_STATUS} | ${true} | ${'expire'}
+ ${SCHEDULED_STATUS} | ${true} | ${'clock'}
+ ${ONGOING_STATUS} | ${true} | ${'clock'}
+ ${UNSCHEDULED_STATUS} | ${false} | ${''}
+ `('matches "$iconName" when the status is "$status"', ({ status, visible, iconName }) => {
+ mountComponent({ status });
+
+ expect(findMainIcon().exists()).toBe(visible);
+ if (visible) {
+ const actualIcon = findMainIconName();
+ expect(actualIcon.exists()).toBe(true);
+ expect(actualIcon.props('name')).toBe(iconName);
+ }
+ });
});
describe('extra info icon', () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
index 690d827ec67..979e1500d7d 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
@@ -13,6 +13,7 @@ import {
IMAGE_MIGRATING_STATE,
SCHEDULED_STATUS,
ROOT_IMAGE_TEXT,
+ COPY_IMAGE_PATH_TITLE,
} from '~/packages_and_registries/container_registry/explorer/constants';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ListItem from '~/vue_shared/components/registry/list_item.vue';
@@ -106,7 +107,7 @@ describe('Image List Row', () => {
const button = findClipboardButton();
expect(button.exists()).toBe(true);
expect(button.props('text')).toBe(item.location);
- expect(button.props('title')).toBe(item.location);
+ expect(button.props('title')).toBe(COPY_IMAGE_PATH_TITLE);
});
describe('cleanup status component', () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js
index f811468550d..a006de9f00c 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/registry_header_spec.js
@@ -93,7 +93,7 @@ describe('registry_header', () => {
expect(text.exists()).toBe(true);
expect(text.props()).toMatchObject({
text: EXPIRATION_POLICY_DISABLED_TEXT,
- icon: 'expire',
+ icon: 'clock',
size: 'xl',
});
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
index 6b6c33b7561..95de2f0bb0b 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_files_spec.js
@@ -206,19 +206,19 @@ describe('Package Files', () => {
it('toggles the details row', async () => {
createComponent();
- expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-down');
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('chevron-lg-down');
findFirstToggleDetailsButton().vm.$emit('click');
await nextTick();
expect(findFirstRowShaComponent('sha-256').exists()).toBe(true);
- expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-up');
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('chevron-lg-up');
findFirstToggleDetailsButton().vm.$emit('click');
await nextTick();
expect(findFirstRowShaComponent('sha-256').exists()).toBe(false);
- expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-down');
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('chevron-lg-down');
});
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js
index e5230417c78..a086c20a5e7 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_search_spec.js
@@ -65,7 +65,7 @@ describe('Infrastructure Search', () => {
expect(findRegistrySearch().exists()).toBe(true);
expect(findRegistrySearch().props()).toMatchObject({
- filter: store.state.filter,
+ filters: store.state.filter,
sorting: store.state.sorting,
tokens: [],
sortableFields: sortableFields(),
@@ -80,7 +80,7 @@ describe('Infrastructure Search', () => {
mountComponent(isGroupPage);
expect(findRegistrySearch().props()).toMatchObject({
- filter: store.state.filter,
+ filters: store.state.filter,
sorting: store.state.sorting,
tokens: [],
sortableFields: fields,
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
index 7a71a1cea0f..4f3d780b149 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/additional_metadata_spec.js
@@ -1,4 +1,9 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlAlert } from '@gitlab/ui';
+import * as Sentry from '@sentry/browser';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
import {
conanMetadata,
mavenMetadata,
@@ -6,9 +11,11 @@ import {
packageData,
composerMetadata,
pypiMetadata,
+ packageMetadataQuery,
} from 'jest/packages_and_registries/package_registry/mock_data';
import component from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue';
import {
+ FETCH_PACKAGE_METADATA_ERROR_MESSAGE,
PACKAGE_TYPE_NUGET,
PACKAGE_TYPE_CONAN,
PACKAGE_TYPE_MAVEN,
@@ -16,6 +23,9 @@ import {
PACKAGE_TYPE_COMPOSER,
PACKAGE_TYPE_PYPI,
} from '~/packages_and_registries/package_registry/constants';
+import AdditionalMetadataLoader from '~/packages_and_registries/package_registry/components/details/additional_metadata_loader.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+import getPackageMetadata from '~/packages_and_registries/package_registry/graphql/queries/get_package_metadata.query.graphql';
const mavenPackage = { packageType: PACKAGE_TYPE_MAVEN, metadata: mavenMetadata() };
const conanPackage = { packageType: PACKAGE_TYPE_CONAN, metadata: conanMetadata() };
@@ -24,16 +34,26 @@ const composerPackage = { packageType: PACKAGE_TYPE_COMPOSER, metadata: composer
const pypiPackage = { packageType: PACKAGE_TYPE_PYPI, metadata: pypiMetadata() };
const npmPackage = { packageType: PACKAGE_TYPE_NPM, metadata: {} };
-describe('Package Additional Metadata', () => {
+Vue.use(VueApollo);
+
+describe('Package Additional metadata', () => {
let wrapper;
+ let apolloProvider;
+
const defaultProps = {
- packageEntity: {
- ...packageData(mavenPackage),
- },
+ packageId: packageData().id,
+ packageType: PACKAGE_TYPE_MAVEN,
};
- const mountComponent = (props) => {
+ const mountComponent = ({
+ props = {},
+ resolver = jest.fn().mockResolvedValue(packageMetadataQuery(mavenPackage)),
+ } = {}) => {
+ const requestHandlers = [[getPackageMetadata, resolver]];
+ apolloProvider = createMockApollo(requestHandlers);
+
wrapper = shallowMountExtended(component, {
+ apolloProvider,
propsData: { ...defaultProps, ...props },
stubs: {
component: { template: '<div data-testid="component-is"></div>' },
@@ -41,6 +61,10 @@ describe('Package Additional Metadata', () => {
});
};
+ beforeEach(() => {
+ jest.spyOn(Sentry, 'captureException').mockImplementation();
+ });
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -49,6 +73,22 @@ describe('Package Additional Metadata', () => {
const findTitle = () => wrapper.findByTestId('title');
const findMainArea = () => wrapper.findByTestId('main');
const findComponentIs = () => wrapper.findByTestId('component-is');
+ const findAdditionalMetadataLoader = () => wrapper.findComponent(AdditionalMetadataLoader);
+ const findPackageMetadataAlert = () => wrapper.findComponent(GlAlert);
+
+ it('renders the loading container when loading', () => {
+ mountComponent();
+
+ expect(findAdditionalMetadataLoader().exists()).toBe(true);
+ });
+
+ it('does not render the loading container once resolved', async () => {
+ mountComponent();
+ await waitForPromises();
+
+ expect(findAdditionalMetadataLoader().exists()).toBe(false);
+ expect(Sentry.captureException).not.toHaveBeenCalled();
+ });
it('has the correct title', () => {
mountComponent();
@@ -56,7 +96,25 @@ describe('Package Additional Metadata', () => {
const title = findTitle();
expect(title.exists()).toBe(true);
- expect(title.text()).toBe('Additional Metadata');
+ expect(title.text()).toMatchInterpolatedText(component.i18n.componentTitle);
+ });
+
+ it('does not render gl-alert', () => {
+ mountComponent();
+
+ expect(findPackageMetadataAlert().exists()).toBe(false);
+ });
+
+ it('renders gl-alert if load fails', async () => {
+ mountComponent({ resolver: jest.fn().mockRejectedValue() });
+
+ await waitForPromises();
+
+ expect(findPackageMetadataAlert().exists()).toBe(true);
+ expect(findPackageMetadataAlert().text()).toMatchInterpolatedText(
+ FETCH_PACKAGE_METADATA_ERROR_MESSAGE,
+ );
+ expect(Sentry.captureException).toHaveBeenCalled();
});
it.each`
@@ -68,16 +126,22 @@ describe('Package Additional Metadata', () => {
${pypiPackage} | ${true} | ${PACKAGE_TYPE_PYPI}
${npmPackage} | ${false} | ${PACKAGE_TYPE_NPM}
`(
- `It is $visible that the component is visible when the package is $packageType`,
- ({ packageEntity, visible }) => {
- mountComponent({ packageEntity });
+ `component visibility is $visible when the package is $packageType`,
+ async ({ packageEntity, visible, packageType }) => {
+ const resolved = packageMetadataQuery(packageType);
+ const resolver = jest.fn().mockResolvedValue(resolved);
+
+ mountComponent({ props: { packageType }, resolver });
+
+ await waitForPromises();
+ await nextTick();
expect(findTitle().exists()).toBe(visible);
expect(findMainArea().exists()).toBe(visible);
expect(findComponentIs().exists()).toBe(visible);
if (visible) {
- expect(findComponentIs().props('packageEntity')).toEqual(packageEntity);
+ expect(findComponentIs().props('packageMetadata')).toEqual(packageEntity.metadata);
}
},
);
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/composer_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/composer_spec.js
index e744680cb9a..bb6846d354f 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/composer_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/composer_spec.js
@@ -1,22 +1,16 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import {
- packageData,
- composerMetadata,
-} from 'jest/packages_and_registries/package_registry/mock_data';
+import { composerMetadata } from 'jest/packages_and_registries/package_registry/mock_data';
import component from '~/packages_and_registries/package_registry/components/details/metadata/composer.vue';
-import { PACKAGE_TYPE_COMPOSER } from '~/packages_and_registries/package_registry/constants';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
-const composerPackage = { packageType: PACKAGE_TYPE_COMPOSER, metadata: composerMetadata() };
-
describe('Composer Metadata', () => {
let wrapper;
const mountComponent = () => {
wrapper = shallowMountExtended(component, {
- propsData: { packageEntity: packageData(composerPackage) },
+ propsData: { packageMetadata: composerMetadata() },
stubs: {
DetailsRow,
GlSprintf,
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/conan_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/conan_spec.js
index 46593047f1f..e7e47401aa1 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/conan_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/conan_spec.js
@@ -1,22 +1,16 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import {
- conanMetadata,
- packageData,
-} from 'jest/packages_and_registries/package_registry/mock_data';
+import { conanMetadata } from 'jest/packages_and_registries/package_registry/mock_data';
import component from '~/packages_and_registries/package_registry/components/details/metadata/conan.vue';
-import { PACKAGE_TYPE_CONAN } from '~/packages_and_registries/package_registry/constants';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
-const conanPackage = { packageType: PACKAGE_TYPE_CONAN, metadata: conanMetadata() };
-
describe('Conan Metadata', () => {
let wrapper;
const mountComponent = () => {
wrapper = shallowMountExtended(component, {
propsData: {
- packageEntity: packageData(conanPackage),
+ packageMetadata: conanMetadata(),
},
stubs: {
DetailsRow,
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/maven_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/maven_spec.js
index bc54cf1cb98..8680d983042 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/maven_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/maven_spec.js
@@ -1,24 +1,16 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import {
- mavenMetadata,
- packageData,
-} from 'jest/packages_and_registries/package_registry/mock_data';
+import { mavenMetadata } from 'jest/packages_and_registries/package_registry/mock_data';
import component from '~/packages_and_registries/package_registry/components/details/metadata/maven.vue';
-import { PACKAGE_TYPE_MAVEN } from '~/packages_and_registries/package_registry/constants';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
-const mavenPackage = { packageType: PACKAGE_TYPE_MAVEN, metadata: mavenMetadata() };
-
describe('Maven Metadata', () => {
let wrapper;
const mountComponent = () => {
wrapper = shallowMountExtended(component, {
propsData: {
- packageEntity: {
- ...packageData(mavenPackage),
- },
+ packageMetadata: mavenMetadata(),
},
stubs: {
DetailsRow,
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js
index f759fe7a81c..af3692023f0 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/nuget_spec.js
@@ -1,25 +1,17 @@
import { GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import {
- nugetMetadata,
- packageData,
-} from 'jest/packages_and_registries/package_registry/mock_data';
+import { nugetMetadata } from 'jest/packages_and_registries/package_registry/mock_data';
import component from '~/packages_and_registries/package_registry/components/details/metadata/nuget.vue';
-import { PACKAGE_TYPE_NUGET } from '~/packages_and_registries/package_registry/constants';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
describe('Nuget Metadata', () => {
- let nugetPackage = { packageType: PACKAGE_TYPE_NUGET, metadata: nugetMetadata() };
+ let nugetPackageMetadata = { ...nugetMetadata() };
let wrapper;
- const mountComponent = () => {
+ const mountComponent = (props) => {
wrapper = shallowMountExtended(component, {
- propsData: {
- packageEntity: {
- ...packageData(nugetPackage),
- },
- },
+ propsData: { ...props },
stubs: {
DetailsRow,
GlSprintf,
@@ -37,7 +29,7 @@ describe('Nuget Metadata', () => {
const findElementLink = (container) => container.findComponent(GlLink);
beforeEach(() => {
- mountComponent({ packageEntity: nugetPackage });
+ mountComponent({ packageMetadata: nugetPackageMetadata });
});
it.each`
@@ -49,14 +41,14 @@ describe('Nuget Metadata', () => {
expect(element.exists()).toBe(true);
expect(element.text()).toBe(text);
expect(element.props('icon')).toBe(icon);
- expect(findElementLink(element).attributes('href')).toBe(nugetPackage.metadata[link]);
+ expect(findElementLink(element).attributes('href')).toBe(nugetPackageMetadata[link]);
});
describe('without source', () => {
beforeAll(() => {
- nugetPackage = {
- packageType: PACKAGE_TYPE_NUGET,
- metadata: { iconUrl: 'iconUrl', licenseUrl: 'licenseUrl' },
+ nugetPackageMetadata = {
+ iconUrl: 'iconUrl',
+ licenseUrl: 'licenseUrl',
};
});
@@ -67,9 +59,9 @@ describe('Nuget Metadata', () => {
describe('without license', () => {
beforeAll(() => {
- nugetPackage = {
- packageType: PACKAGE_TYPE_NUGET,
- metadata: { iconUrl: 'iconUrl', projectUrl: 'projectUrl' },
+ nugetPackageMetadata = {
+ iconUrl: 'iconUrl',
+ projectUrl: 'projectUrl',
};
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js
index c4481c3f20b..d7c6ea8379d 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/metadata/pypi_spec.js
@@ -1,22 +1,17 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { packageData, pypiMetadata } from 'jest/packages_and_registries/package_registry/mock_data';
+import { pypiMetadata } from 'jest/packages_and_registries/package_registry/mock_data';
import component from '~/packages_and_registries/package_registry/components/details/metadata/pypi.vue';
-import { PACKAGE_TYPE_PYPI } from '~/packages_and_registries/package_registry/constants';
import DetailsRow from '~/vue_shared/components/registry/details_row.vue';
-const pypiPackage = { packageType: PACKAGE_TYPE_PYPI, metadata: pypiMetadata() };
-
describe('Package Additional Metadata', () => {
let wrapper;
const mountComponent = () => {
wrapper = shallowMountExtended(component, {
propsData: {
- packageEntity: {
- ...packageData(pypiPackage),
- },
+ packageMetadata: pypiMetadata(),
},
stubs: {
DetailsRow,
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
index f8a4ba8f3bc..0447ead0830 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
@@ -34,7 +34,7 @@ describe('Package Files', () => {
},
stubs: {
...stubChildren(PackageFiles),
- GlTable: false,
+ GlTableLite: false,
},
});
};
@@ -219,19 +219,19 @@ describe('Package Files', () => {
it('toggles the details row', async () => {
createComponent();
- expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-down');
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('chevron-down');
findFirstToggleDetailsButton().vm.$emit('click');
await nextTick();
expect(findFirstRowShaComponent('sha-256').exists()).toBe(true);
- expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-up');
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('chevron-up');
findFirstToggleDetailsButton().vm.$emit('click');
await nextTick();
expect(findFirstRowShaComponent('sha-256').exists()).toBe(false);
- expect(findFirstToggleDetailsButton().props('icon')).toBe('angle-down');
+ expect(findFirstToggleDetailsButton().props('icon')).toBe('chevron-down');
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
index 57b8be40a7c..f4e6d43812d 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
@@ -1,17 +1,29 @@
-import { GlLink, GlSprintf } from '@gitlab/ui';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlAlert, GlLink, GlSprintf } from '@gitlab/ui';
+import * as Sentry from '@sentry/browser';
import { stubComponent } from 'helpers/stub_component';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
packageData,
packagePipelines,
+ packagePipelinesQuery,
} from 'jest/packages_and_registries/package_registry/mock_data';
import { HISTORY_PIPELINES_LIMIT } from '~/packages_and_registries/shared/constants';
import component from '~/packages_and_registries/package_registry/components/details/package_history.vue';
+import PackageHistoryLoader from '~/packages_and_registries/package_registry/components/details/package_history_loader.vue';
import HistoryItem from '~/vue_shared/components/registry/history_item.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+import getPackagePipelines from '~/packages_and_registries/package_registry/graphql/queries/get_package_pipelines.query.graphql';
+
+Vue.use(VueApollo);
describe('Package History', () => {
let wrapper;
+ let apolloProvider;
+
const defaultProps = {
projectName: 'baz project',
packageEntity: { ...packageData() },
@@ -22,8 +34,15 @@ describe('Package History', () => {
const createPipelines = (amount) =>
[...Array(amount)].map((x, index) => packagePipelines({ id: index + 1 })[0]);
- const mountComponent = (props) => {
+ const mountComponent = ({
+ props = {},
+ resolver = jest.fn().mockResolvedValue(packagePipelinesQuery()),
+ } = {}) => {
+ const requestHandlers = [[getPackagePipelines, resolver]];
+ apolloProvider = createMockApollo(requestHandlers);
+
wrapper = shallowMountExtended(component, {
+ apolloProvider,
propsData: { ...defaultProps, ...props },
stubs: {
HistoryItem: stubComponent(HistoryItem, {
@@ -34,18 +53,40 @@ describe('Package History', () => {
});
};
+ beforeEach(() => {
+ jest.spyOn(Sentry, 'captureException').mockImplementation();
+ });
+
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
+ const findPackageHistoryLoader = () => wrapper.findComponent(PackageHistoryLoader);
const findHistoryElement = (testId) => wrapper.findByTestId(testId);
const findElementLink = (container) => container.findComponent(GlLink);
const findElementTimeAgo = (container) => container.findComponent(TimeAgoTooltip);
+ const findPackageHistoryAlert = () => wrapper.findComponent(GlAlert);
const findTitle = () => wrapper.findByTestId('title');
const findTimeline = () => wrapper.findByTestId('timeline');
- it('has the correct title', () => {
+ it('renders the loading container when loading', () => {
+ mountComponent();
+
+ expect(findPackageHistoryLoader().exists()).toBe(true);
+ });
+
+ it('does not render the loading container once resolved', async () => {
+ mountComponent();
+ await waitForPromises();
+
+ expect(findPackageHistoryLoader().exists()).toBe(false);
+ expect(Sentry.captureException).not.toHaveBeenCalled();
+ });
+
+ it('has the correct title', async () => {
mountComponent();
+ await waitForPromises();
const title = findTitle();
@@ -53,8 +94,9 @@ describe('Package History', () => {
expect(title.text()).toBe('History');
});
- it('has a timeline container', () => {
+ it('has a timeline container', async () => {
mountComponent();
+ await waitForPromises();
const title = findTimeline();
@@ -64,6 +106,24 @@ describe('Package History', () => {
);
});
+ it('does not render gl-alert', () => {
+ mountComponent();
+
+ expect(findPackageHistoryAlert().exists()).toBe(false);
+ });
+
+ it('renders gl-alert if load fails', async () => {
+ mountComponent({ resolver: jest.fn().mockRejectedValue() });
+
+ await waitForPromises();
+
+ expect(findPackageHistoryAlert().exists()).toBe(true);
+ expect(findPackageHistoryAlert().text()).toEqual(
+ 'Something went wrong while fetching the package history.',
+ );
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+
describe.each`
name | amount | icon | text | timeAgoTooltip | link
${'created-on'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'clock'} | ${'@gitlab-org/package-15 version 1.0.0 was first created'} | ${packageData().createdAt} | ${null}
@@ -78,11 +138,21 @@ describe('Package History', () => {
({ name, icon, text, timeAgoTooltip, link, amount }) => {
let element;
- beforeEach(() => {
- const packageEntity = { ...packageData(), pipelines: { nodes: createPipelines(amount) } };
+ beforeEach(async () => {
+ const packageEntity = { ...packageData() };
+ const pipelinesResolver = jest
+ .fn()
+ .mockResolvedValue(packagePipelinesQuery(createPipelines(amount)));
+
mountComponent({
- packageEntity,
+ props: {
+ packageEntity,
+ },
+ resolver: pipelinesResolver,
});
+
+ await waitForPromises();
+
element = findHistoryElement(name);
});
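
The reworked history spec wires the component to a mocked Apollo client, so loading, success, and failure are driven entirely by when and how the pipelines resolver settles. A sketch that condenses the loading-to-loaded flow into one test, reusing the helpers and finders defined in the hunks above (an illustration in the same register, not part of the original spec):

it('replaces the loader with the timeline once pipelines resolve (sketch)', async () => {
  const resolver = jest.fn().mockResolvedValue(packagePipelinesQuery(createPipelines(2)));

  // Same wiring as mountComponent(): one [query, handler] pair per request handler.
  apolloProvider = createMockApollo([[getPackagePipelines, resolver]]);
  wrapper = shallowMountExtended(component, {
    apolloProvider,
    propsData: { ...defaultProps },
  });

  // While the query is in flight the skeleton loader is rendered.
  expect(findPackageHistoryLoader().exists()).toBe(true);

  await waitForPromises();

  // Once it resolves, the loader gives way to the timeline.
  expect(findPackageHistoryLoader().exists()).toBe(false);
  expect(findTimeline().exists()).toBe(true);
  expect(resolver).toHaveBeenCalledTimes(1);
});
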
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
index 3670cfca8ea..19505618ff7 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_search_spec.js
@@ -134,7 +134,7 @@ describe('Package Search', () => {
await nextTick();
- expect(findRegistrySearch().props('filter')).toEqual(['foo']);
+ expect(findRegistrySearch().props('filters')).toEqual(['foo']);
});
it('on filter:submit emits update event', async () => {
@@ -175,7 +175,7 @@ describe('Package Search', () => {
expect(getQueryParams).toHaveBeenCalled();
expect(findRegistrySearch().props()).toMatchObject({
- filter: defaultQueryParamsMock.filters,
+ filters: defaultQueryParamsMock.filters,
sorting: defaultQueryParamsMock.sorting,
});
});
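
The renamed filters prop is asserted with toMatchObject, which compares only the listed keys against the component's full props() object. A minimal standalone illustration of that partial matching:

it('toMatchObject checks a subset of keys (sketch)', () => {
  const props = { filters: ['foo'], sorting: { orderBy: 'name', sort: 'asc' }, extra: true };

  // `extra` is ignored; only the keys spelled out below are compared.
  expect(props).toMatchObject({
    filters: ['foo'],
    sorting: { orderBy: 'name', sort: 'asc' },
  });
});
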
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index 0a4747fc9ec..d40feee582f 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -148,6 +148,8 @@ export const conanMetadata = () => ({
recipePath: 'package-8/1.0.0/gitlab-org+gitlab-test/stable',
});
+const conanMetadataQuery = () => ({ ...conanMetadata(), __typename: 'ConanMetadata' });
+
export const composerMetadata = () => ({
targetSha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
composerJson: {
@@ -156,23 +158,45 @@ export const composerMetadata = () => ({
},
});
+const composerMetadataQuery = () => ({
+ ...composerMetadata(),
+ __typename: 'ComposerMetadata',
+});
+
export const pypiMetadata = () => ({
+ id: 'pypi-1',
requiredPython: '1.0.0',
});
+const pypiMetadataQuery = () => ({ ...pypiMetadata(), __typename: 'PypiMetadata' });
+
export const mavenMetadata = () => ({
+ id: 'maven-1',
appName: 'appName',
appGroup: 'appGroup',
appVersion: 'appVersion',
path: 'path',
});
+const mavenMetadataQuery = () => ({ ...mavenMetadata(), __typename: 'MavenMetadata' });
+
export const nugetMetadata = () => ({
+ id: 'nuget-1',
iconUrl: 'iconUrl',
licenseUrl: 'licenseUrl',
projectUrl: 'projectUrl',
});
+const nugetMetadataQuery = () => ({ ...nugetMetadata(), __typename: 'NugetMetadata' });
+
+const packageTypeMetadataQueryMapping = {
+ CONAN: conanMetadataQuery,
+ COMPOSER: composerMetadataQuery,
+ PYPI: pypiMetadataQuery,
+ MAVEN: mavenMetadataQuery,
+ NUGET: nugetMetadataQuery,
+};
+
export const pagination = (extend) => ({
endCursor: 'eyJpZCI6IjIwNSIsIm5hbWUiOiJteS9jb21wYW55L2FwcC9teS1hcHAifQ',
hasNextPage: true,
@@ -223,6 +247,19 @@ export const packageDetailsQuery = (extendPackage) => ({
},
});
+export const packagePipelinesQuery = (pipelines = packagePipelines()) => ({
+ data: {
+ package: {
+ id: 'gid://gitlab/Packages::Package/111',
+ pipelines: {
+ nodes: pipelines,
+ __typename: 'PipelineConnection',
+ },
+ __typename: 'PackageDetailsType',
+ },
+ },
+});
+
export const emptyPackageDetailsQuery = () => ({
data: {
package: {
@@ -231,6 +268,21 @@ export const emptyPackageDetailsQuery = () => ({
},
});
+export const packageMetadataQuery = (packageType) => {
+ return {
+ data: {
+ package: {
+ id: 'gid://gitlab/Packages::Package/111',
+ packageType,
+ metadata: {
+ ...(packageTypeMetadataQueryMapping[packageType]?.() ?? {}),
+ },
+ __typename: 'PackageDetailsType',
+ },
+ },
+ };
+};
+
export const packageDestroyMutation = () => ({
data: {
destroyPackage: {
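
The new packageMetadataQuery factory dispatches on the package type through packageTypeMetadataQueryMapping, returning the typed metadata fixture for the five mapped formats and an empty object for everything else. A small usage sketch against the factories added above:

import { packageMetadataQuery } from 'jest/packages_and_registries/package_registry/mock_data';

// Mapped type: the metadata fixture plus its GraphQL __typename.
const nuget = packageMetadataQuery('NUGET');
// nuget.data.package.metadata =>
//   { id: 'nuget-1', iconUrl: 'iconUrl', licenseUrl: 'licenseUrl',
//     projectUrl: 'projectUrl', __typename: 'NugetMetadata' }

// Unmapped type: the optional call falls back to an empty metadata object.
const npm = packageMetadataQuery('NPM');
// npm.data.package.metadata => {}
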
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
index a7e31d42c9e..3cadb001c58 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
@@ -23,6 +23,10 @@ import {
DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
DELETE_PACKAGE_FILE_ERROR_MESSAGE,
PACKAGE_TYPE_NUGET,
+ PACKAGE_TYPE_MAVEN,
+ PACKAGE_TYPE_CONAN,
+ PACKAGE_TYPE_PYPI,
+ PACKAGE_TYPE_NPM,
} from '~/packages_and_registries/package_registry/constants';
import destroyPackageFileMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_file.mutation.graphql';
@@ -160,15 +164,38 @@ describe('PackagesApp', () => {
});
});
- it('renders additional metadata and has the right props', async () => {
- createComponent();
+ describe('additional metadata', () => {
+ it.each`
+ packageType | visible
+ ${PACKAGE_TYPE_MAVEN} | ${true}
+ ${PACKAGE_TYPE_CONAN} | ${true}
+ ${PACKAGE_TYPE_NUGET} | ${true}
+ ${PACKAGE_TYPE_COMPOSER} | ${true}
+ ${PACKAGE_TYPE_PYPI} | ${true}
+ ${PACKAGE_TYPE_NPM} | ${false}
+ `(
+ `It is $visible that the component is visible when the package is $packageType`,
+ async ({ packageType, visible }) => {
+ createComponent({
+ resolver: jest.fn().mockResolvedValue(
+ packageDetailsQuery({
+ packageType,
+ }),
+ ),
+ });
- await waitForPromises();
+ await waitForPromises();
- expect(findAdditionalMetadata().exists()).toBe(true);
- expect(findAdditionalMetadata().props()).toMatchObject({
- packageEntity: expect.objectContaining(packageWithoutTypename),
- });
+ expect(findAdditionalMetadata().exists()).toBe(visible);
+
+ if (visible) {
+ expect(findAdditionalMetadata().props()).toMatchObject({
+ packageId: packageWithoutTypename.id,
+ packageType,
+ });
+ }
+ },
+ );
});
it('renders installation commands and has the right props', async () => {
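
The visibility matrix above uses Jest's tagged-template form of it.each, where the first row names the columns and each following row supplies one case; the column values are interpolated into the test title. A minimal, self-contained sketch of the same mechanism:

describe('it.each tagged-template sketch', () => {
  const typesWithMetadata = ['MAVEN', 'CONAN', 'NUGET', 'COMPOSER', 'PYPI'];

  it.each`
    packageType | visible
    ${'MAVEN'}  | ${true}
    ${'NPM'}    | ${false}
  `('metadata visibility is $visible for $packageType', ({ packageType, visible }) => {
    expect(typesWithMetadata.includes(packageType)).toBe(visible);
  });
});
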
diff --git a/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
index 22754d31f93..e60989b0949 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/dependency_proxy_settings_spec.js
@@ -134,12 +134,6 @@ describe('DependencyProxySettings', () => {
mountComponent();
});
- it('has the help prop correctly set', () => {
- expect(findEnableProxyToggle().props()).toMatchObject({
- help: component.i18n.enabledProxyHelpText,
- });
- });
-
it('has help text with a link', () => {
expect(findEnableProxyToggle().text()).toContain(
'To see the image prefix and what is in the cache, visit the Dependency Proxy',
@@ -157,12 +151,6 @@ describe('DependencyProxySettings', () => {
});
});
- it('has the help prop set to empty', () => {
- expect(findEnableProxyToggle().props()).toMatchObject({
- help: '',
- });
- });
-
it('the help text is not visible', () => {
expect(findToggleHelpLink().exists()).toBe(false);
});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
index 841a9bf8290..faa313118f3 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Settings Form Cadence matches snapshot 1`] = `
+exports[`Container Expiration Policy Settings Form Cadence matches snapshot 1`] = `
<expiration-dropdown-stub
class="gl-mr-7 gl-mb-0!"
data-testid="cadence-dropdown"
@@ -11,7 +11,7 @@ exports[`Settings Form Cadence matches snapshot 1`] = `
/>
`;
-exports[`Settings Form Enable matches snapshot 1`] = `
+exports[`Container Expiration Policy Settings Form Enable matches snapshot 1`] = `
<expiration-toggle-stub
class="gl-mb-0!"
data-testid="enable-toggle"
@@ -19,7 +19,7 @@ exports[`Settings Form Enable matches snapshot 1`] = `
/>
`;
-exports[`Settings Form Keep N matches snapshot 1`] = `
+exports[`Container Expiration Policy Settings Form Keep N matches snapshot 1`] = `
<expiration-dropdown-stub
data-testid="keep-n-dropdown"
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
@@ -29,7 +29,7 @@ exports[`Settings Form Keep N matches snapshot 1`] = `
/>
`;
-exports[`Settings Form Keep Regex matches snapshot 1`] = `
+exports[`Container Expiration Policy Settings Form Keep Regex matches snapshot 1`] = `
<expiration-input-stub
data-testid="keep-regex-input"
description="Tags with names that match this regex pattern are kept. %{linkStart}View regex examples.%{linkEnd}"
@@ -41,7 +41,7 @@ exports[`Settings Form Keep Regex matches snapshot 1`] = `
/>
`;
-exports[`Settings Form OlderThan matches snapshot 1`] = `
+exports[`Container Expiration Policy Settings Form OlderThan matches snapshot 1`] = `
<expiration-dropdown-stub
data-testid="older-than-dropdown"
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
@@ -51,7 +51,7 @@ exports[`Settings Form OlderThan matches snapshot 1`] = `
/>
`;
-exports[`Settings Form Remove regex matches snapshot 1`] = `
+exports[`Container Expiration Policy Settings Form Remove regex matches snapshot 1`] = `
<expiration-input-stub
data-testid="remove-regex-input"
description="Tags with names that match this regex pattern are removed. %{linkStart}View regex examples.%{linkEnd}"
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js
index 465e6dc73e2..ca44e77e694 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_form_spec.js
@@ -4,7 +4,7 @@ import Vue, { nextTick } from 'vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { GlCard, GlLoadingIcon } from 'jest/packages_and_registries/shared/stubs';
-import component from '~/packages_and_registries/settings/project/components/settings_form.vue';
+import component from '~/packages_and_registries/settings/project/components/container_expiration_policy_form.vue';
import {
UPDATE_SETTINGS_ERROR_MESSAGE,
UPDATE_SETTINGS_SUCCESS_MESSAGE,
@@ -14,7 +14,7 @@ import expirationPolicyQuery from '~/packages_and_registries/settings/project/gr
import Tracking from '~/tracking';
import { expirationPolicyPayload, expirationPolicyMutationPayload } from '../mock_data';
-describe('Settings Form', () => {
+describe('Container Expiration Policy Settings Form', () => {
let wrapper;
let fakeApollo;
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
new file mode 100644
index 00000000000..aa3506771fa
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
@@ -0,0 +1,167 @@
+import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import component from '~/packages_and_registries/settings/project/components/container_expiration_policy.vue';
+import ContainerExpirationPolicyForm from '~/packages_and_registries/settings/project/components/container_expiration_policy_form.vue';
+import {
+ FETCH_SETTINGS_ERROR_MESSAGE,
+ UNAVAILABLE_FEATURE_INTRO_TEXT,
+ UNAVAILABLE_USER_FEATURE_TEXT,
+} from '~/packages_and_registries/settings/project/constants';
+import expirationPolicyQuery from '~/packages_and_registries/settings/project/graphql/queries/get_expiration_policy.query.graphql';
+import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+
+import {
+ expirationPolicyPayload,
+ emptyExpirationPolicyPayload,
+ containerExpirationPolicyData,
+} from '../mock_data';
+
+describe('Container expiration policy project settings', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ isAdmin: false,
+ adminSettingsPath: 'settingsPath',
+ enableHistoricEntries: false,
+ helpPagePath: 'helpPagePath',
+ showCleanupPolicyLink: false,
+ };
+
+ const findFormComponent = () => wrapper.find(ContainerExpirationPolicyForm);
+ const findAlert = () => wrapper.find(GlAlert);
+ const findSettingsBlock = () => wrapper.find(SettingsBlock);
+
+ const mountComponent = (provide = defaultProvidedValues, config) => {
+ wrapper = shallowMount(component, {
+ stubs: {
+ GlSprintf,
+ SettingsBlock,
+ },
+ mocks: {
+ $toast: {
+ show: jest.fn(),
+ },
+ },
+ provide,
+ ...config,
+ });
+ };
+
+ const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
+ Vue.use(VueApollo);
+
+ const requestHandlers = [[expirationPolicyQuery, resolver]];
+
+ fakeApollo = createMockApollo(requestHandlers);
+ mountComponent(provide, {
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('isEdited status', () => {
+ it.each`
+ description | apiResponse | workingCopy | result
+ ${'empty response and no changes from user'} | ${emptyExpirationPolicyPayload()} | ${{}} | ${false}
+ ${'empty response and changes from user'} | ${emptyExpirationPolicyPayload()} | ${{ enabled: true }} | ${true}
+ ${'response and no changes'} | ${expirationPolicyPayload()} | ${containerExpirationPolicyData()} | ${false}
+ ${'response and changes'} | ${expirationPolicyPayload()} | ${{ ...containerExpirationPolicyData(), nameRegex: '12345' }} | ${true}
+ ${'response and empty'} | ${expirationPolicyPayload()} | ${{}} | ${true}
+ `('$description', async ({ apiResponse, workingCopy, result }) => {
+ mountComponentWithApollo({
+ provide: { ...defaultProvidedValues, enableHistoricEntries: true },
+ resolver: jest.fn().mockResolvedValue(apiResponse),
+ });
+ await waitForPromises();
+
+ findFormComponent().vm.$emit('input', workingCopy);
+
+ await waitForPromises();
+
+ expect(findFormComponent().props('isEdited')).toBe(result);
+ });
+ });
+
+ it('renders the setting form', async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyPayload()),
+ });
+ await waitForPromises();
+
+ expect(findFormComponent().exists()).toBe(true);
+ expect(findSettingsBlock().props('collapsible')).toBe(false);
+ });
+
+ describe('the form is disabled', () => {
+ it('the form is hidden', () => {
+ mountComponent();
+
+ expect(findFormComponent().exists()).toBe(false);
+ });
+
+ it('shows an alert', () => {
+ mountComponent();
+
+ const text = findAlert().text();
+ expect(text).toContain(UNAVAILABLE_FEATURE_INTRO_TEXT);
+ expect(text).toContain(UNAVAILABLE_USER_FEATURE_TEXT);
+ });
+
+ describe('an admin is visiting the page', () => {
+ it('shows the admin part of the alert message', () => {
+ mountComponent({ ...defaultProvidedValues, isAdmin: true });
+
+ const sprintf = findAlert().find(GlSprintf);
+ expect(sprintf.text()).toBe('administration settings');
+ expect(sprintf.find(GlLink).attributes('href')).toBe(
+ defaultProvidedValues.adminSettingsPath,
+ );
+ });
+ });
+ });
+
+ describe('fetchSettingsError', () => {
+ beforeEach(async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockRejectedValue(new Error('GraphQL error')),
+ });
+ await waitForPromises();
+ });
+
+ it('the form is hidden', () => {
+ expect(findFormComponent().exists()).toBe(false);
+ });
+
+ it('shows an alert', () => {
+ expect(findAlert().html()).toContain(FETCH_SETTINGS_ERROR_MESSAGE);
+ });
+ });
+
+ describe('empty API response', () => {
+ it.each`
+ enableHistoricEntries | isShown
+ ${true} | ${true}
+ ${false} | ${false}
+ `('is $isShown that the form is shown', async ({ enableHistoricEntries, isShown }) => {
+ mountComponentWithApollo({
+ provide: {
+ ...defaultProvidedValues,
+ enableHistoricEntries,
+ },
+ resolver: jest.fn().mockResolvedValue(emptyExpirationPolicyPayload()),
+ });
+ await waitForPromises();
+
+ expect(findFormComponent().exists()).toBe(isShown);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
index 0a72f0269ee..337991dfae0 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
@@ -1,165 +1,19 @@
-import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
import component from '~/packages_and_registries/settings/project/components/registry_settings_app.vue';
-import SettingsForm from '~/packages_and_registries/settings/project/components/settings_form.vue';
-import {
- FETCH_SETTINGS_ERROR_MESSAGE,
- UNAVAILABLE_FEATURE_INTRO_TEXT,
- UNAVAILABLE_USER_FEATURE_TEXT,
-} from '~/packages_and_registries/settings/project/constants';
-import expirationPolicyQuery from '~/packages_and_registries/settings/project/graphql/queries/get_expiration_policy.query.graphql';
-import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
+import ContainerExpirationPolicy from '~/packages_and_registries/settings/project/components/container_expiration_policy.vue';
-import {
- expirationPolicyPayload,
- emptyExpirationPolicyPayload,
- containerExpirationPolicyData,
-} from '../mock_data';
-
-describe('Registry Settings App', () => {
+describe('Registry Settings app', () => {
let wrapper;
- let fakeApollo;
-
- const defaultProvidedValues = {
- projectPath: 'path',
- isAdmin: false,
- adminSettingsPath: 'settingsPath',
- enableHistoricEntries: false,
- helpPagePath: 'helpPagePath',
- showCleanupPolicyLink: false,
- };
-
- const findSettingsComponent = () => wrapper.find(SettingsForm);
- const findAlert = () => wrapper.find(GlAlert);
-
- const mountComponent = (provide = defaultProvidedValues, config) => {
- wrapper = shallowMount(component, {
- stubs: {
- GlSprintf,
- SettingsBlock,
- },
- mocks: {
- $toast: {
- show: jest.fn(),
- },
- },
- provide,
- ...config,
- });
- };
-
- const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
- Vue.use(VueApollo);
-
- const requestHandlers = [[expirationPolicyQuery, resolver]];
-
- fakeApollo = createMockApollo(requestHandlers);
- mountComponent(provide, {
- apolloProvider: fakeApollo,
- });
- };
+ const findContainerExpirationPolicy = () => wrapper.find(ContainerExpirationPolicy);
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
- describe('isEdited status', () => {
- it.each`
- description | apiResponse | workingCopy | result
- ${'empty response and no changes from user'} | ${emptyExpirationPolicyPayload()} | ${{}} | ${false}
- ${'empty response and changes from user'} | ${emptyExpirationPolicyPayload()} | ${{ enabled: true }} | ${true}
- ${'response and no changes'} | ${expirationPolicyPayload()} | ${containerExpirationPolicyData()} | ${false}
- ${'response and changes'} | ${expirationPolicyPayload()} | ${{ ...containerExpirationPolicyData(), nameRegex: '12345' }} | ${true}
- ${'response and empty'} | ${expirationPolicyPayload()} | ${{}} | ${true}
- `('$description', async ({ apiResponse, workingCopy, result }) => {
- mountComponentWithApollo({
- provide: { ...defaultProvidedValues, enableHistoricEntries: true },
- resolver: jest.fn().mockResolvedValue(apiResponse),
- });
- await waitForPromises();
-
- findSettingsComponent().vm.$emit('input', workingCopy);
-
- await waitForPromises();
-
- expect(findSettingsComponent().props('isEdited')).toBe(result);
- });
- });
-
- it('renders the setting form', async () => {
- mountComponentWithApollo({
- resolver: jest.fn().mockResolvedValue(expirationPolicyPayload()),
- });
- await waitForPromises();
-
- expect(findSettingsComponent().exists()).toBe(true);
- });
-
- describe('the form is disabled', () => {
- it('the form is hidden', () => {
- mountComponent();
-
- expect(findSettingsComponent().exists()).toBe(false);
- });
-
- it('shows an alert', () => {
- mountComponent();
-
- const text = findAlert().text();
- expect(text).toContain(UNAVAILABLE_FEATURE_INTRO_TEXT);
- expect(text).toContain(UNAVAILABLE_USER_FEATURE_TEXT);
- });
-
- describe('an admin is visiting the page', () => {
- it('shows the admin part of the alert message', () => {
- mountComponent({ ...defaultProvidedValues, isAdmin: true });
-
- const sprintf = findAlert().find(GlSprintf);
- expect(sprintf.text()).toBe('administration settings');
- expect(sprintf.find(GlLink).attributes('href')).toBe(
- defaultProvidedValues.adminSettingsPath,
- );
- });
- });
- });
-
- describe('fetchSettingsError', () => {
- beforeEach(async () => {
- mountComponentWithApollo({
- resolver: jest.fn().mockRejectedValue(new Error('GraphQL error')),
- });
- await waitForPromises();
- });
-
- it('the form is hidden', () => {
- expect(findSettingsComponent().exists()).toBe(false);
- });
-
- it('shows an alert', () => {
- expect(findAlert().html()).toContain(FETCH_SETTINGS_ERROR_MESSAGE);
- });
- });
-
- describe('empty API response', () => {
- it.each`
- enableHistoricEntries | isShown
- ${true} | ${true}
- ${false} | ${false}
- `('is $isShown that the form is shown', async ({ enableHistoricEntries, isShown }) => {
- mountComponentWithApollo({
- provide: {
- ...defaultProvidedValues,
- enableHistoricEntries,
- },
- resolver: jest.fn().mockResolvedValue(emptyExpirationPolicyPayload()),
- });
- await waitForPromises();
+ it('renders container expiration policy component', () => {
+ wrapper = shallowMount(component);
- expect(findSettingsComponent().exists()).toBe(isShown);
- });
+ expect(findContainerExpirationPolicy().exists()).toBe(true);
});
});
diff --git a/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap b/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap
index 3dd6023140f..e6e89806ce0 100644
--- a/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap
+++ b/spec/frontend/packages_and_registries/shared/components/__snapshots__/registry_breadcrumb_spec.js.snap
@@ -30,11 +30,11 @@ exports[`Registry Breadcrumb when is not rootRoute renders 1`] = `
<svg
aria-hidden="true"
class="gl-icon s8"
- data-testid="angle-right-icon"
+ data-testid="chevron-lg-right-icon"
role="img"
>
<use
- href="#angle-right"
+ href="#chevron-lg-right"
/>
</svg>
</span>
diff --git a/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js b/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
index bd492a5ae8f..db9f96bff39 100644
--- a/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
+++ b/spec/frontend/packages_and_registries/shared/components/persisted_search_spec.js
@@ -100,7 +100,7 @@ describe('Persisted Search', () => {
await nextTick();
- expect(findRegistrySearch().props('filter')).toEqual(['foo']);
+ expect(findRegistrySearch().props('filters')).toEqual(['foo']);
});
it('on filter:submit emits update event', async () => {
@@ -138,7 +138,7 @@ describe('Persisted Search', () => {
expect(getQueryParams).toHaveBeenCalled();
expect(findRegistrySearch().props()).toMatchObject({
- filter: defaultQueryParamsMock.filters,
+ filters: defaultQueryParamsMock.filters,
sorting: defaultQueryParamsMock.sorting,
});
});
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index ae5404f2d13..d5b4b3c22d8 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -190,7 +190,7 @@ describe('Interval Pattern Input Component', () => {
findCustomInput().setValue(newValue);
- await nextTick;
+ await nextTick();
expect(findSelectedRadioKey()).toBe(customKey);
});
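
The one-line fix above matters because `await nextTick;` awaits the imported function object, which is not a promise, so execution continues before Vue has flushed the DOM update queue; `await nextTick()` awaits the promise the call returns. A minimal illustration:

import { nextTick } from 'vue';

it('only the call returns a promise', async () => {
  expect(nextTick).toEqual(expect.any(Function));   // the import itself is just a function
  expect(nextTick()).toEqual(expect.any(Promise));  // calling it yields the awaitable

  await nextTick(); // flushes pending DOM updates before the next assertion runs
});
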
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 30d5f89d2f6..46f83ac89e5 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -53,11 +53,13 @@ const defaultProps = {
showVisibilityConfirmModal: false,
};
+const FEATURE_ACCESS_LEVEL_ANONYMOUS = 30;
+
describe('Settings Panel', () => {
let wrapper;
const mountComponent = (
- { currentSettings = {}, ...customProps } = {},
+ { currentSettings = {}, glFeatures = {}, ...customProps } = {},
mountFn = shallowMount,
) => {
const propsData = {
@@ -68,6 +70,12 @@ describe('Settings Panel', () => {
return mountFn(settingsPanel, {
propsData,
+ provide: {
+ glFeatures: {
+ packageRegistryAccessLevel: false,
+ ...glFeatures,
+ },
+ },
});
};
@@ -95,6 +103,10 @@ describe('Settings Panel', () => {
const findContainerRegistryAccessLevelInput = () =>
wrapper.find('[name="project[project_feature_attributes][container_registry_access_level]"]');
const findPackageSettings = () => wrapper.find({ ref: 'package-settings' });
+ const findPackageAccessLevel = () =>
+ wrapper.find('[data-testid="package-registry-access-level"]');
+ const findPackageAccessLevels = () =>
+ wrapper.find('[name="project[project_feature_attributes][package_registry_access_level]"]');
const findPackagesEnabledInput = () => wrapper.find('[name="project[packages_enabled]"]');
const findPagesSettings = () => wrapper.find({ ref: 'pages-settings' });
const findPagesAccessLevels = () =>
@@ -521,6 +533,101 @@ describe('Settings Panel', () => {
settingsPanel.i18n.packagesLabel,
);
});
+
+ it('should hide the package access level settings', () => {
+ wrapper = mountComponent();
+
+ expect(findPackageAccessLevel().exists()).toBe(false);
+ });
+
+ describe('packageRegistryAccessLevel feature flag = true', () => {
+ it('should hide the packages settings', () => {
+ wrapper = mountComponent({
+ glFeatures: { packageRegistryAccessLevel: true },
+ packagesAvailable: true,
+ });
+
+ expect(findPackageSettings().exists()).toBe(false);
+ });
+
+ it('should hide the package access level settings with packagesAvailable = false', () => {
+ wrapper = mountComponent({ glFeatures: { packageRegistryAccessLevel: true } });
+
+ expect(findPackageAccessLevel().exists()).toBe(false);
+ });
+
+ it('renders the package access level settings with packagesAvailable = true', () => {
+ wrapper = mountComponent({
+ glFeatures: { packageRegistryAccessLevel: true },
+ packagesAvailable: true,
+ });
+
+ expect(findPackageAccessLevel().exists()).toBe(true);
+ });
+
+ it.each`
+ visibilityLevel | output
+ ${visibilityOptions.PRIVATE} | ${[[featureAccessLevel.PROJECT_MEMBERS, 'Only Project Members'], [30, 'Everyone']]}
+ ${visibilityOptions.INTERNAL} | ${[[featureAccessLevel.EVERYONE, 'Everyone With Access'], [30, 'Everyone']]}
+ ${visibilityOptions.PUBLIC} | ${[[30, 'Everyone']]}
+ `(
+ 'renders correct options when visibilityLevel is $visibilityLevel',
+ async ({ visibilityLevel, output }) => {
+ wrapper = mountComponent({
+ glFeatures: { packageRegistryAccessLevel: true },
+ packagesAvailable: true,
+ currentSettings: {
+ visibilityLevel,
+ },
+ });
+
+ expect(findPackageAccessLevels().props('options')).toStrictEqual(output);
+ },
+ );
+
+ it.each`
+ initialProjectVisibilityLevel | newProjectVisibilityLevel | initialPackageRegistryOption | expectedPackageRegistryOption
+ ${visibilityOptions.PRIVATE} | ${visibilityOptions.INTERNAL} | ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.NOT_ENABLED}
+ ${visibilityOptions.PRIVATE} | ${visibilityOptions.INTERNAL} | ${featureAccessLevel.PROJECT_MEMBERS} | ${featureAccessLevel.EVERYONE}
+ ${visibilityOptions.PRIVATE} | ${visibilityOptions.INTERNAL} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${visibilityOptions.PRIVATE} | ${visibilityOptions.PUBLIC} | ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.NOT_ENABLED}
+ ${visibilityOptions.PRIVATE} | ${visibilityOptions.PUBLIC} | ${featureAccessLevel.PROJECT_MEMBERS} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${visibilityOptions.PRIVATE} | ${visibilityOptions.PUBLIC} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${visibilityOptions.INTERNAL} | ${visibilityOptions.PRIVATE} | ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.NOT_ENABLED}
+ ${visibilityOptions.INTERNAL} | ${visibilityOptions.PRIVATE} | ${featureAccessLevel.EVERYONE} | ${featureAccessLevel.PROJECT_MEMBERS}
+ ${visibilityOptions.INTERNAL} | ${visibilityOptions.PRIVATE} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${visibilityOptions.INTERNAL} | ${visibilityOptions.PUBLIC} | ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.NOT_ENABLED}
+ ${visibilityOptions.INTERNAL} | ${visibilityOptions.PUBLIC} | ${featureAccessLevel.EVERYONE} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${visibilityOptions.INTERNAL} | ${visibilityOptions.PUBLIC} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS}
+ ${visibilityOptions.PUBLIC} | ${visibilityOptions.PRIVATE} | ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.NOT_ENABLED}
+ ${visibilityOptions.PUBLIC} | ${visibilityOptions.PRIVATE} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS} | ${featureAccessLevel.PROJECT_MEMBERS}
+ ${visibilityOptions.PUBLIC} | ${visibilityOptions.INTERNAL} | ${featureAccessLevel.NOT_ENABLED} | ${featureAccessLevel.NOT_ENABLED}
+ ${visibilityOptions.PUBLIC} | ${visibilityOptions.INTERNAL} | ${FEATURE_ACCESS_LEVEL_ANONYMOUS} | ${featureAccessLevel.EVERYONE}
+ `(
+ 'changes option from $initialPackageRegistryOption to $expectedPackageRegistryOption when visibilityLevel changed from $initialProjectVisibilityLevel to $newProjectVisibilityLevel',
+ async ({
+ initialProjectVisibilityLevel,
+ newProjectVisibilityLevel,
+ initialPackageRegistryOption,
+ expectedPackageRegistryOption,
+ }) => {
+ wrapper = mountComponent({
+ glFeatures: { packageRegistryAccessLevel: true },
+ packagesAvailable: true,
+ currentSettings: {
+ visibilityLevel: initialProjectVisibilityLevel,
+ packageRegistryAccessLevel: initialPackageRegistryOption,
+ },
+ });
+
+ await findProjectVisibilityLevelInput().setValue(newProjectVisibilityLevel);
+
+ expect(findPackageAccessLevels().props('value')).toStrictEqual(
+ expectedPackageRegistryOption,
+ );
+ },
+ );
+ });
});
describe('Pages', () => {
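
The settings panel spec now injects feature flags through provide, matching how the component reads them from glFeatures. A minimal, self-contained sketch of the same provide/inject pattern; the component below is hypothetical and only the flag name comes from the hunks above:

import { shallowMount } from '@vue/test-utils';

// Hypothetical component that reads an injected flag the same way the panel does.
const FlaggedSection = {
  inject: ['glFeatures'],
  render(h) {
    return this.glFeatures.packageRegistryAccessLevel
      ? h('div', 'package registry access level settings')
      : h('div');
  },
};

it.each([true, false])('renders the section only when the flag is %s', (flag) => {
  const wrapper = shallowMount(FlaggedSection, {
    provide: { glFeatures: { packageRegistryAccessLevel: flag } },
  });

  expect(wrapper.text().includes('access level settings')).toBe(flag);
});
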
diff --git a/spec/frontend/performance_bar/components/add_request_spec.js b/spec/frontend/performance_bar/components/add_request_spec.js
index 5422481439e..627e004ce3e 100644
--- a/spec/frontend/performance_bar/components/add_request_spec.js
+++ b/spec/frontend/performance_bar/components/add_request_spec.js
@@ -1,12 +1,16 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { GlFormInput, GlButton } from '@gitlab/ui';
import AddRequest from '~/performance_bar/components/add_request.vue';
describe('add request form', () => {
let wrapper;
+ const findGlFormInput = () => wrapper.findComponent(GlFormInput);
+ const findGlButton = () => wrapper.findComponent(GlButton);
+
beforeEach(() => {
- wrapper = shallowMount(AddRequest);
+ wrapper = mount(AddRequest);
});
afterEach(() => {
@@ -14,35 +18,35 @@ describe('add request form', () => {
});
it('hides the input on load', () => {
- expect(wrapper.find('input').exists()).toBe(false);
+ expect(findGlFormInput().exists()).toBe(false);
});
describe('when clicking the button', () => {
beforeEach(async () => {
- wrapper.find('button').trigger('click');
+ findGlButton().trigger('click');
await nextTick();
});
it('shows the form', () => {
- expect(wrapper.find('input').exists()).toBe(true);
+ expect(findGlFormInput().exists()).toBe(true);
});
describe('when pressing escape', () => {
beforeEach(async () => {
- wrapper.find('input').trigger('keyup.esc');
+ findGlFormInput().trigger('keyup.esc');
await nextTick();
});
it('hides the input', () => {
- expect(wrapper.find('input').exists()).toBe(false);
+ expect(findGlFormInput().exists()).toBe(false);
});
});
describe('when submitting the form', () => {
beforeEach(async () => {
- wrapper.find('input').setValue('http://gitlab.example.com/users/root/calendar.json');
+ findGlFormInput().setValue('http://gitlab.example.com/users/root/calendar.json');
await nextTick();
- wrapper.find('input').trigger('keyup.enter');
+ findGlFormInput().trigger('keyup.enter');
await nextTick();
});
@@ -54,13 +58,13 @@ describe('add request form', () => {
});
it('hides the input', () => {
- expect(wrapper.find('input').exists()).toBe(false);
+ expect(findGlFormInput().exists()).toBe(false);
});
it('clears the value for next time', async () => {
- wrapper.find('button').trigger('click');
+ findGlButton().trigger('click');
await nextTick();
- expect(wrapper.find('input').text()).toEqual('');
+ expect(findGlFormInput().text()).toEqual('');
});
});
});
diff --git a/spec/frontend/performance_bar/index_spec.js b/spec/frontend/performance_bar/index_spec.js
index 6c1cbfa70a1..2da176dbfe4 100644
--- a/spec/frontend/performance_bar/index_spec.js
+++ b/spec/frontend/performance_bar/index_spec.js
@@ -17,11 +17,11 @@ describe('performance bar wrapper', () => {
performance.getEntriesByType = jest.fn().mockReturnValue([]);
peekWrapper.setAttribute('id', 'js-peek');
- peekWrapper.setAttribute('data-env', 'development');
- peekWrapper.setAttribute('data-request-id', '123');
- peekWrapper.setAttribute('data-peek-url', '/-/peek/results');
- peekWrapper.setAttribute('data-stats-url', 'https://log.gprd.gitlab.net/app/dashboards#/view/');
- peekWrapper.setAttribute('data-profile-url', '?lineprofiler=true');
+ peekWrapper.dataset.env = 'development';
+ peekWrapper.dataset.requestId = '123';
+ peekWrapper.dataset.peekUrl = '/-/peek/results';
+ peekWrapper.dataset.statsUrl = 'https://log.gprd.gitlab.net/app/dashboards#/view/';
+ peekWrapper.dataset.profileUrl = '?lineprofiler=true';
mock = new MockAdapter(axios);
@@ -69,7 +69,7 @@ describe('performance bar wrapper', () => {
it('adds the request immediately', () => {
vm.addRequest('123', 'https://gitlab.com/');
- expect(vm.store.addRequest).toHaveBeenCalledWith('123', 'https://gitlab.com/');
+ expect(vm.store.addRequest).toHaveBeenCalledWith('123', 'https://gitlab.com/', undefined);
});
});
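
The switch from setAttribute to the dataset API above relies on the standard mapping between data-* attribute names and camelCased dataset keys. A quick jsdom-runnable check of that equivalence:

it('maps camelCased dataset keys onto data-* attributes', () => {
  const peekWrapper = document.createElement('div');

  peekWrapper.dataset.requestId = '123';           // same effect as setAttribute('data-request-id', '123')
  peekWrapper.dataset.peekUrl = '/-/peek/results';

  expect(peekWrapper.getAttribute('data-request-id')).toBe('123');
  expect(peekWrapper.getAttribute('data-peek-url')).toBe('/-/peek/results');
});
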
diff --git a/spec/frontend/performance_bar/services/performance_bar_service_spec.js b/spec/frontend/performance_bar/services/performance_bar_service_spec.js
index 36bfd575c12..1bb70a43a1b 100644
--- a/spec/frontend/performance_bar/services/performance_bar_service_spec.js
+++ b/spec/frontend/performance_bar/services/performance_bar_service_spec.js
@@ -63,5 +63,17 @@ describe('PerformanceBarService', () => {
);
});
});
+
+ describe('operationName', () => {
+ function requestUrl(response, peekUrl) {
+ return PerformanceBarService.callbackParams(response, peekUrl)[3];
+ }
+
+ it('gets the operation name from response.config', () => {
+ expect(
+ requestUrl({ headers: {}, config: { operationName: 'someOperation' } }, '/peek'),
+ ).toBe('someOperation');
+ });
+ });
});
});
diff --git a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
index b7324ba2f6e..7d5c5031792 100644
--- a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
+++ b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
@@ -1,9 +1,9 @@
import PerformanceBarStore from '~/performance_bar/stores/performance_bar_store';
describe('PerformanceBarStore', () => {
- describe('truncateUrl', () => {
+ describe('displayName', () => {
let store;
- const findUrl = (id) => store.findRequest(id).truncatedUrl;
+ const findUrl = (id) => store.findRequest(id).displayName;
beforeEach(() => {
store = new PerformanceBarStore();
@@ -41,6 +41,11 @@ describe('PerformanceBarStore', () => {
store.addRequest('id', 'http://localhost:3001/h5bp/html5-boilerplate/#frag/ment');
expect(findUrl('id')).toEqual('html5-boilerplate');
});
+
+ it('appends the GraphQL operation name', () => {
+ store.addRequest('id', 'http://localhost:3001/api/graphql', 'someOperation');
+ expect(findUrl('id')).toBe('graphql (someOperation)');
+ });
});
describe('setRequestDetailsData', () => {
diff --git a/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js b/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
index e435c0dcc08..bf5d15516c2 100644
--- a/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
+++ b/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
@@ -1,9 +1,12 @@
import { getByRole } from '@testing-library/dom';
import { mount } from '@vue/test-utils';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import FirstPipelineCard from '~/pipeline_editor/components/drawer/cards/first_pipeline_card.vue';
+import { pipelineEditorTrackingOptions } from '~/pipeline_editor/constants';
describe('First pipeline card', () => {
let wrapper;
+ let trackingSpy;
const defaultProvide = {
runnerHelpPagePath: '/help/runners',
@@ -17,7 +20,7 @@ describe('First pipeline card', () => {
});
};
- const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name }).href;
+ const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name });
const findRunnersLink = () => getLinkByName(/make sure your instance has runners available/i);
const findInstructionsList = () => wrapper.find('ol');
const findAllInstructions = () => findInstructionsList().findAll('li');
@@ -40,6 +43,26 @@ describe('First pipeline card', () => {
});
it('renders the link', () => {
- expect(findRunnersLink()).toContain(defaultProvide.runnerHelpPagePath);
+ expect(findRunnersLink().href).toContain(defaultProvide.runnerHelpPagePath);
+ });
+
+ describe('tracking', () => {
+ beforeEach(() => {
+ createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks runners help page click', async () => {
+ const { label } = pipelineEditorTrackingOptions;
+ const { runners } = pipelineEditorTrackingOptions.actions.helpDrawerLinks;
+
+ await findRunnersLink().click();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, runners, { label });
+ });
});
});
diff --git a/spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js b/spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js
index 3c8821d05a7..49177befe0e 100644
--- a/spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js
+++ b/spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js
@@ -1,9 +1,12 @@
import { getByRole } from '@testing-library/dom';
import { mount } from '@vue/test-utils';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import PipelineConfigReferenceCard from '~/pipeline_editor/components/drawer/cards/pipeline_config_reference_card.vue';
+import { pipelineEditorTrackingOptions } from '~/pipeline_editor/constants';
describe('Pipeline config reference card', () => {
let wrapper;
+ let trackingSpy;
const defaultProvide = {
ciExamplesHelpPagePath: 'help/ci/examples/',
@@ -20,7 +23,7 @@ describe('Pipeline config reference card', () => {
});
};
- const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name }).href;
+ const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name });
const findCiExamplesLink = () => getLinkByName(/CI\/CD examples and templates/i);
const findCiIntroLink = () => getLinkByName(/GitLab CI\/CD concepts/i);
const findNeedsLink = () => getLinkByName(/Needs keyword/i);
@@ -43,9 +46,44 @@ describe('Pipeline config reference card', () => {
});
it('renders the links', () => {
- expect(findCiExamplesLink()).toContain(defaultProvide.ciExamplesHelpPagePath);
- expect(findCiIntroLink()).toContain(defaultProvide.ciHelpPagePath);
- expect(findNeedsLink()).toContain(defaultProvide.needsHelpPagePath);
- expect(findYmlSyntaxLink()).toContain(defaultProvide.ymlHelpPagePath);
+ expect(findCiExamplesLink().href).toContain(defaultProvide.ciExamplesHelpPagePath);
+ expect(findCiIntroLink().href).toContain(defaultProvide.ciHelpPagePath);
+ expect(findNeedsLink().href).toContain(defaultProvide.needsHelpPagePath);
+ expect(findYmlSyntaxLink().href).toContain(defaultProvide.ymlHelpPagePath);
+ });
+
+ describe('tracking', () => {
+ beforeEach(() => {
+ createComponent();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ const testTracker = async (element, expectedAction) => {
+ const { label } = pipelineEditorTrackingOptions;
+
+ await element.click();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, expectedAction, {
+ label,
+ });
+ };
+
+ it('tracks help page links', async () => {
+ const {
+ CI_EXAMPLES_LINK,
+ CI_HELP_LINK,
+ CI_NEEDS_LINK,
+ CI_YAML_LINK,
+ } = pipelineEditorTrackingOptions.actions.helpDrawerLinks;
+
+ testTracker(findCiExamplesLink(), CI_EXAMPLES_LINK);
+ testTracker(findCiIntroLink(), CI_HELP_LINK);
+ testTracker(findNeedsLink(), CI_NEEDS_LINK);
+ testTracker(findYmlSyntaxLink(), CI_YAML_LINK);
+ });
});
});
diff --git a/spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js b/spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js
index 8f50325295e..930f08ef545 100644
--- a/spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js
+++ b/spec/frontend/pipeline_editor/components/editor/ci_editor_header_spec.js
@@ -29,6 +29,17 @@ describe('CI Editor Header', () => {
unmockTracking();
});
+ const testTracker = async (element, expectedAction) => {
+ const { label } = pipelineEditorTrackingOptions;
+
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ await element.vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, expectedAction, {
+ label,
+ });
+ };
+
describe('link button', () => {
beforeEach(() => {
createComponent();
@@ -48,13 +59,9 @@ describe('CI Editor Header', () => {
});
it('tracks the click on the browse button', async () => {
- const { label, actions } = pipelineEditorTrackingOptions;
-
- await findLinkBtn().vm.$emit('click');
+ const { browseTemplates } = pipelineEditorTrackingOptions.actions;
- expect(trackingSpy).toHaveBeenCalledWith(undefined, actions.browse_templates, {
- label,
- });
+ testTracker(findLinkBtn(), browseTemplates);
});
});
@@ -72,21 +79,31 @@ describe('CI Editor Header', () => {
});
describe('when pipeline editor drawer is closed', () => {
- it('emits open drawer event when clicked', () => {
+ beforeEach(() => {
createComponent({ showDrawer: false });
+ });
+ it('emits open drawer event when clicked', () => {
expect(wrapper.emitted('open-drawer')).toBeUndefined();
findHelpBtn().vm.$emit('click');
expect(wrapper.emitted('open-drawer')).toHaveLength(1);
});
+
+ it('tracks open help drawer action', async () => {
+ const { actions } = pipelineEditorTrackingOptions;
+
+ testTracker(findHelpBtn(), actions.openHelpDrawer);
+ });
});
describe('when pipeline editor drawer is open', () => {
- it('emits close drawer event when clicked', () => {
+ beforeEach(() => {
createComponent({ showDrawer: true });
+ });
+ it('emits close drawer event when clicked', () => {
expect(wrapper.emitted('close-drawer')).toBeUndefined();
findHelpBtn().vm.$emit('click');
diff --git a/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js b/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
index a61796dbed2..d503aff40b8 100644
--- a/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
+++ b/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
@@ -23,7 +23,6 @@ describe('Pipeline editor file nav', () => {
const createComponent = ({
appStatus = EDITOR_APP_STATUS_VALID,
isNewCiConfigFile = false,
- pipelineEditorFileTree = false,
} = {}) => {
mockApollo.clients.defaultClient.cache.writeQuery({
query: getAppStatus,
@@ -38,11 +37,6 @@ describe('Pipeline editor file nav', () => {
wrapper = extendedWrapper(
shallowMount(PipelineEditorFileNav, {
apolloProvider: mockApollo,
- provide: {
- glFeatures: {
- pipelineEditorFileTree,
- },
- },
propsData: {
isNewCiConfigFile,
},
@@ -66,24 +60,12 @@ describe('Pipeline editor file nav', () => {
it('renders the branch switcher', () => {
expect(findBranchSwitcher().exists()).toBe(true);
});
-
- it('does not render the file tree button', () => {
- expect(findFileTreeBtn().exists()).toBe(false);
- });
-
- it('does not render the file tree popover', () => {
- expect(findPopoverContainer().exists()).toBe(false);
- });
});
- describe('with pipelineEditorFileTree feature flag ON', () => {
+ describe('file tree', () => {
describe('when editor is in the empty state', () => {
beforeEach(() => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_EMPTY,
- isNewCiConfigFile: false,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_EMPTY, isNewCiConfigFile: false });
});
it('does not render the file tree button', () => {
@@ -97,11 +79,7 @@ describe('Pipeline editor file nav', () => {
describe('when user is about to create their config file for the first time', () => {
beforeEach(() => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_VALID,
- isNewCiConfigFile: true,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_VALID, isNewCiConfigFile: true });
});
it('does not render the file tree button', () => {
@@ -115,11 +93,7 @@ describe('Pipeline editor file nav', () => {
describe('when app is in a global loading state', () => {
it('renders the file tree button with a loading icon', () => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_LOADING,
- isNewCiConfigFile: false,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING, isNewCiConfigFile: false });
expect(findFileTreeBtn().exists()).toBe(true);
expect(findFileTreeBtn().attributes('loading')).toBe('true');
@@ -128,11 +102,7 @@ describe('Pipeline editor file nav', () => {
describe('when editor has a non-empty config file open', () => {
beforeEach(() => {
- createComponent({
- appStatus: EDITOR_APP_STATUS_VALID,
- isNewCiConfigFile: false,
- pipelineEditorFileTree: true,
- });
+ createComponent({ appStatus: EDITOR_APP_STATUS_VALID, isNewCiConfigFile: false });
});
it('renders the file tree button', () => {
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
index d159a20a8d6..3ecf6472544 100644
--- a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -3,8 +3,9 @@ import { shallowMount, mount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import setWindowLocation from 'helpers/set_window_location_helper';
import CiConfigMergedPreview from '~/pipeline_editor/components/editor/ci_config_merged_preview.vue';
-import WalkthroughPopover from '~/pipeline_editor/components/popovers/walkthrough_popover.vue';
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
+import CiValidate from '~/pipeline_editor/components/validate/ci_validate.vue';
+import WalkthroughPopover from '~/pipeline_editor/components/popovers/walkthrough_popover.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
import {
@@ -13,9 +14,7 @@ import {
EDITOR_APP_STATUS_LOADING,
EDITOR_APP_STATUS_INVALID,
EDITOR_APP_STATUS_VALID,
- MERGED_TAB,
TAB_QUERY_PARAM,
- TABS_INDEX,
} from '~/pipeline_editor/constants';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import { mockLintResponse, mockLintResponseWithoutMerged, mockCiYml } from '../mock_data';
@@ -60,10 +59,12 @@ describe('Pipeline editor tabs component', () => {
const findEditorTab = () => wrapper.find('[data-testid="editor-tab"]');
const findLintTab = () => wrapper.find('[data-testid="lint-tab"]');
const findMergedTab = () => wrapper.find('[data-testid="merged-tab"]');
+ const findValidateTab = () => wrapper.find('[data-testid="validate-tab"]');
const findVisualizationTab = () => wrapper.find('[data-testid="visualization-tab"]');
const findAlert = () => wrapper.findComponent(GlAlert);
const findCiLint = () => wrapper.findComponent(CiLint);
+ const findCiValidate = () => wrapper.findComponent(CiValidate);
const findGlTabs = () => wrapper.findComponent(GlTabs);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findPipelineGraph = () => wrapper.findComponent(PipelineGraph);
@@ -111,6 +112,61 @@ describe('Pipeline editor tabs component', () => {
});
});
+ describe('validate tab', () => {
+ describe('with simulatePipeline feature flag ON', () => {
+ describe('while loading', () => {
+ beforeEach(() => {
+ createComponent({
+ appStatus: EDITOR_APP_STATUS_LOADING,
+ provide: {
+ glFeatures: {
+ simulatePipeline: true,
+ },
+ },
+ });
+ });
+
+ it('displays a loading icon if the lint query is loading', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('does not display the validate component', () => {
+ expect(findCiValidate().exists()).toBe(false);
+ });
+ });
+
+ describe('after loading', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: { glFeatures: { simulatePipeline: true } },
+ });
+ });
+
+ it('displays the tab and the validate component', () => {
+ expect(findValidateTab().exists()).toBe(true);
+ expect(findCiValidate().exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('with simulatePipeline feature flag OFF', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ glFeatures: {
+ simulatePipeline: false,
+ },
+ },
+ });
+ });
+
+ it('does not render the tab and the validate component', () => {
+ expect(findValidateTab().exists()).toBe(false);
+ expect(findCiValidate().exists()).toBe(false);
+ });
+ });
+ });
+
describe('lint tab', () => {
describe('while loading', () => {
beforeEach(() => {
@@ -125,6 +181,7 @@ describe('Pipeline editor tabs component', () => {
expect(findCiLint().exists()).toBe(false);
});
});
+
describe('after loading', () => {
beforeEach(() => {
createComponent();
@@ -135,8 +192,24 @@ describe('Pipeline editor tabs component', () => {
expect(findCiLint().exists()).toBe(true);
});
});
- });
+ describe('with simulatePipeline feature flag ON', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ glFeatures: {
+ simulatePipeline: true,
+ },
+ },
+ });
+ });
+
+ it('does not render the tab and the lint component', () => {
+ expect(findLintTab().exists()).toBe(false);
+ expect(findCiLint().exists()).toBe(false);
+ });
+ });
+ });
describe('merged tab', () => {
describe('while loading', () => {
beforeEach(() => {
@@ -221,18 +294,6 @@ describe('Pipeline editor tabs component', () => {
search: `?${TAB_QUERY_PARAM}=${queryValue}`,
});
});
-
- it('is the tab specified in query param and transform it into an index value', async () => {
- setWindowLocation(`${gitlabUrl}?${TAB_QUERY_PARAM}=${MERGED_TAB}`);
- createComponent();
-
- // If the query param has changed to an index, it means we have synced the
- // query with.
- expect(window.location).toMatchObject({
- ...matchObject,
- search: `?${TAB_QUERY_PARAM}=${TABS_INDEX[MERGED_TAB]}`,
- });
- });
});
describe('glTabs', () => {
diff --git a/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js b/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
new file mode 100644
index 00000000000..25972317593
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
@@ -0,0 +1,40 @@
+import { GlButton, GlDropdown } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import CiValidate, { i18n } from '~/pipeline_editor/components/validate/ci_validate.vue';
+
+describe('Pipeline Editor Validate Tab', () => {
+ let wrapper;
+
+ const createComponent = ({ stubs } = {}) => {
+ wrapper = shallowMount(CiValidate, {
+ provide: {
+ validateTabIllustrationPath: '/path/to/img',
+ },
+ stubs,
+ });
+ };
+
+ const findCta = () => wrapper.findComponent(GlButton);
+ const findPipelineSource = () => wrapper.findComponent(GlDropdown);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders disabled pipeline source dropdown', () => {
+ expect(findPipelineSource().exists()).toBe(true);
+ expect(findPipelineSource().attributes('text')).toBe(i18n.pipelineSourceDefault);
+ expect(findPipelineSource().attributes('disabled')).toBe('true');
+ });
+
+ it('renders CTA', () => {
+ expect(findCta().exists()).toBe(true);
+ expect(findCta().text()).toBe(i18n.cta);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
index bf0f7fd8c9f..c6964f190b4 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { GlButton, GlDrawer, GlModal } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import setWindowLocation from 'helpers/set_window_location_helper';
import CiEditorHeader from '~/pipeline_editor/components/editor/ci_editor_header.vue';
import CommitSection from '~/pipeline_editor/components/commit/commit_section.vue';
import PipelineEditorDrawer from '~/pipeline_editor/components/drawer/pipeline_editor_drawer.vue';
@@ -11,11 +12,12 @@ import BranchSwitcher from '~/pipeline_editor/components/file_nav/branch_switche
import PipelineEditorHeader from '~/pipeline_editor/components/header/pipeline_editor_header.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import {
- MERGED_TAB,
- VISUALIZE_TAB,
CREATE_TAB,
- LINT_TAB,
FILE_TREE_DISPLAY_KEY,
+ LINT_TAB,
+ MERGED_TAB,
+ TABS_INDEX,
+ VISUALIZE_TAB,
} from '~/pipeline_editor/constants';
import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue';
@@ -162,6 +164,24 @@ describe('Pipeline editor home wrapper', () => {
await nextTick();
expect(findCommitSection().exists()).toBe(true);
});
+
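+    // The tab query param is passed as its numeric index from TABS_INDEX, not the tab name.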
+ describe('rendering with tab params', () => {
+ it.each`
+ tab | shouldShow
+ ${MERGED_TAB} | ${false}
+ ${VISUALIZE_TAB} | ${false}
+ ${LINT_TAB} | ${false}
+ ${CREATE_TAB} | ${true}
+ `(
+ 'when the tab query param is $tab the commit form is shown: $shouldShow',
+ async ({ tab, shouldShow }) => {
+ setWindowLocation(`https://gitlab.test/ci/editor/?tab=${TABS_INDEX[tab]}`);
+ await createComponent({ stubs: { PipelineEditorTabs } });
+
+ expect(findCommitSection().exists()).toBe(shouldShow);
+ },
+ );
+ });
});
describe('WalkthroughPopover events', () => {
@@ -247,81 +267,63 @@ describe('Pipeline editor home wrapper', () => {
await nextTick();
};
- describe('with pipelineEditorFileTree feature flag OFF', () => {
+ describe('button toggle', () => {
beforeEach(() => {
- createComponent();
+ createComponent({
+ stubs: {
+ GlButton,
+ PipelineEditorFileNav,
+ },
+ });
});
- it('hides the file tree', () => {
- expect(findFileTreeBtn().exists()).toBe(false);
- expect(findPipelineEditorFileTree().exists()).toBe(false);
+ it('shows button toggle', () => {
+ expect(findFileTreeBtn().exists()).toBe(true);
});
- });
-
- describe('with pipelineEditorFileTree feature flag ON', () => {
- describe('button toggle', () => {
- beforeEach(() => {
- createComponent({
- glFeatures: {
- pipelineEditorFileTree: true,
- },
- stubs: {
- GlButton,
- PipelineEditorFileNav,
- },
- });
- });
-
- it('shows button toggle', () => {
- expect(findFileTreeBtn().exists()).toBe(true);
- });
- it('toggles the drawer on button click', async () => {
- await toggleFileTree();
+ it('toggles the drawer on button click', async () => {
+ await toggleFileTree();
- expect(findPipelineEditorFileTree().exists()).toBe(true);
+ expect(findPipelineEditorFileTree().exists()).toBe(true);
- await toggleFileTree();
+ await toggleFileTree();
- expect(findPipelineEditorFileTree().exists()).toBe(false);
- });
+ expect(findPipelineEditorFileTree().exists()).toBe(false);
+ });
- it('sets the display state in local storage', async () => {
- await toggleFileTree();
+ it('sets the display state in local storage', async () => {
+ await toggleFileTree();
- expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('true');
+ expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('true');
- await toggleFileTree();
+ await toggleFileTree();
- expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('false');
- });
+ expect(localStorage.getItem(FILE_TREE_DISPLAY_KEY)).toBe('false');
});
+ });
- describe('when file tree display state is saved in local storage', () => {
- beforeEach(() => {
- localStorage.setItem(FILE_TREE_DISPLAY_KEY, 'true');
- createComponent({
- glFeatures: { pipelineEditorFileTree: true },
- stubs: { PipelineEditorFileNav },
- });
+ describe('when file tree display state is saved in local storage', () => {
+ beforeEach(() => {
+ localStorage.setItem(FILE_TREE_DISPLAY_KEY, 'true');
+ createComponent({
+ stubs: { PipelineEditorFileNav },
});
+ });
- it('shows the file tree by default', () => {
- expect(findPipelineEditorFileTree().exists()).toBe(true);
- });
+ it('shows the file tree by default', () => {
+ expect(findPipelineEditorFileTree().exists()).toBe(true);
});
+ });
- describe('when file tree display state is not saved in local storage', () => {
- beforeEach(() => {
- createComponent({
- glFeatures: { pipelineEditorFileTree: true },
- stubs: { PipelineEditorFileNav },
- });
+ describe('when file tree display state is not saved in local storage', () => {
+ beforeEach(() => {
+ createComponent({
+ stubs: { PipelineEditorFileNav },
});
+ });
- it('hides the file tree by default', () => {
- expect(findPipelineEditorFileTree().exists()).toBe(false);
- });
+ it('hides the file tree by default', () => {
+ expect(findPipelineEditorFileTree().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/pipeline_wizard/components/input_spec.js b/spec/frontend/pipeline_wizard/components/input_wrapper_spec.js
index ee1f3fe70ff..ea2448b1362 100644
--- a/spec/frontend/pipeline_wizard/components/input_spec.js
+++ b/spec/frontend/pipeline_wizard/components/input_wrapper_spec.js
@@ -1,6 +1,6 @@
import { mount, shallowMount } from '@vue/test-utils';
import { Document } from 'yaml';
-import InputWrapper from '~/pipeline_wizard/components/input.vue';
+import InputWrapper from '~/pipeline_wizard/components/input_wrapper.vue';
import TextWidget from '~/pipeline_wizard/components/widgets/text.vue';
describe('Pipeline Wizard -- Input Wrapper', () => {
diff --git a/spec/frontend/pipeline_wizard/components/step_spec.js b/spec/frontend/pipeline_wizard/components/step_spec.js
index 2289a349318..aa87b1d0b04 100644
--- a/spec/frontend/pipeline_wizard/components/step_spec.js
+++ b/spec/frontend/pipeline_wizard/components/step_spec.js
@@ -3,7 +3,7 @@ import { omit } from 'lodash';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import PipelineWizardStep from '~/pipeline_wizard/components/step.vue';
-import InputWrapper from '~/pipeline_wizard/components/input.vue';
+import InputWrapper from '~/pipeline_wizard/components/input_wrapper.vue';
import StepNav from '~/pipeline_wizard/components/step_nav.vue';
import {
stepInputs,
diff --git a/spec/frontend/pipeline_wizard/components/widgets_spec.js b/spec/frontend/pipeline_wizard/components/widgets_spec.js
index 5944c76c5d0..6bd858e746c 100644
--- a/spec/frontend/pipeline_wizard/components/widgets_spec.js
+++ b/spec/frontend/pipeline_wizard/components/widgets_spec.js
@@ -1,7 +1,7 @@
import fs from 'fs';
import { mount } from '@vue/test-utils';
import { Document } from 'yaml';
-import InputWrapper from '~/pipeline_wizard/components/input.vue';
+import InputWrapper from '~/pipeline_wizard/components/input_wrapper.vue';
describe('Test all widgets in ./widgets/* whether they provide a minimal api', () => {
const createComponent = (props = {}, mountFunc = mount) => {
diff --git a/spec/frontend/pipelines/components/pipeline_tabs_spec.js b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
index 89002ee47a8..e0210307823 100644
--- a/spec/frontend/pipelines/components/pipeline_tabs_spec.js
+++ b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlTab } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import PipelineTabs from '~/pipelines/components/pipeline_tabs.vue';
import PipelineGraphWrapper from '~/pipelines/components/graph/graph_component_wrapper.vue';
@@ -21,35 +22,35 @@ describe('The Pipeline Tabs', () => {
const findPipelineApp = () => wrapper.findComponent(PipelineGraphWrapper);
const findTestsApp = () => wrapper.findComponent(TestReports);
+ const findFailedJobsBadge = () => wrapper.findByTestId('failed-builds-counter');
+ const findJobsBadge = () => wrapper.findByTestId('builds-counter');
+
const defaultProvide = {
defaultTabValue: '',
+ failedJobsCount: 1,
+ failedJobsSummary: [],
+ totalJobCount: 10,
};
- const createComponent = (propsData = {}) => {
+ const createComponent = (provide = {}) => {
wrapper = extendedWrapper(
shallowMount(PipelineTabs, {
- propsData,
provide: {
...defaultProvide,
+ ...provide,
},
stubs: {
- JobsApp: { template: '<div class="jobs" />' },
+ GlTab,
TestReports: { template: '<div id="tests" />' },
},
}),
);
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
});
- // The failed jobs MUST be removed from here and tested individually once
- // the logic for the tab is implemented.
describe('Tabs', () => {
it.each`
tabName | tabComponent | appComponent
@@ -58,9 +59,34 @@ describe('The Pipeline Tabs', () => {
${'Jobs'} | ${findJobsTab} | ${findJobsApp}
${'Failed Jobs'} | ${findFailedJobsTab} | ${findFailedJobsApp}
${'Tests'} | ${findTestsTab} | ${findTestsApp}
- `('shows $tabName tab and its associated component', ({ appComponent, tabComponent }) => {
+ `('shows $tabName tab with its associated component', ({ appComponent, tabComponent }) => {
+ createComponent();
+
expect(tabComponent().exists()).toBe(true);
expect(appComponent().exists()).toBe(true);
});
+
+ describe('with no failed jobs', () => {
+ beforeEach(() => {
+ createComponent({ failedJobsCount: 0 });
+ });
+
+ it('hides the failed jobs tab', () => {
+ expect(findFailedJobsTab().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Tabs badges', () => {
+ it.each`
+ tabName | badgeComponent | badgeText
+ ${'Jobs'} | ${findJobsBadge} | ${String(defaultProvide.totalJobCount)}
+ ${'Failed Jobs'} | ${findFailedJobsBadge} | ${String(defaultProvide.failedJobsCount)}
+ `('shows badge for $tabName with the correct text', ({ badgeComponent, badgeText }) => {
+ createComponent();
+
+ expect(badgeComponent().exists()).toBe(true);
+ expect(badgeComponent().text()).toBe(badgeText);
+ });
});
});
diff --git a/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js b/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
index 6d0e99ff63e..1ff32b03344 100644
--- a/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
@@ -5,6 +5,7 @@ import CiIcon from '~/vue_shared/components/ci_icon.vue';
import axios from '~/lib/utils/axios_utils';
import PipelineStage from '~/pipelines/components/pipelines_list/pipeline_stage.vue';
import eventHub from '~/pipelines/event_hub';
+import waitForPromises from 'helpers/wait_for_promises';
import { stageReply } from '../../mock_data';
const dropdownPath = 'path.json';
@@ -55,7 +56,10 @@ describe('Pipelines stage component', () => {
const findDropdownToggle = () => wrapper.find('button.dropdown-toggle');
const findDropdownMenu = () =>
wrapper.find('[data-testid="mini-pipeline-graph-dropdown-menu-list"]');
+ const findDropdownMenuTitle = () =>
+ wrapper.find('[data-testid="pipeline-stage-dropdown-menu-title"]');
const findMergeTrainWarning = () => wrapper.find('[data-testid="warning-message-merge-trains"]');
+ const findLoadingState = () => wrapper.find('[data-testid="pipeline-stage-loading-state"]');
const openStageDropdown = () => {
findDropdownToggle().trigger('click');
@@ -64,6 +68,27 @@ describe('Pipelines stage component', () => {
});
};
+ describe('loading state', () => {
+ beforeEach(async () => {
+ createComponent({ updateDropdown: true });
+
+ mock.onGet(dropdownPath).reply(200, stageReply);
+
+ await openStageDropdown();
+ });
+
+ it('displays loading state while jobs are being fetched', () => {
+ expect(findLoadingState().exists()).toBe(true);
+ expect(findLoadingState().text()).toBe(PipelineStage.i18n.loadingText);
+ });
+
+ it('does not display loading state after jobs have been fetched', async () => {
+ await waitForPromises();
+
+ expect(findLoadingState().exists()).toBe(false);
+ });
+ });
+
describe('default appearance', () => {
beforeEach(() => {
createComponent();
@@ -78,6 +103,17 @@ describe('Pipelines stage component', () => {
expect(findDropdownToggle().exists()).toBe(true);
expect(findCiIcon().exists()).toBe(true);
});
+
+ it('should render a borderless ci-icon', () => {
+ expect(findCiIcon().exists()).toBe(true);
+ expect(findCiIcon().props('isBorderless')).toBe(true);
+ expect(findCiIcon().classes('borderless')).toBe(true);
+ });
+
+ it('should render a ci-icon with a custom border class', () => {
+ expect(findCiIcon().exists()).toBe(true);
+ expect(findCiIcon().classes('gl-border')).toBe(true);
+ });
});
describe('when update dropdown is changed', () => {
@@ -97,6 +133,7 @@ describe('Pipelines stage component', () => {
it('should render the received data and emit `clickedDropdown` event', async () => {
expect(findDropdownMenu().text()).toContain(stageReply.latest_statuses[0].name);
+ expect(findDropdownMenuTitle().text()).toContain(stageReply.name);
expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
});
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index 06fd970778c..fd97c2dbe77 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -47,17 +47,12 @@ describe('Linked pipeline', () => {
const findPipelineLink = () => wrapper.findByTestId('pipelineLink');
const findRetryButton = () => wrapper.findByLabelText('Retry downstream pipeline');
- const createWrapper = ({ propsData, downstreamRetryAction = false }) => {
+ const createWrapper = ({ propsData }) => {
const mockApollo = createMockApollo();
wrapper = extendedWrapper(
mount(LinkedPipelineComponent, {
propsData,
- provide: {
- glFeatures: {
- downstreamRetryAction,
- },
- },
apolloProvider: mockApollo,
}),
);
@@ -164,197 +159,188 @@ describe('Linked pipeline', () => {
});
describe('action button', () => {
- describe('with the `downstream_retry_action` flag on', () => {
- describe('with permissions', () => {
- describe('on an upstream', () => {
- describe('when retryable', () => {
- beforeEach(() => {
- const retryablePipeline = {
- ...upstreamProps,
- pipeline: { ...mockPipeline, retryable: true },
- };
-
- createWrapper({ propsData: retryablePipeline, downstreamRetryAction: true });
- });
+ describe('with permissions', () => {
+ describe('on an upstream', () => {
+ describe('when retryable', () => {
+ beforeEach(() => {
+ const retryablePipeline = {
+ ...upstreamProps,
+ pipeline: { ...mockPipeline, retryable: true },
+ };
+
+ createWrapper({ propsData: retryablePipeline });
+ });
- it('does not show the retry or cancel button', () => {
- expect(findCancelButton().exists()).toBe(false);
- expect(findRetryButton().exists()).toBe(false);
- });
+ it('does not show the retry or cancel button', () => {
+ expect(findCancelButton().exists()).toBe(false);
+ expect(findRetryButton().exists()).toBe(false);
});
});
+ });
- describe('on a downstream', () => {
- describe('when retryable', () => {
- beforeEach(() => {
- const retryablePipeline = {
- ...downstreamProps,
- pipeline: { ...mockPipeline, retryable: true },
- };
+ describe('on a downstream', () => {
+ describe('when retryable', () => {
+ beforeEach(() => {
+ const retryablePipeline = {
+ ...downstreamProps,
+ pipeline: { ...mockPipeline, retryable: true },
+ };
- createWrapper({ propsData: retryablePipeline, downstreamRetryAction: true });
- });
+ createWrapper({ propsData: retryablePipeline });
+ });
- it('shows only the retry button', () => {
- expect(findCancelButton().exists()).toBe(false);
- expect(findRetryButton().exists()).toBe(true);
- });
+ it('shows only the retry button', () => {
+ expect(findCancelButton().exists()).toBe(false);
+ expect(findRetryButton().exists()).toBe(true);
+ });
- it('hides the card tooltip when the action button tooltip is hovered', async () => {
- expect(findCardTooltip().exists()).toBe(true);
+ it.each`
+ findElement | name
+ ${findRetryButton} | ${'retry button'}
+ ${findExpandButton} | ${'expand button'}
+ `('hides the card tooltip when $name is hovered', async ({ findElement }) => {
+ expect(findCardTooltip().exists()).toBe(true);
- await findRetryButton().trigger('mouseover');
+ await findElement().trigger('mouseover');
- expect(findCardTooltip().exists()).toBe(false);
- });
+ expect(findCardTooltip().exists()).toBe(false);
+ });
- describe('and the retry button is clicked', () => {
- describe('on success', () => {
- beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
- jest.spyOn(wrapper.vm, '$emit');
- await findRetryButton().trigger('click');
- });
+ describe('and the retry button is clicked', () => {
+ describe('on success', () => {
+ beforeEach(async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
+ jest.spyOn(wrapper.vm, '$emit');
+ await findRetryButton().trigger('click');
+ });
- it('calls the retry mutation ', () => {
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: RetryPipelineMutation,
- variables: {
- id: convertToGraphQLId(PIPELINE_GRAPHQL_TYPE, mockPipeline.id),
- },
- });
+ it('calls the retry mutation ', () => {
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: RetryPipelineMutation,
+ variables: {
+ id: convertToGraphQLId(PIPELINE_GRAPHQL_TYPE, mockPipeline.id),
+ },
});
+ });
- it('emits the refreshPipelineGraph event', () => {
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('refreshPipelineGraph');
- });
+ it('emits the refreshPipelineGraph event', () => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('refreshPipelineGraph');
});
+ });
- describe('on failure', () => {
- beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({ errors: [] });
- jest.spyOn(wrapper.vm, '$emit');
- await findRetryButton().trigger('click');
- });
+ describe('on failure', () => {
+ beforeEach(async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({ errors: [] });
+ jest.spyOn(wrapper.vm, '$emit');
+ await findRetryButton().trigger('click');
+ });
- it('emits an error event', () => {
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('error', {
- type: ACTION_FAILURE,
- });
+ it('emits an error event', () => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('error', {
+ type: ACTION_FAILURE,
});
});
});
});
+ });
- describe('when cancelable', () => {
- beforeEach(() => {
- const cancelablePipeline = {
- ...downstreamProps,
- pipeline: { ...mockPipeline, cancelable: true },
- };
+ describe('when cancelable', () => {
+ beforeEach(() => {
+ const cancelablePipeline = {
+ ...downstreamProps,
+ pipeline: { ...mockPipeline, cancelable: true },
+ };
- createWrapper({ propsData: cancelablePipeline, downstreamRetryAction: true });
- });
+ createWrapper({ propsData: cancelablePipeline });
+ });
- it('shows only the cancel button ', () => {
- expect(findCancelButton().exists()).toBe(true);
- expect(findRetryButton().exists()).toBe(false);
- });
+ it('shows only the cancel button ', () => {
+ expect(findCancelButton().exists()).toBe(true);
+ expect(findRetryButton().exists()).toBe(false);
+ });
- it('hides the card tooltip when the action button tooltip is hovered', async () => {
- expect(findCardTooltip().exists()).toBe(true);
+ it.each`
+ findElement | name
+ ${findCancelButton} | ${'cancel button'}
+ ${findExpandButton} | ${'expand button'}
+ `('hides the card tooltip when $name is hovered', async ({ findElement }) => {
+ expect(findCardTooltip().exists()).toBe(true);
- await findCancelButton().trigger('mouseover');
+ await findElement().trigger('mouseover');
- expect(findCardTooltip().exists()).toBe(false);
- });
+ expect(findCardTooltip().exists()).toBe(false);
+ });
- describe('and the cancel button is clicked', () => {
- describe('on success', () => {
- beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
- jest.spyOn(wrapper.vm, '$emit');
- await findCancelButton().trigger('click');
- });
+ describe('and the cancel button is clicked', () => {
+ describe('on success', () => {
+ beforeEach(async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
+ jest.spyOn(wrapper.vm, '$emit');
+ await findCancelButton().trigger('click');
+ });
- it('calls the cancel mutation', () => {
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: CancelPipelineMutation,
- variables: {
- id: convertToGraphQLId(PIPELINE_GRAPHQL_TYPE, mockPipeline.id),
- },
- });
- });
- it('emits the refreshPipelineGraph event', () => {
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('refreshPipelineGraph');
+ it('calls the cancel mutation', () => {
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: CancelPipelineMutation,
+ variables: {
+ id: convertToGraphQLId(PIPELINE_GRAPHQL_TYPE, mockPipeline.id),
+ },
});
});
- describe('on failure', () => {
- beforeEach(async () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({ errors: [] });
- jest.spyOn(wrapper.vm, '$emit');
- await findCancelButton().trigger('click');
- });
- it('emits an error event', () => {
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('error', {
- type: ACTION_FAILURE,
- });
+ it('emits the refreshPipelineGraph event', () => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('refreshPipelineGraph');
+ });
+ });
+ describe('on failure', () => {
+ beforeEach(async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({ errors: [] });
+ jest.spyOn(wrapper.vm, '$emit');
+ await findCancelButton().trigger('click');
+ });
+ it('emits an error event', () => {
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('error', {
+ type: ACTION_FAILURE,
});
});
});
});
+ });
- describe('when both cancellable and retryable', () => {
- beforeEach(() => {
- const pipelineWithTwoActions = {
- ...downstreamProps,
- pipeline: { ...mockPipeline, cancelable: true, retryable: true },
- };
-
- createWrapper({ propsData: pipelineWithTwoActions, downstreamRetryAction: true });
- });
+ describe('when both cancellable and retryable', () => {
+ beforeEach(() => {
+ const pipelineWithTwoActions = {
+ ...downstreamProps,
+ pipeline: { ...mockPipeline, cancelable: true, retryable: true },
+ };
- it('only shows the cancel button', () => {
- expect(findRetryButton().exists()).toBe(false);
- expect(findCancelButton().exists()).toBe(true);
- });
+ createWrapper({ propsData: pipelineWithTwoActions });
});
- });
- });
-
- describe('without permissions', () => {
- beforeEach(() => {
- const pipelineWithTwoActions = {
- ...downstreamProps,
- pipeline: {
- ...mockPipeline,
- cancelable: true,
- retryable: true,
- userPermissions: { updatePipeline: false },
- },
- };
-
- createWrapper({ propsData: pipelineWithTwoActions });
- });
- it('does not show any action button', () => {
- expect(findRetryButton().exists()).toBe(false);
- expect(findCancelButton().exists()).toBe(false);
+ it('only shows the cancel button', () => {
+ expect(findRetryButton().exists()).toBe(false);
+ expect(findCancelButton().exists()).toBe(true);
+ });
});
});
});
- describe('with the `downstream_retry_action` flag off', () => {
+ describe('without permissions', () => {
beforeEach(() => {
const pipelineWithTwoActions = {
...downstreamProps,
- pipeline: { ...mockPipeline, cancelable: true, retryable: true },
+ pipeline: {
+ ...mockPipeline,
+ cancelable: true,
+ retryable: true,
+ userPermissions: { updatePipeline: false },
+ },
};
createWrapper({ propsData: pipelineWithTwoActions });
});
+
it('does not show any action button', () => {
expect(findRetryButton().exists()).toBe(false);
expect(findCancelButton().exists()).toBe(false);
@@ -365,19 +351,44 @@ describe('Linked pipeline', () => {
describe('expand button', () => {
it.each`
- pipelineType | anglePosition | buttonBorderClasses | expanded
- ${downstreamProps} | ${'angle-right'} | ${'gl-border-l-0!'} | ${false}
- ${downstreamProps} | ${'angle-left'} | ${'gl-border-l-0!'} | ${true}
- ${upstreamProps} | ${'angle-left'} | ${'gl-border-r-0!'} | ${false}
- ${upstreamProps} | ${'angle-right'} | ${'gl-border-r-0!'} | ${true}
+ pipelineType | chevronPosition | buttonBorderClasses | expanded
+ ${downstreamProps} | ${'chevron-lg-right'} | ${'gl-border-l-0!'} | ${false}
+ ${downstreamProps} | ${'chevron-lg-left'} | ${'gl-border-l-0!'} | ${true}
+ ${upstreamProps} | ${'chevron-lg-left'} | ${'gl-border-r-0!'} | ${false}
+ ${upstreamProps} | ${'chevron-lg-right'} | ${'gl-border-r-0!'} | ${true}
`(
- '$pipelineType.columnTitle pipeline button icon should be $anglePosition with $buttonBorderClasses if expanded state is $expanded',
- ({ pipelineType, anglePosition, buttonBorderClasses, expanded }) => {
+ '$pipelineType.columnTitle pipeline button icon should be $chevronPosition with $buttonBorderClasses if expanded state is $expanded',
+ ({ pipelineType, chevronPosition, buttonBorderClasses, expanded }) => {
createWrapper({ propsData: { ...pipelineType, expanded } });
- expect(findExpandButton().props('icon')).toBe(anglePosition);
+ expect(findExpandButton().props('icon')).toBe(chevronPosition);
expect(findExpandButton().classes()).toContain(buttonBorderClasses);
},
);
+
+ describe('shadow border', () => {
+ beforeEach(() => {
+ createWrapper({ propsData: downstreamProps });
+ });
+
+ it.each`
+ activateEventName | deactivateEventName
+ ${'mouseover'} | ${'mouseout'}
+ ${'focus'} | ${'blur'}
+ `(
+ 'applies the class on $activateEventName and removes it on $deactivateEventName ',
+ async ({ activateEventName, deactivateEventName }) => {
+ const shadowClass = 'gl-shadow-none!';
+
+ expect(findExpandButton().classes()).toContain(shadowClass);
+
+ await findExpandButton().vm.$emit(activateEventName);
+ expect(findExpandButton().classes()).not.toContain(shadowClass);
+
+ await findExpandButton().vm.$emit(deactivateEventName);
+ expect(findExpandButton().classes()).toContain(shadowClass);
+ },
+ );
+ });
});
describe('when isLoading is true', () => {
diff --git a/spec/frontend/pipelines/notification/deprecated_type_keyword_notification_spec.js b/spec/frontend/pipelines/notification/deprecated_type_keyword_notification_spec.js
deleted file mode 100644
index f626652a944..00000000000
--- a/spec/frontend/pipelines/notification/deprecated_type_keyword_notification_spec.js
+++ /dev/null
@@ -1,146 +0,0 @@
-import VueApollo from 'vue-apollo';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import { GlAlert, GlSprintf } from '@gitlab/ui';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import DeprecatedTypeKeywordNotification from '~/pipelines/components/notification/deprecated_type_keyword_notification.vue';
-import getPipelineWarnings from '~/pipelines/graphql/queries/get_pipeline_warnings.query.graphql';
-import {
- mockWarningsWithoutDeprecation,
- mockWarningsRootType,
- mockWarningsType,
- mockWarningsTypesAll,
-} from './mock_data';
-
-const defaultProvide = {
- deprecatedKeywordsDocPath: '/help/ci/yaml/index.md#deprecated-keywords',
- fullPath: '/namespace/my-project',
- pipelineIid: 4,
-};
-
-let wrapper;
-
-const mockWarnings = jest.fn();
-
-const createComponent = ({ isLoading = false, options = {} } = {}) => {
- return shallowMount(DeprecatedTypeKeywordNotification, {
- stubs: {
- GlSprintf,
- },
- provide: {
- ...defaultProvide,
- },
- mocks: {
- $apollo: {
- queries: {
- warnings: {
- loading: isLoading,
- },
- },
- },
- },
- ...options,
- });
-};
-
-const createComponentWithApollo = () => {
- const localVue = createLocalVue();
- localVue.use(VueApollo);
-
- const handlers = [[getPipelineWarnings, mockWarnings]];
- const mockApollo = createMockApollo(handlers);
-
- return createComponent({
- options: {
- localVue,
- apolloProvider: mockApollo,
- mocks: {},
- },
- });
-};
-
-const findAlert = () => wrapper.findComponent(GlAlert);
-const findAlertItems = () => findAlert().findAll('li');
-
-afterEach(() => {
- wrapper.destroy();
-});
-
-describe('Deprecated keyword notification', () => {
- describe('while loading the pipeline warnings', () => {
- beforeEach(() => {
- wrapper = createComponent({ isLoading: true });
- });
-
- it('does not display the notification', () => {
- expect(findAlert().exists()).toBe(false);
- });
- });
-
- describe('if there is an error in the query', () => {
- beforeEach(async () => {
- mockWarnings.mockResolvedValue({ errors: ['It didnt work'] });
- wrapper = createComponentWithApollo();
- await waitForPromises();
- });
-
- it('does not display the notification', () => {
- expect(findAlert().exists()).toBe(false);
- });
- });
-
- describe('with a valid query result', () => {
- describe('if there are no deprecation warnings', () => {
- beforeEach(async () => {
- mockWarnings.mockResolvedValue(mockWarningsWithoutDeprecation);
- wrapper = createComponentWithApollo();
- await waitForPromises();
- });
- it('does not show the notification', () => {
- expect(findAlert().exists()).toBe(false);
- });
- });
-
- describe('with a root type deprecation message', () => {
- beforeEach(async () => {
- mockWarnings.mockResolvedValue(mockWarningsRootType);
- wrapper = createComponentWithApollo();
- await waitForPromises();
- });
- it('shows the notification with one item', () => {
- expect(findAlert().exists()).toBe(true);
- expect(findAlertItems()).toHaveLength(1);
- expect(findAlertItems().at(0).text()).toContain('types');
- });
- });
-
- describe('with a job type deprecation message', () => {
- beforeEach(async () => {
- mockWarnings.mockResolvedValue(mockWarningsType);
- wrapper = createComponentWithApollo();
- await waitForPromises();
- });
- it('shows the notification with one item', () => {
- expect(findAlert().exists()).toBe(true);
- expect(findAlertItems()).toHaveLength(1);
- expect(findAlertItems().at(0).text()).toContain('type');
- expect(findAlertItems().at(0).text()).not.toContain('types');
- });
- });
-
- describe('with both the root types and job type deprecation message', () => {
- beforeEach(async () => {
- mockWarnings.mockResolvedValue(mockWarningsTypesAll);
- wrapper = createComponentWithApollo();
- await waitForPromises();
- });
- it('shows the notification with two items', () => {
- expect(findAlert().exists()).toBe(true);
- expect(findAlertItems()).toHaveLength(2);
- expect(findAlertItems().at(0).text()).toContain('types');
- expect(findAlertItems().at(1).text()).toContain('type');
- expect(findAlertItems().at(1).text()).not.toContain('types');
- });
- });
- });
-});
diff --git a/spec/frontend/pipelines/pipeline_tabs_spec.js b/spec/frontend/pipelines/pipeline_tabs_spec.js
new file mode 100644
index 00000000000..b184ce31d20
--- /dev/null
+++ b/spec/frontend/pipelines/pipeline_tabs_spec.js
@@ -0,0 +1,95 @@
+import { createAppOptions, createPipelineTabs } from '~/pipelines/pipeline_tabs';
+import { updateHistory } from '~/lib/utils/url_utility';
+
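+// url_utility is mocked so removeParams returns a fixed URL and updateHistory is a spy the tests can assert on.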
+jest.mock('~/lib/utils/url_utility', () => ({
+ removeParams: () => 'gitlab.com',
+ updateHistory: jest.fn(),
+ joinPaths: () => {},
+ setUrlFragment: () => {},
+}));
+
+jest.mock('~/pipelines/utils', () => ({
+ getPipelineDefaultTab: () => '',
+}));
+
+describe('~/pipelines/pipeline_tabs.js', () => {
+ describe('createAppOptions', () => {
+ const SELECTOR = 'SELECTOR';
+
+ let el;
+
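+    // Builds the mount-point element carrying the data attributes that createAppOptions reads.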
+ const createElement = () => {
+ el = document.createElement('div');
+ el.id = SELECTOR;
+ el.dataset.canGenerateCodequalityReports = 'true';
+ el.dataset.codequalityReportDownloadPath = 'codequalityReportDownloadPath';
+ el.dataset.downloadablePathForReportType = 'downloadablePathForReportType';
+ el.dataset.exposeSecurityDashboard = 'true';
+ el.dataset.exposeLicenseScanningData = 'true';
+ el.dataset.failedJobsCount = 1;
+ el.dataset.failedJobsSummary = '[]';
+ el.dataset.graphqlResourceEtag = 'graphqlResourceEtag';
+ el.dataset.pipelineIid = '123';
+ el.dataset.pipelineProjectPath = 'pipelineProjectPath';
+
+ document.body.appendChild(el);
+ };
+
+ afterEach(() => {
+ el = null;
+ });
+
+ it("extracts the properties from the element's dataset", () => {
+ createElement();
+ const options = createAppOptions(`#${SELECTOR}`, null);
+
+ expect(options).toMatchObject({
+ el,
+ provide: {
+ canGenerateCodequalityReports: true,
+ codequalityReportDownloadPath: 'codequalityReportDownloadPath',
+ downloadablePathForReportType: 'downloadablePathForReportType',
+ exposeSecurityDashboard: true,
+ exposeLicenseScanningData: true,
+ failedJobsCount: '1',
+ failedJobsSummary: [],
+ graphqlResourceEtag: 'graphqlResourceEtag',
+ pipelineIid: '123',
+ pipelineProjectPath: 'pipelineProjectPath',
+ },
+ });
+ });
+
+ it('returns `null` if el does not exist', () => {
+ expect(createAppOptions('foo', null)).toBe(null);
+ });
+ });
+
+ describe('createPipelineTabs', () => {
+ const title = 'Pipeline Tabs';
+
+ beforeAll(() => {
+ document.title = title;
+ });
+
+ afterAll(() => {
+ document.title = '';
+ });
+
+ it('calls `updateHistory` with correct params', () => {
+ createPipelineTabs({});
+
+ expect(updateHistory).toHaveBeenCalledWith({
+ title,
+ url: 'gitlab.com',
+ replace: true,
+ });
+ });
+
+ it("returns early if options aren't provided", () => {
+ createPipelineTabs();
+
+ expect(updateHistory).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/test_case_details_spec.js b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
index 4b33c1522a5..29c07e5e9f8 100644
--- a/spec/frontend/pipelines/test_reports/test_case_details_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
@@ -1,4 +1,4 @@
-import { GlModal } from '@gitlab/ui';
+import { GlModal, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import TestCaseDetails from '~/pipelines/components/test_reports/test_case_details.vue';
@@ -9,6 +9,8 @@ describe('Test case details', () => {
const defaultTestCase = {
classname: 'spec.test_spec',
name: 'Test#something cool',
+ file: '~/index.js',
+ filePath: '/src/javascripts/index.js',
formattedTime: '10.04ms',
recent_failures: {
count: 2,
@@ -19,6 +21,8 @@ describe('Test case details', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findName = () => wrapper.findByTestId('test-case-name');
+ const findFile = () => wrapper.findByTestId('test-case-file');
+ const findFileLink = () => wrapper.findComponent(GlLink);
const findDuration = () => wrapper.findByTestId('test-case-duration');
const findRecentFailures = () => wrapper.findByTestId('test-case-recent-failures');
const findAttachmentUrl = () => wrapper.findByTestId('test-case-attachment-url');
@@ -57,11 +61,26 @@ describe('Test case details', () => {
expect(findName().text()).toBe(defaultTestCase.name);
});
+ it('renders the test case file', () => {
+ expect(findFile().text()).toBe(defaultTestCase.file);
+ expect(findFileLink().attributes('href')).toBe(defaultTestCase.filePath);
+ });
+
it('renders the test case duration', () => {
expect(findDuration().text()).toBe(defaultTestCase.formattedTime);
});
});
+ describe('when test case has execution time instead of formatted time', () => {
+ beforeEach(() => {
+ createComponent({ ...defaultTestCase, formattedTime: null, execution_time: 17 });
+ });
+
+ it('renders the test case duration', () => {
+ expect(findDuration().text()).toBe('17 s');
+ });
+ });
+
describe('when test case has recent failures', () => {
describe('has only 1 recent failure', () => {
it('renders the recent failure', () => {
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index dc72fa31ace..25650b24705 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -1,9 +1,9 @@
-import { GlButton, GlFriendlyWrap, GlLink, GlPagination } from '@gitlab/ui';
+import { GlButton, GlFriendlyWrap, GlLink, GlPagination, GlEmptyState } from '@gitlab/ui';
import Vue from 'vue';
import Vuex from 'vuex';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import SuiteTable from '~/pipelines/components/test_reports/test_suite_table.vue';
+import SuiteTable, { i18n } from '~/pipelines/components/test_reports/test_suite_table.vue';
import { TestStatus } from '~/pipelines/constants';
import * as getters from '~/pipelines/stores/test_reports/getters';
import { formatFilePath } from '~/pipelines/stores/test_reports/utils';
@@ -26,6 +26,7 @@ describe('Test reports suite table', () => {
const noCasesMessage = () => wrapper.findByTestId('no-test-cases');
const artifactsExpiredMessage = () => wrapper.findByTestId('artifacts-expired');
+ const artifactsExpiredEmptyState = () => wrapper.find(GlEmptyState);
const allCaseRows = () => wrapper.findAllByTestId('test-case-row');
const findCaseRowAtIndex = (index) => wrapper.findAllByTestId('test-case-row').at(index);
const findLinkForRow = (row) => row.find(GlLink);
@@ -65,11 +66,15 @@ describe('Test reports suite table', () => {
expect(artifactsExpiredMessage().exists()).toBe(false);
});
- it('should render a message when artifacts have expired', () => {
+ it('should render an empty state when artifacts have expired', () => {
createComponent({ suite: [], errorMessage: ARTIFACTS_EXPIRED_ERROR_MESSAGE });
+ const emptyState = artifactsExpiredEmptyState();
- expect(noCasesMessage().exists()).toBe(true);
+ expect(noCasesMessage().exists()).toBe(false);
expect(artifactsExpiredMessage().exists()).toBe(true);
+
+ expect(emptyState.exists()).toBe(true);
+ expect(emptyState.props('title')).toBe(i18n.expiredArtifactsTitle);
});
describe('when a test suite is supplied', () => {
diff --git a/spec/frontend/profile/account/components/update_username_spec.js b/spec/frontend/profile/account/components/update_username_spec.js
index e342b7c4ba1..0e56bccf27e 100644
--- a/spec/frontend/profile/account/components/update_username_spec.js
+++ b/spec/frontend/profile/account/components/update_username_spec.js
@@ -52,7 +52,7 @@ describe('UpdateUsername component', () => {
openModalBtn: wrapper.find('[data-testid="username-change-confirmation-modal"]'),
modalBody: modal.find('.modal-body'),
modalHeader: modal.find('.modal-title'),
- confirmModalBtn: wrapper.find('.btn-warning'),
+ confirmModalBtn: wrapper.find('.btn-confirm'),
};
};
diff --git a/spec/frontend/projects/clusters_deprecation_alert/components/clusters_deprecation_alert_spec.js b/spec/frontend/projects/clusters_deprecation_alert/components/clusters_deprecation_alert_spec.js
new file mode 100644
index 00000000000..d230b96ad82
--- /dev/null
+++ b/spec/frontend/projects/clusters_deprecation_alert/components/clusters_deprecation_alert_spec.js
@@ -0,0 +1,45 @@
+import { GlAlert, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import ClustersDeprecationAlert from '~/projects/clusters_deprecation_alert/components/clusters_deprecation_alert.vue';
+
+const message = 'Alert message';
+
+describe('ClustersDeprecationAlert', () => {
+ let wrapper;
+
+ const provideData = {
+ message,
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ const createComponent = () => {
+ wrapper = shallowMount(ClustersDeprecationAlert, {
+ provide: provideData,
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ it('should render a non-dismissible warning alert', () => {
+ expect(findAlert().props()).toMatchObject({
+ dismissible: false,
+ variant: 'warning',
+ });
+ });
+
+ it('should display the correct message', () => {
+ expect(findAlert().text()).toBe(message);
+ });
+ });
+});
diff --git a/spec/frontend/projects/compare/components/revision_card_spec.js b/spec/frontend/projects/compare/components/revision_card_spec.js
index 57906045337..a741393fcf3 100644
--- a/spec/frontend/projects/compare/components/revision_card_spec.js
+++ b/spec/frontend/projects/compare/components/revision_card_spec.js
@@ -1,4 +1,3 @@
-import { GlCard } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RepoDropdown from '~/projects/compare/components/repo_dropdown.vue';
import RevisionCard from '~/projects/compare/components/revision_card.vue';
@@ -14,9 +13,6 @@ describe('RepoDropdown component', () => {
...defaultProps,
...props,
},
- stubs: {
- GlCard,
- },
});
};
@@ -29,8 +25,10 @@ describe('RepoDropdown component', () => {
createComponent();
});
+ const RevisionCardWrapper = () => wrapper.find('.revision-card');
+
it('displays revision text', () => {
- expect(wrapper.find(GlCard).text()).toContain(defaultProps.revisionText);
+ expect(RevisionCardWrapper().text()).toContain(defaultProps.revisionText);
});
it('renders RepoDropdown component', () => {
diff --git a/spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js b/spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js
index 42259a5c392..f50dd393174 100644
--- a/spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js
+++ b/spec/frontend/projects/new/components/new_project_push_tip_popover_spec.js
@@ -57,7 +57,7 @@ describe('New project push tip popover', () => {
});
expect(findFormInput().attributes()).toMatchObject({
'aria-label': 'Push project from command line',
- readonly: 'readonly',
+ readonly: '',
});
});
diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js
index 9c94925c817..98c7856a61a 100644
--- a/spec/frontend/projects/pipelines/charts/components/app_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js
@@ -13,6 +13,7 @@ jest.mock('~/lib/utils/url_utility');
const DeploymentFrequencyChartsStub = { name: 'DeploymentFrequencyCharts', render: () => {} };
const LeadTimeChartsStub = { name: 'LeadTimeCharts', render: () => {} };
+const TimeToRestoreServiceChartsStub = { name: 'TimeToRestoreServiceCharts', render: () => {} };
const ProjectQualitySummaryStub = { name: 'ProjectQualitySummary', render: () => {} };
describe('ProjectsPipelinesChartsApp', () => {
@@ -31,6 +32,7 @@ describe('ProjectsPipelinesChartsApp', () => {
stubs: {
DeploymentFrequencyCharts: DeploymentFrequencyChartsStub,
LeadTimeCharts: LeadTimeChartsStub,
+ TimeToRestoreServiceCharts: TimeToRestoreServiceChartsStub,
ProjectQualitySummary: ProjectQualitySummaryStub,
},
},
@@ -47,6 +49,7 @@ describe('ProjectsPipelinesChartsApp', () => {
const findAllGlTabs = () => wrapper.findAll(GlTab);
const findGlTabAtIndex = (index) => findAllGlTabs().at(index);
const findLeadTimeCharts = () => wrapper.find(LeadTimeChartsStub);
+ const findTimeToRestoreServiceCharts = () => wrapper.find(TimeToRestoreServiceChartsStub);
const findDeploymentFrequencyCharts = () => wrapper.find(DeploymentFrequencyChartsStub);
const findPipelineCharts = () => wrapper.find(PipelineCharts);
const findProjectQualitySummary = () => wrapper.find(ProjectQualitySummaryStub);
@@ -62,6 +65,7 @@ describe('ProjectsPipelinesChartsApp', () => {
expect(findGlTabAtIndex(0).attributes('title')).toBe('Pipelines');
expect(findGlTabAtIndex(1).attributes('title')).toBe('Deployment frequency');
expect(findGlTabAtIndex(2).attributes('title')).toBe('Lead time');
+ expect(findGlTabAtIndex(3).attributes('title')).toBe('Time to restore service');
});
it('renders the pipeline charts', () => {
@@ -76,6 +80,10 @@ describe('ProjectsPipelinesChartsApp', () => {
expect(findLeadTimeCharts().exists()).toBe(true);
});
+ it('renders the time to restore service charts', () => {
+ expect(findTimeToRestoreServiceCharts().exists()).toBe(true);
+ });
+
it('renders the project quality summary', () => {
expect(findProjectQualitySummary().exists()).toBe(true);
});
@@ -123,10 +131,11 @@ describe('ProjectsPipelinesChartsApp', () => {
describe('event tracking', () => {
it.each`
- testId | event
- ${'pipelines-tab'} | ${'p_analytics_ci_cd_pipelines'}
- ${'deployment-frequency-tab'} | ${'p_analytics_ci_cd_deployment_frequency'}
- ${'lead-time-tab'} | ${'p_analytics_ci_cd_lead_time'}
+ testId | event
+ ${'pipelines-tab'} | ${'p_analytics_ci_cd_pipelines'}
+ ${'deployment-frequency-tab'} | ${'p_analytics_ci_cd_deployment_frequency'}
+ ${'lead-time-tab'} | ${'p_analytics_ci_cd_lead_time'}
+ ${'time-to-restore-service-tab'} | ${'p_analytics_ci_cd_time_to_restore_service'}
`('tracks the $event event when clicked', ({ testId, event }) => {
jest.spyOn(API, 'trackRedisHllUserEvent');
@@ -141,12 +150,13 @@ describe('ProjectsPipelinesChartsApp', () => {
describe('when provided with a query param', () => {
it.each`
- chart | tab
- ${'lead-time'} | ${'2'}
- ${'deployment-frequency'} | ${'1'}
- ${'pipelines'} | ${'0'}
- ${'fake'} | ${'0'}
- ${''} | ${'0'}
+ chart | tab
+ ${'time-to-restore-service'} | ${'3'}
+ ${'lead-time'} | ${'2'}
+ ${'deployment-frequency'} | ${'1'}
+ ${'pipelines'} | ${'0'}
+ ${'fake'} | ${'0'}
+ ${''} | ${'0'}
`('shows the correct tab for URL parameter "$chart"', ({ chart, tab }) => {
setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts?chart=${chart}`);
getParameterValues.mockImplementation((name) => {
diff --git a/spec/frontend/projects/project_new_spec.js b/spec/frontend/projects/project_new_spec.js
index fe325343da8..3034037fb1d 100644
--- a/spec/frontend/projects/project_new_spec.js
+++ b/spec/frontend/projects/project_new_spec.js
@@ -1,4 +1,3 @@
-import $ from 'jquery';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import projectNew from '~/projects/project_new';
@@ -8,6 +7,9 @@ describe('New Project', () => {
let $projectPath;
let $projectName;
+ const mockKeyup = (el) => el.dispatchEvent(new KeyboardEvent('keyup'));
+ const mockChange = (el) => el.dispatchEvent(new Event('change'));
+
beforeEach(() => {
setHTMLFixture(`
<div class='toggle-import-form'>
@@ -29,122 +31,127 @@ describe('New Project', () => {
</div>
`);
- $projectImportUrl = $('#project_import_url');
- $projectPath = $('#project_path');
- $projectName = $('#project_name');
+ $projectImportUrl = document.querySelector('#project_import_url');
+ $projectPath = document.querySelector('#project_path');
+ $projectName = document.querySelector('#project_name');
});
afterEach(() => {
resetHTMLFixture();
});
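+  // Fires the event before assigning the new value, so any bound handlers see the field's previous contents.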
+ const setValueAndTriggerEvent = (el, value, event) => {
+ event(el);
+ el.value = value;
+ };
+
describe('deriveProjectPathFromUrl', () => {
const dummyImportUrl = `${TEST_HOST}/dummy/import/url.git`;
beforeEach(() => {
projectNew.bindEvents();
- $projectPath.val('').keyup().val(dummyImportUrl);
+ setValueAndTriggerEvent($projectPath, dummyImportUrl, mockKeyup);
});
it('does not change project path for disabled $projectImportUrl', () => {
- $projectImportUrl.prop('disabled', true);
+ $projectImportUrl.setAttribute('disabled', true);
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual(dummyImportUrl);
+ expect($projectPath.value).toEqual(dummyImportUrl);
});
describe('for enabled $projectImportUrl', () => {
beforeEach(() => {
- $projectImportUrl.prop('disabled', false);
+ $projectImportUrl.setAttribute('disabled', false);
});
it('does not change project path if it is set by user', () => {
- $projectPath.keyup();
+ mockKeyup($projectPath);
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual(dummyImportUrl);
+ expect($projectPath.value).toEqual(dummyImportUrl);
});
it('does not change project path for empty $projectImportUrl', () => {
- $projectImportUrl.val('');
+ $projectImportUrl.value = '';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual(dummyImportUrl);
+ expect($projectPath.value).toEqual(dummyImportUrl);
});
it('does not change project path for whitespace $projectImportUrl', () => {
- $projectImportUrl.val(' ');
+ $projectImportUrl.value = ' ';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual(dummyImportUrl);
+ expect($projectPath.value).toEqual(dummyImportUrl);
});
it('does not change project path for $projectImportUrl without slashes', () => {
- $projectImportUrl.val('has-no-slash');
+ $projectImportUrl.value = 'has-no-slash';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual(dummyImportUrl);
+ expect($projectPath.value).toEqual(dummyImportUrl);
});
it('changes project path to last $projectImportUrl component', () => {
- $projectImportUrl.val('/this/is/last');
+ $projectImportUrl.value = '/this/is/last';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual('last');
+ expect($projectPath.value).toEqual('last');
});
it('ignores trailing slashes in $projectImportUrl', () => {
- $projectImportUrl.val('/has/trailing/slash/');
+ $projectImportUrl.value = '/has/trailing/slash/';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual('slash');
+ expect($projectPath.value).toEqual('slash');
});
it('ignores fragment identifier in $projectImportUrl', () => {
- $projectImportUrl.val('/this/has/a#fragment-identifier/');
+ $projectImportUrl.value = '/this/has/a#fragment-identifier/';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual('a');
+ expect($projectPath.value).toEqual('a');
});
it('ignores query string in $projectImportUrl', () => {
- $projectImportUrl.val('/url/with?query=string');
+ $projectImportUrl.value = '/url/with?query=string';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual('with');
+ expect($projectPath.value).toEqual('with');
});
it('ignores trailing .git in $projectImportUrl', () => {
- $projectImportUrl.val('/repository.git');
+ $projectImportUrl.value = '/repository.git';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual('repository');
+ expect($projectPath.value).toEqual('repository');
});
it('changes project path for HTTPS URL in $projectImportUrl', () => {
- $projectImportUrl.val('https://gitlab.company.com/group/project.git');
+ $projectImportUrl.value = 'https://gitlab.company.com/group/project.git';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual('project');
+ expect($projectPath.value).toEqual('project');
});
it('changes project path for SSH URL in $projectImportUrl', () => {
- $projectImportUrl.val('git@gitlab.com:gitlab-org/gitlab-ce.git');
+ $projectImportUrl.value = 'git@gitlab.com:gitlab-org/gitlab-ce.git';
projectNew.deriveProjectPathFromUrl($projectImportUrl);
- expect($projectPath.val()).toEqual('gitlab-ce');
+ expect($projectPath.value).toEqual('gitlab-ce');
});
});
});
@@ -152,27 +159,27 @@ describe('New Project', () => {
describe('deriveSlugFromProjectName', () => {
beforeEach(() => {
projectNew.bindEvents();
- $projectName.val('').keyup();
+ setValueAndTriggerEvent($projectName, '', mockKeyup);
});
it('converts project name to lower case and dash-limited slug', () => {
const dummyProjectName = 'My Awesome Project';
- $projectName.val(dummyProjectName);
+ $projectName.value = dummyProjectName;
projectNew.onProjectNameChange($projectName, $projectPath);
- expect($projectPath.val()).toEqual('my-awesome-project');
+ expect($projectPath.value).toEqual('my-awesome-project');
});
it('does not add additional dashes in the slug if the project name already contains dashes', () => {
const dummyProjectName = 'My-Dash-Delimited Awesome Project';
- $projectName.val(dummyProjectName);
+ $projectName.value = dummyProjectName;
projectNew.onProjectNameChange($projectName, $projectPath);
- expect($projectPath.val()).toEqual('my-dash-delimited-awesome-project');
+ expect($projectPath.value).toEqual('my-dash-delimited-awesome-project');
});
});
@@ -182,27 +189,28 @@ describe('New Project', () => {
beforeEach(() => {
projectNew.bindEvents();
- $projectPath.val('').change();
+ setValueAndTriggerEvent($projectPath, '', mockChange);
});
it('converts slug to humanized project name', () => {
- $projectPath.val(dummyProjectPath);
+ $projectPath.value = dummyProjectPath;
+ mockChange($projectPath);
projectNew.onProjectPathChange($projectName, $projectPath);
- expect($projectName.val()).toEqual('My Awesome Project');
+ expect($projectName.value).toEqual('My Awesome Project');
});
it('does not convert slug to humanized project name if a project name already exists', () => {
- $projectName.val(dummyProjectName);
- $projectPath.val(dummyProjectPath);
+ $projectName.value = dummyProjectName;
+ $projectPath.value = dummyProjectPath;
projectNew.onProjectPathChange(
$projectName,
$projectPath,
- $projectName.val().trim().length > 0,
+ $projectName.value.trim().length > 0,
);
- expect($projectName.val()).toEqual(dummyProjectName);
+ expect($projectName.value).toEqual(dummyProjectName);
});
});
});
diff --git a/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js b/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js
new file mode 100644
index 00000000000..5997c2a083c
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js
@@ -0,0 +1,101 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlDropdown, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import BranchDropdown, {
+ i18n,
+} from '~/projects/settings/branch_rules/components/branch_dropdown.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import branchesQuery from '~/projects/settings/branch_rules/queries/branches.query.graphql';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/flash';
+
+Vue.use(VueApollo);
+jest.mock('~/flash');
+
+describe('Branch dropdown', () => {
+ let wrapper;
+
+ const projectPath = 'test/project';
+ const value = 'main';
+ const mockBranchNames = ['test 1', 'test 2'];
+
+ const createComponent = async ({ branchNames = mockBranchNames, resolver } = {}) => {
+ const mockResolver =
+ resolver ||
+ jest.fn().mockResolvedValue({
+ data: { project: { id: '1', repository: { branchNames } } },
+ });
+ const apolloProvider = createMockApollo([[branchesQuery, mockResolver]]);
+
+ wrapper = shallowMountExtended(BranchDropdown, {
+ apolloProvider,
+ propsData: { projectPath, value },
+ });
+
+ await waitForPromises();
+ };
+
+ const findGlDropdown = () => wrapper.find(GlDropdown);
+ const findAllBranches = () => wrapper.findAll(GlDropdownItem);
+ const findNoDataMsg = () => wrapper.findByTestId('no-data');
+ const findGlSearchBoxByType = () => wrapper.find(GlSearchBoxByType);
+ const findWildcardButton = () => wrapper.findByTestId('create-wildcard-button');
+ const setSearchTerm = (searchTerm) => findGlSearchBoxByType().vm.$emit('input', searchTerm);
+
+ beforeEach(() => createComponent());
+
+ it('renders a GlDropdown component with the correct props', () => {
+ expect(findGlDropdown().props()).toMatchObject({ text: value });
+ });
+
+ it('renders GlDropdownItem components for each branch', () => {
+ expect(findAllBranches().length).toBe(mockBranchNames.length);
+
+ mockBranchNames.forEach((branchName, index) =>
+ expect(findAllBranches().at(index).text()).toBe(branchName),
+ );
+ });
+
+  it('emits `input` with the branch name when a branch is clicked', () => {
+ findAllBranches().at(0).vm.$emit('click');
+ expect(wrapper.emitted('input')).toEqual([[mockBranchNames[0]]]);
+ });
+
+ describe('branch searching', () => {
+ it('displays a message if no branches can be found', async () => {
+ await createComponent({ branchNames: [] });
+
+ expect(findNoDataMsg().text()).toBe(i18n.noMatch);
+ });
+
+ it('displays a loading state while search request is in flight', async () => {
+ setSearchTerm('test');
+ await nextTick();
+
+ expect(findGlSearchBoxByType().props()).toMatchObject({ isLoading: true });
+ });
+
+ it('renders a wildcard button', async () => {
+ const searchTerm = 'test-*';
+ setSearchTerm(searchTerm);
+ await nextTick();
+
+ expect(findWildcardButton().exists()).toBe(true);
+ findWildcardButton().vm.$emit('click');
+ expect(wrapper.emitted('createWildcard')).toEqual([[searchTerm]]);
+ });
+ });
+
+ it('displays an error message if fetch failed', async () => {
+ const error = new Error('an error occurred');
+ const resolver = jest.fn().mockRejectedValueOnce(error);
+ await createComponent({ resolver });
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: i18n.fetchBranchesError,
+ captureError: true,
+ error,
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js b/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js
new file mode 100644
index 00000000000..66ae6ddc02d
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js
@@ -0,0 +1,49 @@
+import { nextTick } from 'vue';
+import { getParameterByName } from '~/lib/utils/url_utility';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import RuleEdit from '~/projects/settings/branch_rules/components/rule_edit.vue';
+import BranchDropdown from '~/projects/settings/branch_rules/components/branch_dropdown.vue';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ getParameterByName: jest.fn().mockImplementation(() => 'main'),
+}));
+
+describe('Edit branch rule', () => {
+ let wrapper;
+ const projectPath = 'test/testing';
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(RuleEdit, { propsData: { projectPath } });
+ };
+
+ const findBranchDropdown = () => wrapper.find(BranchDropdown);
+
+ beforeEach(() => createComponent());
+
+ it('gets the branch param from url', () => {
+ expect(getParameterByName).toHaveBeenCalledWith('branch');
+ });
+
+ describe('BranchDropdown', () => {
+ it('renders a BranchDropdown component with the correct props', () => {
+ expect(findBranchDropdown().props()).toMatchObject({
+ projectPath,
+ value: 'main',
+ });
+ });
+
+ it('sets the correct value when `input` is emitted', async () => {
+ const branch = 'test';
+ findBranchDropdown().vm.$emit('input', branch);
+ await nextTick();
+ expect(findBranchDropdown().props('value')).toBe(branch);
+ });
+
+ it('sets the correct value when `createWildcard` is emitted', async () => {
+ const wildcard = 'test-*';
+ findBranchDropdown().vm.$emit('createWildcard', wildcard);
+ await nextTick();
+ expect(findBranchDropdown().props('value')).toBe(wildcard);
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings/repository/branch_rules/app_spec.js b/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
new file mode 100644
index 00000000000..e12c3aeedd6
--- /dev/null
+++ b/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
@@ -0,0 +1,18 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import BranchRules from '~/projects/settings/repository/branch_rules/app.vue';
+
+describe('Branch rules app', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mountExtended(BranchRules);
+ };
+
+ const findTitle = () => wrapper.find('strong');
+
+ beforeEach(() => createComponent());
+
+ it('renders a title', () => {
+ expect(findTitle().text()).toBe('Branch');
+ });
+});
diff --git a/spec/frontend/prometheus_metrics/custom_metrics_spec.js b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
index 473327bf5e1..fc906194059 100644
--- a/spec/frontend/prometheus_metrics/custom_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
@@ -6,9 +6,9 @@ import CustomMetrics from '~/prometheus_metrics/custom_metrics';
import { metrics1 as metrics } from './mock_data';
describe('PrometheusMetrics', () => {
- const FIXTURE = 'services/prometheus/prometheus_service.html';
+ const FIXTURE = 'integrations/prometheus/prometheus_integration.html';
const customMetricsEndpoint =
- 'http://test.host/frontend-fixtures/services-project/prometheus/metrics';
+ 'http://test.host/frontend-fixtures/integrations-project/prometheus/metrics';
let mock;
beforeEach(() => {
diff --git a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
index 1151c0b3769..0df2aad5882 100644
--- a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
@@ -7,7 +7,7 @@ import PrometheusMetrics from '~/prometheus_metrics/prometheus_metrics';
import { metrics2 as metrics, missingVarMetrics } from './mock_data';
describe('PrometheusMetrics', () => {
- const FIXTURE = 'services/prometheus/prometheus_service.html';
+ const FIXTURE = 'integrations/prometheus/prometheus_integration.html';
beforeEach(() => {
loadHTMLFixture(FIXTURE);
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 2ab4afbffbe..d498b6f0c4f 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -22,7 +22,7 @@ import userInfoQuery from '~/repository/queries/user_info.query.graphql';
import applicationInfoQuery from '~/repository/queries/application_info.query.graphql';
import CodeIntelligence from '~/code_navigation/components/app.vue';
import { redirectTo } from '~/lib/utils/url_utility';
-import { isLoggedIn } from '~/lib/utils/common_utils';
+import { isLoggedIn, handleLocationHash } from '~/lib/utils/common_utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import httpStatusCodes from '~/lib/utils/http_status';
import LineHighlighter from '~/blob/line_highlighter';
@@ -163,6 +163,14 @@ describe('Blob content viewer component', () => {
expect(findBlobHeader().props('blob')).toEqual(simpleViewerMock);
});
+ it('copies blob text to clipboard', async () => {
+ jest.spyOn(navigator.clipboard, 'writeText');
+ await createComponent();
+
+ findBlobHeader().vm.$emit('copy');
+ expect(navigator.clipboard.writeText).toHaveBeenCalledWith(simpleViewerMock.rawTextBlob);
+ });
+
it('renders a BlobContent component', async () => {
await createComponent();
@@ -209,6 +217,12 @@ describe('Blob content viewer component', () => {
await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
expect(LineHighlighter).toHaveBeenCalled();
});
+
+ it('scrolls to the hash', async () => {
+ mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
+ await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
+ expect(handleLocationHash).toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/repository/components/blob_viewers/sketch_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/sketch_viewer_spec.js
new file mode 100644
index 00000000000..b5c8c02c4a0
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/sketch_viewer_spec.js
@@ -0,0 +1,32 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import SketchViewer from '~/repository/components/blob_viewers/sketch_viewer.vue';
+import SketchLoader from '~/blob/sketch';
+
+jest.mock('~/blob/sketch');
+
+describe('Sketch Viewer', () => {
+ let wrapper;
+
+ const DEFAULT_BLOB_DATA = {
+ rawPath: 'some/file.sketch',
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(SketchViewer, {
+ propsData: { blob: DEFAULT_BLOB_DATA },
+ });
+ };
+
+ const findSketchWrapper = () => wrapper.findByTestId('sketch');
+
+ beforeEach(() => createComponent());
+
+ it('inits the sketch loader', () => {
+ expect(SketchLoader).toHaveBeenCalledWith(wrapper.vm.$refs.viewer);
+ });
+
+ it('renders the sketch viewer', () => {
+ expect(findSketchWrapper().exists()).toBe(true);
+ expect(findSketchWrapper().attributes('data-endpoint')).toBe(DEFAULT_BLOB_DATA.rawPath);
+ });
+});
diff --git a/spec/frontend/repository/components/new_directory_modal_spec.js b/spec/frontend/repository/components/new_directory_modal_spec.js
index fe7f024e3ea..e1c50d63851 100644
--- a/spec/frontend/repository/components/new_directory_modal_spec.js
+++ b/spec/frontend/repository/components/new_directory_modal_spec.js
@@ -67,7 +67,7 @@ describe('NewDirectoryModal', () => {
await findBranchName().vm.$emit('input', branchName);
await findCommitMessage().vm.$emit('input', commitMessage);
await findMrToggle().vm.$emit('change', createNewMr);
- await nextTick;
+ await nextTick();
};
const submitForm = async () => {
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index 07c151ad935..ff0371b5c07 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading, GlButton } from '@gitlab/ui';
+import { GlSkeletonLoader, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Table from '~/repository/components/table/index.vue';
@@ -103,7 +103,7 @@ describe('Repository table component', () => {
it('shows loading icon', () => {
factory({ path: '/', isLoading: true });
- expect(vm.find(GlSkeletonLoading).exists()).toBe(true);
+ expect(vm.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
it('renders table rows', () => {
diff --git a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
index 07259ec3538..28e7d192938 100644
--- a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
+++ b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
-import { mount, shallowMount } from '@vue/test-utils';
+import { GlTab, GlTabs } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert, VARIANT_SUCCESS } from '~/flash';
@@ -11,6 +12,7 @@ import RunnerHeader from '~/runner/components/runner_header.vue';
import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
+import RunnersJobs from '~/runner/components/runner_jobs.vue';
import runnerQuery from '~/runner/graphql/show/runner.query.graphql';
import AdminRunnerShowApp from '~/runner/admin_runner_show/admin_runner_show_app.vue';
import { captureException } from '~/runner/sentry_utils';
@@ -38,6 +40,8 @@ describe('AdminRunnerShowApp', () => {
const findRunnerDeleteButton = () => wrapper.findComponent(RunnerDeleteButton);
const findRunnerEditButton = () => wrapper.findComponent(RunnerEditButton);
const findRunnerPauseButton = () => wrapper.findComponent(RunnerPauseButton);
+ const findRunnersJobs = () => wrapper.findComponent(RunnersJobs);
+ const findJobCountBadge = () => wrapper.findByTestId('job-count-badge');
const mockRunnerQueryResult = (runner = {}) => {
mockRunnerQuery = jest.fn().mockResolvedValue({
@@ -47,7 +51,7 @@ describe('AdminRunnerShowApp', () => {
});
};
- const createComponent = ({ props = {}, mountFn = shallowMount } = {}) => {
+ const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
wrapper = mountFn(AdminRunnerShowApp, {
apolloProvider: createMockApollo([[runnerQuery, mockRunnerQuery]]),
propsData: {
@@ -55,6 +59,7 @@ describe('AdminRunnerShowApp', () => {
runnersPath: mockRunnersPath,
...props,
},
+ ...options,
});
return waitForPromises();
@@ -69,7 +74,7 @@ describe('AdminRunnerShowApp', () => {
beforeEach(async () => {
mockRunnerQueryResult();
- await createComponent({ mountFn: mount });
+ await createComponent({ mountFn: mountExtended });
});
it('expect GraphQL ID to be requested', async () => {
@@ -110,7 +115,7 @@ describe('AdminRunnerShowApp', () => {
});
await createComponent({
- mountFn: mount,
+ mountFn: mountExtended,
});
});
@@ -129,7 +134,7 @@ describe('AdminRunnerShowApp', () => {
});
await createComponent({
- mountFn: mount,
+ mountFn: mountExtended,
});
});
@@ -141,7 +146,7 @@ describe('AdminRunnerShowApp', () => {
describe('when runner is deleted', () => {
beforeEach(async () => {
await createComponent({
- mountFn: mount,
+ mountFn: mountExtended,
});
});
@@ -163,7 +168,7 @@ describe('AdminRunnerShowApp', () => {
});
await createComponent({
- mountFn: mount,
+ mountFn: mountExtended,
});
});
@@ -191,4 +196,49 @@ describe('AdminRunnerShowApp', () => {
expect(createAlert).toHaveBeenCalled();
});
});
+
+ describe('Jobs tab', () => {
+ const stubs = {
+ GlTab,
+ GlTabs,
+ RunnerDetails: {
+ template: `
+ <div>
+ <slot name="jobs-tab"></slot>
+ </div>
+ `,
+ },
+ };
+
+ it('without a runner, shows no jobs', () => {
+ mockRunnerQuery = jest.fn().mockResolvedValue({
+ data: {
+ runner: null,
+ },
+ });
+
+ createComponent({ stubs });
+
+ expect(findJobCountBadge().exists()).toBe(false);
+ expect(findRunnersJobs().exists()).toBe(false);
+ });
+
+ it('without a job count, shows no jobs count', async () => {
+ mockRunnerQueryResult({ jobCount: null });
+
+ await createComponent({ stubs });
+
+ expect(findJobCountBadge().exists()).toBe(false);
+ });
+
+ it('with a job count, shows jobs count', async () => {
+ const runner = { jobCount: 3 };
+ mockRunnerQueryResult(runner);
+
+ await createComponent({ stubs });
+
+ expect(findJobCountBadge().text()).toBe('3');
+ expect(findRunnersJobs().props('runner')).toEqual({ ...mockRunner, ...runner });
+ });
+ });
});
diff --git a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
index 405813be4e3..3d25ad075de 100644
--- a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
@@ -18,6 +18,7 @@ import AdminRunnersApp from '~/runner/admin_runners/admin_runners_app.vue';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
import RunnerList from '~/runner/components/runner_list.vue';
+import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
@@ -50,6 +51,8 @@ import {
runnersDataPaginated,
onlineContactTimeoutSecs,
staleTimeoutSecs,
+ emptyStateSvgPath,
+ emptyStateFilteredSvgPath,
} from '../mock_data';
const mockRegistrationToken = 'MOCK_REGISTRATION_TOKEN';
@@ -78,6 +81,7 @@ describe('AdminRunnersApp', () => {
const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown);
const findRunnerTypeTabs = () => wrapper.findComponent(RunnerTypeTabs);
const findRunnerList = () => wrapper.findComponent(RunnerList);
+ const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState);
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
@@ -106,6 +110,8 @@ describe('AdminRunnersApp', () => {
localMutations,
onlineContactTimeoutSecs,
staleTimeoutSecs,
+ emptyStateSvgPath,
+ emptyStateFilteredSvgPath,
...provide,
},
...options,
@@ -457,12 +463,28 @@ describe('AdminRunnersApp', () => {
runners: { nodes: [] },
},
});
+
createComponent();
await waitForPromises();
});
- it('shows a message for no results', async () => {
- expect(wrapper.text()).toContain('No runners found');
+ it('shows an empty state', () => {
+ expect(findRunnerListEmptyState().props('isSearchFiltered')).toBe(false);
+ });
+
+ describe('when a filter is selected by the user', () => {
+ beforeEach(async () => {
+ findRunnerFilteredSearchBar().vm.$emit('input', {
+ runnerType: null,
+ filters: [{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } }],
+ sort: CREATED_ASC,
+ });
+ await waitForPromises();
+ });
+
+ it('shows an empty state for a filtered search', () => {
+ expect(findRunnerListEmptyState().props('isSearchFiltered')).toBe(true);
+ });
});
});
diff --git a/spec/frontend/runner/components/__snapshots__/runner_status_popover_spec.js.snap b/spec/frontend/runner/components/__snapshots__/runner_status_popover_spec.js.snap
index 80a04401760..b27a1adf01b 100644
--- a/spec/frontend/runner/components/__snapshots__/runner_status_popover_spec.js.snap
+++ b/spec/frontend/runner/components/__snapshots__/runner_status_popover_spec.js.snap
@@ -1,3 +1,3 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`RunnerStatusPopover renders complete text 1`] = `"Never contacted: Runner has never contacted GitLab (when you register a runner, use gitlab-runner run to bring it online) Online: Runner has contacted GitLab within the last 2 hours Offline: Runner has not contacted GitLab in more than 2 hours Stale: Runner has not contacted GitLab in more than 2 months"`;
+exports[`RunnerStatusPopover renders complete text 1`] = `"Never contacted: Runner has never contacted GitLab (when you register a runner, use gitlab-runner run to bring it online) Online: Runner has contacted GitLab within the last 2 hours Offline: Runner has not contacted GitLab in more than 2 hours Stale: Runner has not contacted GitLab in more than 3 months"`;
diff --git a/spec/frontend/runner/components/cells/runner_status_cell_spec.js b/spec/frontend/runner/components/cells/runner_status_cell_spec.js
index 20a1cdf7236..0f5133d0ae2 100644
--- a/spec/frontend/runner/components/cells/runner_status_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_status_cell_spec.js
@@ -1,12 +1,15 @@
-import { GlBadge } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import RunnerStatusCell from '~/runner/components/cells/runner_status_cell.vue';
+
+import RunnerStatusBadge from '~/runner/components/runner_status_badge.vue';
+import RunnerPausedBadge from '~/runner/components/runner_paused_badge.vue';
import { INSTANCE_TYPE, STATUS_ONLINE, STATUS_OFFLINE } from '~/runner/constants';
-describe('RunnerTypeCell', () => {
+describe('RunnerStatusCell', () => {
let wrapper;
- const findBadgeAt = (i) => wrapper.findAllComponents(GlBadge).at(i);
+ const findStatusBadge = () => wrapper.findComponent(RunnerStatusBadge);
+ const findPausedBadge = () => wrapper.findComponent(RunnerPausedBadge);
const createComponent = ({ runner = {} } = {}) => {
wrapper = mount(RunnerStatusCell, {
@@ -29,7 +32,7 @@ describe('RunnerTypeCell', () => {
createComponent();
expect(wrapper.text()).toMatchInterpolatedText('online');
- expect(findBadgeAt(0).text()).toBe('online');
+ expect(findStatusBadge().text()).toBe('online');
});
it('Displays offline status', () => {
@@ -40,7 +43,7 @@ describe('RunnerTypeCell', () => {
});
expect(wrapper.text()).toMatchInterpolatedText('offline');
- expect(findBadgeAt(0).text()).toBe('offline');
+ expect(findStatusBadge().text()).toBe('offline');
});
it('Displays paused status', () => {
@@ -52,9 +55,7 @@ describe('RunnerTypeCell', () => {
});
expect(wrapper.text()).toMatchInterpolatedText('online paused');
-
- expect(findBadgeAt(0).text()).toBe('online');
- expect(findBadgeAt(1).text()).toBe('paused');
+ expect(findPausedBadge().text()).toBe('paused');
});
it('Is empty when data is missing', () => {
diff --git a/spec/frontend/runner/components/registration/registration_dropdown_spec.js b/spec/frontend/runner/components/registration/registration_dropdown_spec.js
index 81c2788f084..d3f38bc1d26 100644
--- a/spec/frontend/runner/components/registration/registration_dropdown_spec.js
+++ b/spec/frontend/runner/components/registration/registration_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem, GlDropdownForm } from '@gitlab/ui';
+import { GlModal, GlDropdown, GlDropdownItem, GlDropdownForm } from '@gitlab/ui';
import { mount, shallowMount, createWrapper } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
@@ -24,6 +24,8 @@ import {
const mockToken = '0123456789';
const maskToken = '**********';
+Vue.use(VueApollo);
+
describe('RegistrationDropdown', () => {
let wrapper;
@@ -32,9 +34,11 @@ describe('RegistrationDropdown', () => {
const findRegistrationInstructionsDropdownItem = () => wrapper.findComponent(GlDropdownItem);
const findTokenDropdownItem = () => wrapper.findComponent(GlDropdownForm);
const findRegistrationToken = () => wrapper.findComponent(RegistrationToken);
- const findRegistrationTokenInput = () => wrapper.findByTestId('token-value').find('input');
+ const findRegistrationTokenInput = () =>
+ wrapper.findByLabelText(RegistrationToken.i18n.registrationToken);
const findTokenResetDropdownItem = () =>
wrapper.findComponent(RegistrationTokenResetDropdownItem);
+ const findModal = () => wrapper.findComponent(GlModal);
const findModalContent = () =>
createWrapper(document.body)
.find('[data-testid="runner-instructions-modal"]')
@@ -43,6 +47,8 @@ describe('RegistrationDropdown', () => {
const openModal = async () => {
await findRegistrationInstructionsDropdownItem().trigger('click');
+ findModal().vm.$emit('shown');
+
await waitForPromises();
};
@@ -60,8 +66,6 @@ describe('RegistrationDropdown', () => {
};
const createComponentWithModal = () => {
- Vue.use(VueApollo);
-
const requestHandlers = [
[getRunnerPlatformsQuery, jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms)],
[getRunnerSetupInstructionsQuery, jest.fn().mockResolvedValue(mockGraphqlInstructions)],
@@ -169,10 +173,10 @@ describe('RegistrationDropdown', () => {
await nextTick();
};
- it('Updates token in input', async () => {
+ it('Updates token input', async () => {
createComponent({}, mount);
- expect(findRegistrationTokenInput().props('value')).not.toBe(newToken);
+ expect(findRegistrationToken().props('value')).not.toBe(newToken);
await resetToken();
diff --git a/spec/frontend/runner/components/registration/registration_token_spec.js b/spec/frontend/runner/components/registration/registration_token_spec.js
index cb42c7c8493..ed1a698d36f 100644
--- a/spec/frontend/runner/components/registration/registration_token_spec.js
+++ b/spec/frontend/runner/components/registration/registration_token_spec.js
@@ -29,6 +29,7 @@ describe('RegistrationToken', () => {
wrapper = mountFn(RegistrationToken, {
propsData: {
value: mockToken,
+ inputId: 'token-value',
...props,
},
localVue,
diff --git a/spec/frontend/runner/components/runner_details_spec.js b/spec/frontend/runner/components/runner_details_spec.js
index 162d21febfd..9e0f7014750 100644
--- a/spec/frontend/runner/components/runner_details_spec.js
+++ b/spec/frontend/runner/components/runner_details_spec.js
@@ -1,14 +1,13 @@
-import { GlSprintf, GlIntersperse, GlTab } from '@gitlab/ui';
-import { createWrapper, ErrorWrapper } from '@vue/test-utils';
+import { GlSprintf, GlIntersperse } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import { useFakeDate } from 'helpers/fake_date';
+import { findDd } from 'helpers/dl_locator_helper';
import { ACCESS_LEVEL_REF_PROTECTED, ACCESS_LEVEL_NOT_PROTECTED } from '~/runner/constants';
import RunnerDetails from '~/runner/components/runner_details.vue';
import RunnerDetail from '~/runner/components/runner_detail.vue';
import RunnerGroups from '~/runner/components/runner_groups.vue';
-import RunnersJobs from '~/runner/components/runner_jobs.vue';
import RunnerTags from '~/runner/components/runner_tags.vue';
import RunnerTag from '~/runner/components/runner_tag.vue';
@@ -24,25 +23,14 @@ describe('RunnerDetails', () => {
useFakeDate(mockNow);
- /**
- * Find the definition (<dd>) that corresponds to this term (<dt>)
- * @param {string} dtLabel - Label for this value
- * @returns Wrapper
- */
- const findDd = (dtLabel) => {
- const dt = wrapper.findByText(dtLabel).element;
- const dd = dt.nextElementSibling;
- if (dt.tagName === 'DT' && dd.tagName === 'DD') {
- return createWrapper(dd, {});
- }
- return ErrorWrapper(dtLabel);
- };
-
const findDetailGroups = () => wrapper.findComponent(RunnerGroups);
- const findRunnersJobs = () => wrapper.findComponent(RunnersJobs);
- const findJobCountBadge = () => wrapper.findByTestId('job-count-badge');
- const createComponent = ({ props = {}, mountFn = shallowMountExtended, stubs } = {}) => {
+ const createComponent = ({
+ props = {},
+ stubs,
+ mountFn = shallowMountExtended,
+ ...options
+ } = {}) => {
wrapper = mountFn(RunnerDetails, {
propsData: {
...props,
@@ -51,6 +39,7 @@ describe('RunnerDetails', () => {
RunnerDetail,
...stubs,
},
+ ...options,
});
};
@@ -108,7 +97,7 @@ describe('RunnerDetails', () => {
});
it(`displays expected value "${expectedValue}"`, () => {
- expect(findDd(field).text()).toBe(expectedValue);
+ expect(findDd(field, wrapper).text()).toBe(expectedValue);
});
});
@@ -123,7 +112,7 @@ describe('RunnerDetails', () => {
stubs,
});
- expect(findDd('Tags').text().replace(/\s+/g, ' ')).toBe('tag-1 tag-2');
+ expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('tag-1 tag-2');
});
it('displays "None" when runner has no tags', () => {
@@ -134,7 +123,7 @@ describe('RunnerDetails', () => {
stubs,
});
- expect(findDd('Tags').text().replace(/\s+/g, ' ')).toBe('None');
+ expect(findDd('Tags', wrapper).text().replace(/\s+/g, ' ')).toBe('None');
});
});
@@ -153,40 +142,17 @@ describe('RunnerDetails', () => {
});
});
- describe('Jobs tab', () => {
- const stubs = { GlTab };
-
- it('without a runner, shows no jobs', () => {
- createComponent({
- props: { runner: null },
- stubs,
- });
-
- expect(findJobCountBadge().exists()).toBe(false);
- expect(findRunnersJobs().exists()).toBe(false);
- });
+ describe('Jobs tab slot', () => {
+ it('shows job tab slot', () => {
+ const JOBS_TAB = '<div>Jobs Tab</div>';
- it('without a job count, shows no jobs count', () => {
createComponent({
- props: {
- runner: { ...mockRunner, jobCount: undefined },
+ slots: {
+ 'jobs-tab': JOBS_TAB,
},
- stubs,
- });
-
- expect(findJobCountBadge().exists()).toBe(false);
- });
-
- it('with a job count, shows jobs count', () => {
- const runner = { ...mockRunner, jobCount: 3 };
-
- createComponent({
- props: { runner },
- stubs,
});
- expect(findJobCountBadge().text()).toBe('3');
- expect(findRunnersJobs().props('runner')).toBe(runner);
+ expect(wrapper.html()).toContain(JOBS_TAB);
});
});
});
diff --git a/spec/frontend/runner/components/runner_jobs_spec.js b/spec/frontend/runner/components/runner_jobs_spec.js
index 8ac5685a0dd..20582aaaf40 100644
--- a/spec/frontend/runner/components/runner_jobs_spec.js
+++ b/spec/frontend/runner/components/runner_jobs_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlSkeletonLoader } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -28,7 +28,7 @@ describe('RunnerJobs', () => {
let wrapper;
let mockRunnerJobsQuery;
- const findGlSkeletonLoading = () => wrapper.findComponent(GlSkeletonLoading);
+ const findGlSkeletonLoading = () => wrapper.findComponent(GlSkeletonLoader);
const findRunnerJobsTable = () => wrapper.findComponent(RunnerJobsTable);
const findRunnerPagination = () => wrapper.findComponent(RunnerPagination);
diff --git a/spec/frontend/runner/components/runner_list_empty_state_spec.js b/spec/frontend/runner/components/runner_list_empty_state_spec.js
new file mode 100644
index 00000000000..59cff863106
--- /dev/null
+++ b/spec/frontend/runner/components/runner_list_empty_state_spec.js
@@ -0,0 +1,76 @@
+import { GlEmptyState, GlLink, GlSprintf } from '@gitlab/ui';
+import { s__ } from '~/locale';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
+
+import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
+
+const mockSvgPath = 'mock-svg-path.svg';
+const mockFilteredSvgPath = 'mock-filtered-svg-path.svg';
+
+describe('RunnerListEmptyState', () => {
+ let wrapper;
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findRunnerInstructionsModal = () => wrapper.findComponent(RunnerInstructionsModal);
+
+ const createComponent = ({ props, mountFn = shallowMountExtended } = {}) => {
+ wrapper = mountFn(RunnerListEmptyState, {
+ propsData: {
+ svgPath: mockSvgPath,
+ filteredSvgPath: mockFilteredSvgPath,
+ ...props,
+ },
+ directives: {
+ GlModal: createMockDirective(),
+ },
+ stubs: {
+ GlEmptyState,
+ GlSprintf,
+ GlLink,
+ },
+ });
+ };
+
+ describe('when search is not filtered', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders an illustration', () => {
+ expect(findEmptyState().props('svgPath')).toBe(mockSvgPath);
+ });
+
+ it('displays "no results" text', () => {
+ const title = s__('Runners|Get started with runners');
+ const desc = s__(
+ 'Runners|Runners are the agents that run your CI/CD jobs. Follow the %{linkStart}installation and registration instructions%{linkEnd} to set up a runner.',
+ );
+
+ expect(findEmptyState().text()).toMatchInterpolatedText(`${title} ${desc}`);
+ });
+
+ it('opens a runner registration instructions modal with a link', () => {
+ const { value } = getBinding(findLink().element, 'gl-modal');
+
+ expect(findRunnerInstructionsModal().props('modalId')).toEqual(value);
+ });
+ });
+
+ describe('when search is filtered', () => {
+ beforeEach(() => {
+ createComponent({ props: { isSearchFiltered: true } });
+ });
+
+ it('renders a "filtered search" illustration', () => {
+ expect(findEmptyState().props('svgPath')).toBe(mockFilteredSvgPath);
+ });
+
+ it('displays "no filtered results" text', () => {
+ expect(findEmptyState().text()).toContain(s__('Runners|No results found'));
+ expect(findEmptyState().text()).toContain(s__('Runners|Edit your search and try again'));
+ });
+ });
+});
diff --git a/spec/frontend/runner/components/runner_projects_spec.js b/spec/frontend/runner/components/runner_projects_spec.js
index 04627e2307b..6932b3b5197 100644
--- a/spec/frontend/runner/components/runner_projects_spec.js
+++ b/spec/frontend/runner/components/runner_projects_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlSkeletonLoader } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
@@ -34,7 +34,7 @@ describe('RunnerProjects', () => {
let mockRunnerProjectsQuery;
const findHeading = () => wrapper.find('h3');
- const findGlSkeletonLoading = () => wrapper.findComponent(GlSkeletonLoading);
+ const findGlSkeletonLoading = () => wrapper.findComponent(GlSkeletonLoader);
const findRunnerAssignedItems = () => wrapper.findAllComponents(RunnerAssignedItem);
const findRunnerPagination = () => wrapper.findComponent(RunnerPagination);
diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js
index 52bd51a974b..eb9f85a7d0f 100644
--- a/spec/frontend/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js
@@ -16,6 +16,7 @@ import { updateHistory } from '~/lib/utils/url_utility';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
import RunnerList from '~/runner/components/runner_list.vue';
+import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
@@ -48,6 +49,8 @@ import {
groupRunnersCountData,
onlineContactTimeoutSecs,
staleTimeoutSecs,
+ emptyStateSvgPath,
+ emptyStateFilteredSvgPath,
} from '../mock_data';
Vue.use(VueApollo);
@@ -75,6 +78,7 @@ describe('GroupRunnersApp', () => {
const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown);
const findRunnerTypeTabs = () => wrapper.findComponent(RunnerTypeTabs);
const findRunnerList = () => wrapper.findComponent(RunnerList);
+ const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState);
const findRunnerRow = (id) => extendedWrapper(wrapper.findByTestId(`runner-row-${id}`));
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
@@ -103,6 +107,8 @@ describe('GroupRunnersApp', () => {
provide: {
onlineContactTimeoutSecs,
staleTimeoutSecs,
+ emptyStateSvgPath,
+ emptyStateFilteredSvgPath,
},
});
};
@@ -388,8 +394,8 @@ describe('GroupRunnersApp', () => {
await waitForPromises();
});
- it('shows a message for no results', async () => {
- expect(wrapper.text()).toContain('No runners found');
+ it('shows an empty state', async () => {
+ expect(findRunnerListEmptyState().exists()).toBe(true);
});
});
diff --git a/spec/frontend/runner/mock_data.js b/spec/frontend/runner/mock_data.js
index 1c2333b552c..3368fc21544 100644
--- a/spec/frontend/runner/mock_data.js
+++ b/spec/frontend/runner/mock_data.js
@@ -19,7 +19,10 @@ import groupRunnersCountData from 'test_fixtures/graphql/runner/list/group_runne
// Other mock data
export const onlineContactTimeoutSecs = 2 * 60 * 60;
-export const staleTimeoutSecs = 5259492; // Ruby's `2.months`
+export const staleTimeoutSecs = 7889238; // Ruby's `3.months`
+
+export const emptyStateSvgPath = 'emptyStateSvgPath.svg';
+export const emptyStateFilteredSvgPath = 'emptyStateFilteredSvgPath.svg';
export {
runnersData,
diff --git a/spec/frontend/runner/runner_search_utils_spec.js b/spec/frontend/runner/runner_search_utils_spec.js
index a3c1458ed26..1f102f86b2a 100644
--- a/spec/frontend/runner/runner_search_utils_spec.js
+++ b/spec/frontend/runner/runner_search_utils_spec.js
@@ -5,6 +5,7 @@ import {
fromUrlQueryToSearch,
fromSearchToUrl,
fromSearchToVariables,
+ isSearchFiltered,
} from '~/runner/runner_search_utils';
describe('search_params.js', () => {
@@ -14,6 +15,7 @@ describe('search_params.js', () => {
urlQuery: '',
search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ isDefault: true,
},
{
name: 'a single status',
@@ -268,7 +270,7 @@ describe('search_params.js', () => {
describe('fromSearchToUrl', () => {
examples.forEach(({ name, urlQuery, search }) => {
it(`Converts ${name} to a url`, () => {
- expect(fromSearchToUrl(search)).toEqual(`http://test.host/${urlQuery}`);
+ expect(fromSearchToUrl(search)).toBe(`http://test.host/${urlQuery}`);
});
});
@@ -280,7 +282,7 @@ describe('search_params.js', () => {
const search = { filters: [], sort: 'CREATED_DESC' };
const expectedUrl = `http://test.host/`;
- expect(fromSearchToUrl(search, initalUrl)).toEqual(expectedUrl);
+ expect(fromSearchToUrl(search, initalUrl)).toBe(expectedUrl);
});
it('When unrelated search parameter is present, it does not get removed', () => {
@@ -288,7 +290,7 @@ describe('search_params.js', () => {
const search = { filters: [], sort: 'CREATED_DESC' };
const expectedUrl = `http://test.host/?unrelated=UNRELATED`;
- expect(fromSearchToUrl(search, initialUrl)).toEqual(expectedUrl);
+ expect(fromSearchToUrl(search, initialUrl)).toBe(expectedUrl);
});
});
@@ -331,4 +333,16 @@ describe('search_params.js', () => {
});
});
});
+
+ describe('isSearchFiltered', () => {
+ examples.forEach(({ name, search, isDefault }) => {
+ it(`Given ${name}, evaluates to ${isDefault ? 'not ' : ''}filtered`, () => {
+ expect(isSearchFiltered(search)).toBe(!isDefault);
+ });
+ });
+
+ it('given a missing pagination, evaluates as not filtered', () => {
+ expect(isSearchFiltered({ pagination: null })).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 67bd3194f20..2f93d3f6805 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -121,19 +121,12 @@ describe('Global Search Store Actions', () => {
describe('when groupId is set', () => {
it('calls Api.groupProjects with expected parameters', () => {
- const callbackTest = jest.fn();
- actions.fetchProjects({ commit: mockCommit, state }, undefined, callbackTest);
- expect(Api.groupProjects).toHaveBeenCalledWith(
- state.query.group_id,
- state.query.search,
- {
- order_by: 'similarity',
- include_subgroups: true,
- with_shared: false,
- },
- callbackTest,
- true,
- );
+ actions.fetchProjects({ commit: mockCommit, state }, undefined);
+ expect(Api.groupProjects).toHaveBeenCalledWith(state.query.group_id, state.query.search, {
+ order_by: 'similarity',
+ include_subgroups: true,
+ with_shared: false,
+ });
expect(Api.projects).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/search_autocomplete_spec.js b/spec/frontend/search_autocomplete_spec.js
index 4639552b4d3..266f047e9dc 100644
--- a/spec/frontend/search_autocomplete_spec.js
+++ b/spec/frontend/search_autocomplete_spec.js
@@ -53,7 +53,7 @@ describe('Search autocomplete dropdown', () => {
};
const disableProjectIssues = () => {
- document.querySelector('.js-search-project-options').setAttribute('data-issues-disabled', true);
+ document.querySelector('.js-search-project-options').dataset.issuesDisabled = true;
};
// Mock `gl` object in window for dashboard specific page. App code will need it.
diff --git a/spec/frontend/security_configuration/components/app_spec.js b/spec/frontend/security_configuration/components/app_spec.js
index d7d46d0d415..de91e51924d 100644
--- a/spec/frontend/security_configuration/components/app_spec.js
+++ b/spec/frontend/security_configuration/components/app_spec.js
@@ -2,7 +2,6 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { GlTab, GlTabs, GlLink } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
import stubChildren from 'helpers/stub_children';
@@ -20,22 +19,14 @@ import {
LICENSE_COMPLIANCE_DESCRIPTION,
LICENSE_COMPLIANCE_HELP_PATH,
AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY,
- LICENSE_ULTIMATE,
- LICENSE_PREMIUM,
- LICENSE_FREE,
} from '~/security_configuration/components/constants';
import FeatureCard from '~/security_configuration/components/feature_card.vue';
import TrainingProviderList from '~/security_configuration/components/training_provider_list.vue';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import currentLicenseQuery from '~/security_configuration/graphql/current_license.query.graphql';
-import waitForPromises from 'helpers/wait_for_promises';
-
import UpgradeBanner from '~/security_configuration/components/upgrade_banner.vue';
import {
REPORT_TYPE_LICENSE_COMPLIANCE,
REPORT_TYPE_SAST,
} from '~/vue_shared/security_reports/constants';
-import { getCurrentLicensePlanResponse } from '../mock_data';
const upgradePath = '/upgrade';
const autoDevopsHelpPagePath = '/autoDevopsHelpPagePath';
@@ -50,31 +41,16 @@ Vue.use(VueApollo);
describe('App component', () => {
let wrapper;
let userCalloutDismissSpy;
- let mockApollo;
- const createComponent = ({
- shouldShowCallout = true,
- licenseQueryResponse = LICENSE_ULTIMATE,
- ...propsData
- }) => {
+ const createComponent = ({ shouldShowCallout = true, ...propsData }) => {
userCalloutDismissSpy = jest.fn();
- mockApollo = createMockApollo([
- [
- currentLicenseQuery,
- jest
- .fn()
- .mockResolvedValue(
- licenseQueryResponse instanceof Error
- ? licenseQueryResponse
- : getCurrentLicensePlanResponse(licenseQueryResponse),
- ),
- ],
- ]);
-
wrapper = extendedWrapper(
mount(SecurityConfigurationApp, {
- propsData,
+ propsData: {
+ securityTrainingEnabled: true,
+ ...propsData,
+ },
provide: {
upgradePath,
autoDevopsHelpPagePath,
@@ -82,7 +58,6 @@ describe('App component', () => {
projectFullPath,
vulnerabilityTrainingDocsPath,
},
- apolloProvider: mockApollo,
stubs: {
...stubChildren(SecurityConfigurationApp),
GlLink: false,
@@ -157,7 +132,6 @@ describe('App component', () => {
afterEach(() => {
wrapper.destroy();
- mockApollo = null;
});
describe('basic structure', () => {
@@ -166,7 +140,6 @@ describe('App component', () => {
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
});
- await waitForPromises();
});
it('renders main-heading with correct text', () => {
@@ -469,47 +442,42 @@ describe('App component', () => {
});
describe('Vulnerability management', () => {
- beforeEach(async () => {
+ it('does not show tab if security training is disabled', () => {
createComponent({
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
+ securityTrainingEnabled: false,
});
- await waitForPromises();
- });
- it('renders TrainingProviderList component', () => {
- expect(findTrainingProviderList().exists()).toBe(true);
+ expect(findVulnerabilityManagementTab().exists()).toBe(false);
});
- it('renders security training description', () => {
- expect(findVulnerabilityManagementTab().text()).toContain(i18n.securityTrainingDescription);
- });
-
- it('renders link to help docs', () => {
- const trainingLink = findVulnerabilityManagementTab().findComponent(GlLink);
-
- expect(trainingLink.text()).toBe('Learn more about vulnerability training');
- expect(trainingLink.attributes('href')).toBe(vulnerabilityTrainingDocsPath);
- });
-
- it.each`
- licenseQueryResponse | display
- ${LICENSE_ULTIMATE} | ${true}
- ${LICENSE_PREMIUM} | ${false}
- ${LICENSE_FREE} | ${false}
- ${null} | ${true}
- ${new Error()} | ${true}
- `(
- 'displays $display for license $licenseQueryResponse',
- async ({ licenseQueryResponse, display }) => {
+ describe('security training enabled', () => {
+ beforeEach(async () => {
createComponent({
- licenseQueryResponse,
augmentedSecurityFeatures: securityFeaturesMock,
augmentedComplianceFeatures: complianceFeaturesMock,
});
- await waitForPromises();
- expect(findVulnerabilityManagementTab().exists()).toBe(display);
- },
- );
+ });
+
+ it('shows the tab if security training is enabled', () => {
+ expect(findVulnerabilityManagementTab().exists()).toBe(true);
+ });
+
+ it('renders TrainingProviderList component', () => {
+ expect(findTrainingProviderList().exists()).toBe(true);
+ });
+
+ it('renders security training description', () => {
+ expect(findVulnerabilityManagementTab().text()).toContain(i18n.securityTrainingDescription);
+ });
+
+ it('renders link to help docs', () => {
+ const trainingLink = findVulnerabilityManagementTab().findComponent(GlLink);
+
+ expect(trainingLink.text()).toBe('Learn more about vulnerability training');
+ expect(trainingLink.attributes('href')).toBe(vulnerabilityTrainingDocsPath);
+ });
+ });
});
});
diff --git a/spec/frontend/security_configuration/mock_data.js b/spec/frontend/security_configuration/mock_data.js
index 94a36472a1d..18a480bf082 100644
--- a/spec/frontend/security_configuration/mock_data.js
+++ b/spec/frontend/security_configuration/mock_data.js
@@ -111,12 +111,3 @@ export const tempProviderLogos = {
svg: `<svg>${[testProviderName[1]]}</svg>`,
},
};
-
-export const getCurrentLicensePlanResponse = (plan) => ({
- data: {
- currentLicense: {
- id: 'gid://gitlab/License/1',
- plan,
- },
- },
-});
diff --git a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
index 69f6a6e6e04..a286eeef14f 100644
--- a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
@@ -1,5 +1,8 @@
import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
+import { TYPE_USER } from '~/graphql_shared/constants';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
import AssigneeAvatar from '~/sidebar/components/assignees/assignee_avatar.vue';
import AssigneeAvatarLink from '~/sidebar/components/assignees/assignee_avatar_link.vue';
import userDataMock from '../../user_data_mock';
@@ -32,6 +35,7 @@ describe('AssigneeAvatarLink component', () => {
});
const findTooltipText = () => wrapper.attributes('title');
+ const findUserLink = () => wrapper.findComponent(GlLink);
it('has the root url present in the assigneeUrl method', () => {
createComponent();
@@ -112,4 +116,24 @@ describe('AssigneeAvatarLink component', () => {
});
},
);
+
+ it('passes the correct user id for REST API', () => {
+ createComponent({
+ tooltipHasName: true,
+ user: userDataMock(),
+ });
+
+ expect(findUserLink().attributes('data-user-id')).toBe(String(userDataMock().id));
+ });
+
+ it('passes the correct user id for GraphQL API', () => {
+ const userId = userDataMock().id;
+
+ createComponent({
+ tooltipHasName: true,
+ user: { ...userDataMock(), id: convertToGraphQLId(TYPE_USER, userId) },
+ });
+
+ expect(findUserLink().attributes('data-user-id')).toBe(String(userId));
+ });
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
index c870bbecd76..724fba62479 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
@@ -72,7 +72,7 @@ describe('boards sidebar remove issue', () => {
createComponent({ canUpdate: true, slots });
findEditButton().vm.$emit('click');
- await nextTick;
+ await nextTick();
expect(findCollapsed().isVisible()).toBe(false);
expect(findExpanded().isVisible()).toBe(true);
diff --git a/spec/frontend/sidebar/components/attention_requested_toggle_spec.js b/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
index 959fa799eb7..58fa878a189 100644
--- a/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
+++ b/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
@@ -41,18 +41,18 @@ describe('Attention require toggle', () => {
);
it.each`
- attentionRequested | variant
- ${true} | ${'warning'}
- ${false} | ${'default'}
+ attentionRequested | selected
+ ${true} | ${true}
+ ${false} | ${false}
`(
- 'renders button with variant $variant when attention_requested is $attentionRequested',
- ({ attentionRequested, variant }) => {
+      'renders button with selected=$selected when attention_requested is $attentionRequested',
+ ({ attentionRequested, selected }) => {
factory({
type: 'reviewer',
user: { attention_requested: attentionRequested, can_update_merge_request: true },
});
- expect(findToggle().props('variant')).toBe(variant);
+ expect(findToggle().props('selected')).toBe(selected);
},
);
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_content_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_content_spec.js
index ab45fdf03bc..81354d64a90 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_content_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_content_spec.js
@@ -69,14 +69,14 @@ describe('Sidebar Confidentiality Content', () => {
variant: 'warning',
});
expect(alertEl.text()).toBe(
- 'Only project members with at least Reporter role can view or be notified about this issue.',
+ 'Only project members with at least the Reporter role, the author, and assignees can view or be notified about this issue.',
);
});
it('displays a correct confidential text for epic', () => {
createComponent({ confidential: true, issuableType: 'epic' });
expect(findText().findComponent(GlAlert).text()).toBe(
- 'Only group members with at least Reporter role can view or be notified about this epic.',
+ 'Only group members with at least the Reporter role can view or be notified about this epic.',
);
});
});
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
index 85d6bc7b782..1ea035c7184 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
@@ -89,7 +89,7 @@ describe('Sidebar Confidentiality Form', () => {
it('renders a message about making an issue confidential', () => {
expect(findWarningMessage().text()).toBe(
- 'You are going to turn on confidentiality. Only project members with at least Reporter role can view or be notified about this issue.',
+ 'You are going to turn on confidentiality. Only project members with at least the Reporter role, the author, and assignees can view or be notified about this issue.',
);
});
diff --git a/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
index a8dc610672c..88a4913a27f 100644
--- a/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
+++ b/spec/frontend/sidebar/components/incidents/sidebar_escalation_status_spec.js
@@ -1,6 +1,12 @@
import { createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
+import {
+ fetchData,
+ fetchError,
+ mutationData,
+ mutationError,
+} from 'ee_else_ce_jest/sidebar/components/incidents/mock_data';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@@ -12,7 +18,6 @@ import EscalationStatus from 'ee_else_ce/sidebar/components/incidents/escalation
import { STATUS_ACKNOWLEDGED } from '~/sidebar/components/incidents/constants';
import { createAlert } from '~/flash';
import { logError } from '~/lib/logger';
-import { fetchData, fetchError, mutationData, mutationError } from './mock_data';
jest.mock('~/lib/logger');
jest.mock('~/flash');
diff --git a/spec/frontend/sidebar/components/time_tracking/mock_data.js b/spec/frontend/sidebar/components/time_tracking/mock_data.js
index ba2781118d9..f161ae677d0 100644
--- a/spec/frontend/sidebar/components/time_tracking/mock_data.js
+++ b/spec/frontend/sidebar/components/time_tracking/mock_data.js
@@ -1,3 +1,5 @@
+export const timelogToRemoveId = 'gid://gitlab/Timelog/18';
+
export const getIssueTimelogsQueryResponse = {
data: {
issuable: {
@@ -9,7 +11,7 @@ export const getIssueTimelogsQueryResponse = {
nodes: [
{
__typename: 'Timelog',
- id: 'gid://gitlab/Timelog/18',
+ id: timelogToRemoveId,
timeSpent: 14400,
user: {
id: 'user-1',
@@ -23,6 +25,10 @@ export const getIssueTimelogsQueryResponse = {
__typename: 'Note',
},
summary: 'A summary',
+ userPermissions: {
+ adminTimelog: true,
+ __typename: 'TimelogPermissions',
+ },
},
{
__typename: 'Timelog',
@@ -36,6 +42,10 @@ export const getIssueTimelogsQueryResponse = {
spentAt: '2021-05-07T13:19:01Z',
note: null,
summary: 'A summary',
+ userPermissions: {
+ adminTimelog: false,
+ __typename: 'TimelogPermissions',
+ },
},
{
__typename: 'Timelog',
@@ -53,6 +63,10 @@ export const getIssueTimelogsQueryResponse = {
__typename: 'Note',
},
summary: null,
+ userPermissions: {
+ adminTimelog: false,
+ __typename: 'TimelogPermissions',
+ },
},
],
__typename: 'TimelogConnection',
@@ -85,6 +99,10 @@ export const getMrTimelogsQueryResponse = {
__typename: 'Note',
},
summary: null,
+ userPermissions: {
+ adminTimelog: true,
+ __typename: 'TimelogPermissions',
+ },
},
{
__typename: 'Timelog',
@@ -98,6 +116,10 @@ export const getMrTimelogsQueryResponse = {
spentAt: '2021-05-07T14:44:39Z',
note: null,
summary: null,
+ userPermissions: {
+ adminTimelog: true,
+ __typename: 'TimelogPermissions',
+ },
},
{
__typename: 'Timelog',
@@ -115,6 +137,10 @@ export const getMrTimelogsQueryResponse = {
__typename: 'Note',
},
summary: null,
+ userPermissions: {
+ adminTimelog: true,
+ __typename: 'TimelogPermissions',
+ },
},
],
__typename: 'TimelogConnection',
diff --git a/spec/frontend/sidebar/components/time_tracking/report_spec.js b/spec/frontend/sidebar/components/time_tracking/report_spec.js
index 2b17e6dd6c3..5ed8810e95e 100644
--- a/spec/frontend/sidebar/components/time_tracking/report_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/report_spec.js
@@ -1,15 +1,21 @@
import { GlLoadingIcon } from '@gitlab/ui';
-import { getAllByRole, getByRole } from '@testing-library/dom';
+import { getAllByRole, getByRole, getAllByTestId } from '@testing-library/dom';
import { shallowMount, mount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import Report from '~/sidebar/components/time_tracking/report.vue';
import getIssueTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_issue_timelogs.query.graphql';
import getMrTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_mr_timelogs.query.graphql';
-import { getIssueTimelogsQueryResponse, getMrTimelogsQueryResponse } from './mock_data';
+import deleteTimelogMutation from '~/sidebar/components/time_tracking/graphql/mutations/delete_timelog.mutation.graphql';
+import {
+ getIssueTimelogsQueryResponse,
+ getMrTimelogsQueryResponse,
+ timelogToRemoveId,
+} from './mock_data';
jest.mock('~/flash');
@@ -18,6 +24,7 @@ describe('Issuable Time Tracking Report', () => {
let wrapper;
let fakeApollo;
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findDeleteButton = () => wrapper.findByTestId('deleteButton');
const successIssueQueryHandler = jest.fn().mockResolvedValue(getIssueTimelogsQueryResponse);
const successMrQueryHandler = jest.fn().mockResolvedValue(getMrTimelogsQueryResponse);
@@ -31,14 +38,16 @@ describe('Issuable Time Tracking Report', () => {
[getIssueTimelogsQuery, queryHandler],
[getMrTimelogsQuery, queryHandler],
]);
- wrapper = mountFunction(Report, {
- provide: {
- issuableId: 1,
- issuableType,
- },
- propsData: { limitToHours, issuableId: '1' },
- apolloProvider: fakeApollo,
- });
+ wrapper = extendedWrapper(
+ mountFunction(Report, {
+ provide: {
+ issuableId: 1,
+ issuableType,
+ },
+ propsData: { limitToHours, issuableId: '1' },
+ apolloProvider: fakeApollo,
+ }),
+ );
};
afterEach(() => {
@@ -75,6 +84,7 @@ describe('Issuable Time Tracking Report', () => {
expect(getAllByRole(wrapper.element, 'row', { name: /Administrator/i })).toHaveLength(2);
expect(getAllByRole(wrapper.element, 'row', { name: /A note/i })).toHaveLength(1);
expect(getAllByRole(wrapper.element, 'row', { name: /A summary/i })).toHaveLength(2);
+ expect(getAllByTestId(wrapper.element, 'deleteButton')).toHaveLength(1);
});
});
@@ -95,6 +105,7 @@ describe('Issuable Time Tracking Report', () => {
await waitForPromises();
expect(getAllByRole(wrapper.element, 'row', { name: /Administrator/i })).toHaveLength(3);
+ expect(getAllByTestId(wrapper.element, 'deleteButton')).toHaveLength(3);
});
});
@@ -123,4 +134,59 @@ describe('Issuable Time Tracking Report', () => {
});
});
});
+
+ describe('when clicking on the delete timelog button', () => {
+ beforeEach(() => {
+ mountComponent({ mountFunction: mount });
+ });
+
+    it('calls `$apollo.mutate` with deleteTimelogMutation and removes the row', async () => {
+ const mutateSpy = jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
+ data: {
+ timelogDelete: {
+ errors: [],
+ },
+ },
+ });
+
+ await waitForPromises();
+ await findDeleteButton().trigger('click');
+ await waitForPromises();
+
+ expect(createFlash).not.toHaveBeenCalled();
+ expect(mutateSpy).toHaveBeenCalledWith({
+ mutation: deleteTimelogMutation,
+ variables: {
+ input: {
+ id: timelogToRemoveId,
+ },
+ },
+ update: expect.anything(),
+ });
+ });
+
+    it('calls `createFlash` with an error message and does not remove the row on promise rejection', async () => {
+ const mutateSpy = jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({});
+
+ await waitForPromises();
+ await findDeleteButton().trigger('click');
+ await waitForPromises();
+
+ expect(mutateSpy).toHaveBeenCalledWith({
+ mutation: deleteTimelogMutation,
+ variables: {
+ input: {
+ id: timelogToRemoveId,
+ },
+ },
+ update: expect.anything(),
+ });
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'An error occurred while removing the timelog.',
+ captureError: true,
+ error: expect.any(Object),
+ });
+ });
+ });
});
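
The new delete tests spy on `wrapper.vm.$apollo.mutate` directly instead of registering a mutation handler with `createMockApollo`, which leaves the existing query handlers untouched while still letting the spec assert on the exact mutation document and variables. The core of that pattern, condensed from the spec above (assumes the same `wrapper`, imports, and test ID):

// Condensed from the spec above; names and test ID come from that file.
const mutateSpy = jest
  .spyOn(wrapper.vm.$apollo, 'mutate')
  .mockResolvedValue({ data: { timelogDelete: { errors: [] } } });

await wrapper.findByTestId('deleteButton').trigger('click');
await waitForPromises();

expect(mutateSpy).toHaveBeenCalledWith(
  expect.objectContaining({
    mutation: deleteTimelogMutation,
    variables: { input: { id: timelogToRemoveId } },
  }),
);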
diff --git a/spec/frontend/static_site_editor/components/app_spec.js b/spec/frontend/static_site_editor/components/app_spec.js
deleted file mode 100644
index bbdffeae68f..00000000000
--- a/spec/frontend/static_site_editor/components/app_spec.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import App from '~/static_site_editor/components/app.vue';
-
-describe('static_site_editor/components/app', () => {
- const mergeRequestsIllustrationPath = 'illustrations/merge_requests.svg';
- const RouterView = {
- template: '<div></div>',
- };
- let wrapper;
-
- const buildWrapper = () => {
- wrapper = shallowMount(App, {
- stubs: {
- RouterView,
- },
- propsData: {
- mergeRequestsIllustrationPath,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('passes merge request illustration path to the router view component', () => {
- buildWrapper();
-
- expect(wrapper.find(RouterView).attributes()).toMatchObject({
- 'merge-requests-illustration-path': mergeRequestsIllustrationPath,
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/components/edit_area_spec.js b/spec/frontend/static_site_editor/components/edit_area_spec.js
deleted file mode 100644
index a833fd9ff9e..00000000000
--- a/spec/frontend/static_site_editor/components/edit_area_spec.js
+++ /dev/null
@@ -1,264 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { stubComponent } from 'helpers/stub_component';
-
-import EditArea from '~/static_site_editor/components/edit_area.vue';
-import EditDrawer from '~/static_site_editor/components/edit_drawer.vue';
-import EditHeader from '~/static_site_editor/components/edit_header.vue';
-import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
-import UnsavedChangesConfirmDialog from '~/static_site_editor/components/unsaved_changes_confirm_dialog.vue';
-import { EDITOR_TYPES } from '~/static_site_editor/rich_content_editor/constants';
-import RichContentEditor from '~/static_site_editor/rich_content_editor/rich_content_editor.vue';
-
-import {
- sourceContentTitle as title,
- sourceContentYAML as content,
- sourceContentHeaderObjYAML as headerSettings,
- sourceContentBody as body,
- returnUrl,
- mounts,
- project,
- branch,
- baseUrl,
- imageRoot,
-} from '../mock_data';
-
-jest.mock('~/static_site_editor/services/formatter', () => jest.fn((str) => `${str} format-pass`));
-
-describe('~/static_site_editor/components/edit_area.vue', () => {
- let wrapper;
- const formattedBody = `${body} format-pass`;
- const savingChanges = true;
- const newBody = `new ${body}`;
-
- const RichContentEditorStub = stubComponent(RichContentEditor, {
- methods: {
- resetInitialValue: jest.fn(),
- },
- });
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(EditArea, {
- propsData: {
- title,
- content,
- returnUrl,
- mounts,
- project,
- branch,
- baseUrl,
- imageRoot,
- savingChanges,
- ...propsData,
- },
- stubs: { RichContentEditor: RichContentEditorStub },
- });
- };
-
- const findEditHeader = () => wrapper.find(EditHeader);
- const findEditDrawer = () => wrapper.find(EditDrawer);
- const findRichContentEditor = () => wrapper.find(RichContentEditor);
- const findPublishToolbar = () => wrapper.find(PublishToolbar);
- const findUnsavedChangesConfirmDialog = () => wrapper.find(UnsavedChangesConfirmDialog);
-
- beforeEach(() => {
- buildWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('renders edit header', () => {
- expect(findEditHeader().exists()).toBe(true);
- expect(findEditHeader().props('title')).toBe(title);
- });
-
- it('renders edit drawer', () => {
- expect(findEditDrawer().exists()).toBe(true);
- });
-
- it('renders rich content editor with a format pass', () => {
- expect(findRichContentEditor().exists()).toBe(true);
- expect(findRichContentEditor().props('content')).toBe(formattedBody);
- });
-
- it('renders publish toolbar', () => {
- expect(findPublishToolbar().exists()).toBe(true);
- expect(findPublishToolbar().props()).toMatchObject({
- returnUrl,
- savingChanges,
- saveable: false,
- });
- });
-
- it('renders unsaved changes confirm dialog', () => {
- expect(findUnsavedChangesConfirmDialog().exists()).toBe(true);
- expect(findUnsavedChangesConfirmDialog().props('modified')).toBe(false);
- });
-
- describe('when content changes', () => {
- beforeEach(() => {
- findRichContentEditor().vm.$emit('input', newBody);
-
- return nextTick();
- });
-
- it('updates parsedSource with new content', () => {
- const newContent = 'New content';
- const spySyncParsedSource = jest.spyOn(wrapper.vm.parsedSource, 'syncContent');
-
- findRichContentEditor().vm.$emit('input', newContent);
-
- expect(spySyncParsedSource).toHaveBeenCalledWith(newContent, true);
- });
-
- it('sets publish toolbar as saveable', () => {
- expect(findPublishToolbar().props('saveable')).toBe(true);
- });
-
- it('sets unsaved changes confirm dialog as modified', () => {
- expect(findUnsavedChangesConfirmDialog().props('modified')).toBe(true);
- });
-
- it('sets publish toolbar as not saveable when content changes are rollback', async () => {
- findRichContentEditor().vm.$emit('input', formattedBody);
-
- await nextTick();
- expect(findPublishToolbar().props('saveable')).toBe(false);
- });
- });
-
- describe('when the mode changes', () => {
- const setInitialMode = (mode) => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ editorMode: mode });
- };
-
- afterEach(() => {
- setInitialMode(EDITOR_TYPES.wysiwyg);
- });
-
- it.each`
- initialMode | targetMode | resetValue
- ${EDITOR_TYPES.wysiwyg} | ${EDITOR_TYPES.markdown} | ${`${content} format-pass format-pass`}
- ${EDITOR_TYPES.markdown} | ${EDITOR_TYPES.wysiwyg} | ${`${body} format-pass format-pass`}
- `(
- 'sets editorMode from $initialMode to $targetMode',
- ({ initialMode, targetMode, resetValue }) => {
- setInitialMode(initialMode);
-
- findRichContentEditor().vm.$emit('modeChange', targetMode);
-
- expect(RichContentEditorStub.methods.resetInitialValue).toHaveBeenCalledWith(resetValue);
- expect(wrapper.vm.editorMode).toBe(targetMode);
- },
- );
-
- it('should format the content', () => {
- findRichContentEditor().vm.$emit('modeChange', EDITOR_TYPES.markdown);
-
- expect(RichContentEditorStub.methods.resetInitialValue).toHaveBeenCalledWith(
- `${content} format-pass format-pass`,
- );
- });
- });
-
- describe('when content has front matter', () => {
- it('renders a closed edit drawer', () => {
- expect(findEditDrawer().exists()).toBe(true);
- expect(findEditDrawer().props('isOpen')).toBe(false);
- });
-
- it('opens the edit drawer', async () => {
- findPublishToolbar().vm.$emit('editSettings');
-
- await nextTick();
- expect(findEditDrawer().props('isOpen')).toBe(true);
- });
-
- it('closes the edit drawer', async () => {
- findEditDrawer().vm.$emit('close');
-
- await nextTick();
- expect(findEditDrawer().props('isOpen')).toBe(false);
- });
-
- it('forwards the matter settings when the drawer is open', async () => {
- findPublishToolbar().vm.$emit('editSettings');
-
- jest.spyOn(wrapper.vm.parsedSource, 'matter').mockReturnValueOnce(headerSettings);
-
- await nextTick();
- expect(findEditDrawer().props('settings')).toEqual(headerSettings);
- });
-
- it('enables toolbar submit button', () => {
- expect(findPublishToolbar().props('hasSettings')).toBe(true);
- });
-
- it('syncs matter changes regardless of edit mode', () => {
- const newSettings = { title: 'test' };
- const spySyncParsedSource = jest.spyOn(wrapper.vm.parsedSource, 'syncMatter');
-
- findEditDrawer().vm.$emit('updateSettings', newSettings);
-
- expect(spySyncParsedSource).toHaveBeenCalledWith(newSettings);
- });
-
- it('syncs matter changes to content in markdown mode', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ editorMode: EDITOR_TYPES.markdown });
-
- const newSettings = { title: 'test' };
-
- findEditDrawer().vm.$emit('updateSettings', newSettings);
-
- await nextTick();
- expect(findRichContentEditor().props('content')).toContain('title: test');
- });
- });
-
- describe('when content lacks front matter', () => {
- beforeEach(() => {
- buildWrapper({ content: body });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('does not render edit drawer', () => {
- expect(findEditDrawer().exists()).toBe(false);
- });
-
- it('does not enable toolbar submit button', () => {
- expect(findPublishToolbar().props('hasSettings')).toBe(false);
- });
- });
-
- describe('when content is submitted', () => {
- it('should format the content', () => {
- findPublishToolbar().vm.$emit('submit', content);
-
- expect(wrapper.emitted('submit')[0][0].content).toBe(`${content} format-pass format-pass`);
- expect(wrapper.emitted('submit').length).toBe(1);
- });
- });
-
- describe('when RichContentEditor component triggers load event', () => {
- it('stores formatted markdown provided in the event data', () => {
- const data = { formattedMarkdown: 'formatted markdown' };
-
- findRichContentEditor().vm.$emit('load', data);
-
- // We can access the formatted markdown when submitting changes
- findPublishToolbar().vm.$emit('submit');
-
- expect(wrapper.emitted('submit')[0][0]).toMatchObject(data);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/components/edit_drawer_spec.js b/spec/frontend/static_site_editor/components/edit_drawer_spec.js
deleted file mode 100644
index 402dfe441c5..00000000000
--- a/spec/frontend/static_site_editor/components/edit_drawer_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import { GlDrawer } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-
-import EditDrawer from '~/static_site_editor/components/edit_drawer.vue';
-import FrontMatterControls from '~/static_site_editor/components/front_matter_controls.vue';
-
-describe('~/static_site_editor/components/edit_drawer.vue', () => {
- let wrapper;
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(EditDrawer, {
- propsData: {
- isOpen: false,
- settings: { title: 'Some title' },
- ...propsData,
- },
- });
- };
-
- const findFrontMatterControls = () => wrapper.find(FrontMatterControls);
- const findGlDrawer = () => wrapper.find(GlDrawer);
-
- beforeEach(() => {
- buildWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('renders the GlDrawer', () => {
- expect(findGlDrawer().exists()).toBe(true);
- });
-
- it('renders the FrontMatterControls', () => {
- expect(findFrontMatterControls().exists()).toBe(true);
- });
-
- it('forwards the settings to FrontMatterControls', () => {
- expect(findFrontMatterControls().props('settings')).toBe(wrapper.props('settings'));
- });
-
- it('is closed by default', () => {
- expect(findGlDrawer().props('open')).toBe(false);
- });
-
- it('can open', () => {
- buildWrapper({ isOpen: true });
-
- expect(findGlDrawer().props('open')).toBe(true);
- });
-
- it.each`
- event | payload | finderFn
- ${'close'} | ${undefined} | ${findGlDrawer}
- ${'updateSettings'} | ${{ some: 'data' }} | ${findFrontMatterControls}
- `(
- 'forwards the emitted $event event from the $finderFn with $payload',
- ({ event, payload, finderFn }) => {
- finderFn().vm.$emit(event, payload);
-
- expect(wrapper.emitted(event)[0][0]).toBe(payload);
- expect(wrapper.emitted(event).length).toBe(1);
- },
- );
-});
diff --git a/spec/frontend/static_site_editor/components/edit_header_spec.js b/spec/frontend/static_site_editor/components/edit_header_spec.js
deleted file mode 100644
index 2b0fe226a0b..00000000000
--- a/spec/frontend/static_site_editor/components/edit_header_spec.js
+++ /dev/null
@@ -1,38 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import EditHeader from '~/static_site_editor/components/edit_header.vue';
-import { DEFAULT_HEADING } from '~/static_site_editor/constants';
-
-import { sourceContentTitle } from '../mock_data';
-
-describe('~/static_site_editor/components/edit_header.vue', () => {
- let wrapper;
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(EditHeader, {
- propsData: {
- ...propsData,
- },
- });
- };
-
- const findHeading = () => wrapper.find({ ref: 'sseHeading' });
-
- beforeEach(() => {
- buildWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders the default heading if there is no title prop', () => {
- expect(findHeading().text()).toBe(DEFAULT_HEADING);
- });
-
- it('renders the title prop value in the heading', () => {
- buildWrapper({ title: sourceContentTitle });
-
- expect(findHeading().text()).toBe(sourceContentTitle);
- });
-});
diff --git a/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js b/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js
deleted file mode 100644
index f6b29e98e5f..00000000000
--- a/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js
+++ /dev/null
@@ -1,115 +0,0 @@
-import { GlDropdown, GlDropdownItem, GlFormInput, GlFormTextarea } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-
-import { nextTick } from 'vue';
-import EditMetaControls from '~/static_site_editor/components/edit_meta_controls.vue';
-
-import { mergeRequestMeta, mergeRequestTemplates } from '../mock_data';
-
-describe('~/static_site_editor/components/edit_meta_controls.vue', () => {
- let wrapper;
- let mockSelect;
- let mockGlFormInputTitleInstance;
- const { title, description } = mergeRequestMeta;
- const newTitle = 'New title';
- const newDescription = 'New description';
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(EditMetaControls, {
- propsData: {
- title,
- description,
- templates: mergeRequestTemplates,
- currentTemplate: null,
- ...propsData,
- },
- });
- };
-
- const buildMocks = () => {
- mockSelect = jest.fn();
- mockGlFormInputTitleInstance = { $el: { select: mockSelect } };
- wrapper.vm.$refs.title = mockGlFormInputTitleInstance;
- };
-
- const findGlFormInputTitle = () => wrapper.find(GlFormInput);
- const findGlDropdownDescriptionTemplate = () => wrapper.find(GlDropdown);
- const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
- const findDropdownItemByIndex = (index) => findAllDropdownItems().at(index);
-
- const findGlFormTextAreaDescription = () => wrapper.find(GlFormTextarea);
-
- beforeEach(async () => {
- buildWrapper();
- buildMocks();
-
- await nextTick();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('renders the title input', () => {
- expect(findGlFormInputTitle().exists()).toBe(true);
- });
-
- it('renders the description template dropdown', () => {
- expect(findGlDropdownDescriptionTemplate().exists()).toBe(true);
- });
-
- it('renders the description input', () => {
- expect(findGlFormTextAreaDescription().exists()).toBe(true);
- });
-
- it('forwards the title prop to the title input', () => {
- expect(findGlFormInputTitle().attributes().value).toBe(title);
- });
-
- it('forwards the description prop to the description input', () => {
- expect(findGlFormTextAreaDescription().attributes().value).toBe(description);
- });
-
- it('calls select on the title input when mounted', () => {
- expect(mockGlFormInputTitleInstance.$el.select).toHaveBeenCalled();
- });
-
- it('renders a GlDropdownItem per template plus one (for the starting none option)', () => {
- expect(findDropdownItemByIndex(0).text()).toBe('None');
- expect(findAllDropdownItems().length).toBe(mergeRequestTemplates.length + 1);
- });
-
- describe('when inputs change', () => {
- const storageKey = 'sse-merge-request-meta-local-storage-editable';
-
- afterEach(() => {
- localStorage.removeItem(storageKey);
- });
-
- it.each`
- findFn | key | value
- ${findGlFormInputTitle} | ${'title'} | ${newTitle}
- ${findGlFormTextAreaDescription} | ${'description'} | ${newDescription}
- `('emits updated settings when $findFn input updates', ({ key, value, findFn }) => {
- findFn().vm.$emit('input', value);
-
- const newSettings = { ...mergeRequestMeta, [key]: value };
-
- expect(wrapper.emitted('updateSettings')[0][0]).toMatchObject(newSettings);
- });
- });
-
- describe('when templates change', () => {
- it.each`
- index | value
- ${0} | ${null}
- ${1} | ${mergeRequestTemplates[0]}
- ${2} | ${mergeRequestTemplates[1]}
- `('emits a change template event when $index is clicked', ({ index, value }) => {
- findDropdownItemByIndex(index).vm.$emit('click');
-
- expect(wrapper.emitted('changeTemplate')[0][0]).toBe(value);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js b/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js
deleted file mode 100644
index bf3f8b7f571..00000000000
--- a/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js
+++ /dev/null
@@ -1,172 +0,0 @@
-import { GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import axios from '~/lib/utils/axios_utils';
-import EditMetaControls from '~/static_site_editor/components/edit_meta_controls.vue';
-import EditMetaModal from '~/static_site_editor/components/edit_meta_modal.vue';
-import { MR_META_LOCAL_STORAGE_KEY } from '~/static_site_editor/constants';
-import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-import {
- sourcePath,
- mergeRequestMeta,
- mergeRequestTemplates,
- project as namespaceProject,
-} from '../mock_data';
-
-describe('~/static_site_editor/components/edit_meta_modal.vue', () => {
- useLocalStorageSpy();
-
- let wrapper;
- let mockAxios;
- const { title, description } = mergeRequestMeta;
- const [namespace, project] = namespaceProject.split('/');
-
- const buildWrapper = (propsData = {}, data = {}) => {
- wrapper = shallowMount(EditMetaModal, {
- propsData: {
- sourcePath,
- namespace,
- project,
- ...propsData,
- },
- data: () => data,
- });
- };
-
- const buildMockAxios = () => {
- mockAxios = new MockAdapter(axios);
- const templatesMergeRequestsPath = `templates/merge_request`;
- mockAxios
- .onGet(`${namespace}/${project}/${templatesMergeRequestsPath}`)
- .reply(200, mergeRequestTemplates);
- };
-
- const buildMockRefs = () => {
- wrapper.vm.$refs.editMetaControls = { resetCachedEditable: jest.fn() };
- };
-
- const findGlModal = () => wrapper.find(GlModal);
- const findEditMetaControls = () => wrapper.find(EditMetaControls);
- const findLocalStorageSync = () => wrapper.find(LocalStorageSync);
-
- beforeEach(async () => {
- localStorage.setItem(MR_META_LOCAL_STORAGE_KEY);
-
- buildMockAxios();
- buildWrapper();
- buildMockRefs();
-
- await nextTick();
- });
-
- afterEach(() => {
- mockAxios.restore();
-
- wrapper.destroy();
- wrapper = null;
- });
-
- it('initializes initial merge request meta with local storage data', async () => {
- const localStorageMeta = {
- title: 'stored title',
- description: 'stored description',
- templates: null,
- currentTemplate: null,
- };
-
- findLocalStorageSync().vm.$emit('input', localStorageMeta);
-
- await nextTick();
-
- expect(findEditMetaControls().props()).toEqual(localStorageMeta);
- });
-
- it('renders the modal', () => {
- expect(findGlModal().exists()).toBe(true);
- });
-
- it('renders the edit meta controls', () => {
- expect(findEditMetaControls().exists()).toBe(true);
- });
-
- it('contains the sourcePath in the title', () => {
- expect(findEditMetaControls().props('title')).toContain(sourcePath);
- });
-
- it('forwards the title prop', () => {
- expect(findEditMetaControls().props('title')).toBe(title);
- });
-
- it('forwards the description prop', () => {
- expect(findEditMetaControls().props('description')).toBe(description);
- });
-
- it('forwards the templates prop', () => {
- expect(findEditMetaControls().props('templates')).toBe(null);
- });
-
- it('forwards the currentTemplate prop', () => {
- expect(findEditMetaControls().props('currentTemplate')).toBe(null);
- });
-
- describe('when save button is clicked', () => {
- beforeEach(() => {
- findGlModal().vm.$emit('primary', mergeRequestMeta);
- });
-
- it('removes merge request meta from local storage', () => {
- expect(findLocalStorageSync().props().clear).toBe(true);
- });
-
- it('emits the primary event with mergeRequestMeta', () => {
- expect(wrapper.emitted('primary')).toEqual([[mergeRequestMeta]]);
- });
- });
-
- describe('when templates exist', () => {
- const template1 = mergeRequestTemplates[0];
-
- beforeEach(() => {
- buildWrapper({}, { templates: mergeRequestTemplates, currentTemplate: null });
- });
-
- it('sets the currentTemplate on the changeTemplate event', async () => {
- findEditMetaControls().vm.$emit('changeTemplate', template1);
-
- await nextTick();
-
- expect(findEditMetaControls().props().currentTemplate).toBe(template1);
-
- findEditMetaControls().vm.$emit('changeTemplate', null);
-
- await nextTick();
-
- expect(findEditMetaControls().props().currentTemplate).toBe(null);
- });
-
- it('updates the description on the changeTemplate event', async () => {
- findEditMetaControls().vm.$emit('changeTemplate', template1);
-
- await nextTick();
-
- expect(findEditMetaControls().props().description).toEqual(template1.content);
- });
- });
-
- it('emits the hide event', () => {
- findGlModal().vm.$emit('hide');
- expect(wrapper.emitted('hide')).toEqual([[]]);
- });
-
- it('stores merge request meta changes in local storage when changes happen', async () => {
- const newMeta = { title: 'new title', description: 'new description' };
-
- findEditMetaControls().vm.$emit('updateSettings', newMeta);
-
- await nextTick();
-
- expect(findLocalStorageSync().props('value')).toEqual(newMeta);
- });
-});
diff --git a/spec/frontend/static_site_editor/components/front_matter_controls_spec.js b/spec/frontend/static_site_editor/components/front_matter_controls_spec.js
deleted file mode 100644
index 5fda3b40306..00000000000
--- a/spec/frontend/static_site_editor/components/front_matter_controls_spec.js
+++ /dev/null
@@ -1,71 +0,0 @@
-import { GlFormGroup } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-
-import { humanize } from '~/lib/utils/text_utility';
-
-import FrontMatterControls from '~/static_site_editor/components/front_matter_controls.vue';
-
-import { sourceContentHeaderObjYAML as settings } from '../mock_data';
-
-describe('~/static_site_editor/components/front_matter_controls.vue', () => {
- let wrapper;
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(FrontMatterControls, {
- propsData: {
- settings,
- ...propsData,
- },
- });
- };
-
- beforeEach(() => {
- buildWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('should render only the supported GlFormGroup types', () => {
- expect(wrapper.findAll(GlFormGroup)).toHaveLength(3);
- });
-
- it.each`
- key
- ${'layout'}
- ${'title'}
- ${'twitter_image'}
- `('renders field when key is $key', ({ key }) => {
- const glFormGroup = wrapper.find(`#sse-front-matter-form-group-${key}`);
- const glFormInput = wrapper.find(`#sse-front-matter-control-${key}`);
-
- expect(glFormGroup.exists()).toBe(true);
- expect(glFormGroup.attributes().label).toBe(humanize(key));
-
- expect(glFormInput.exists()).toBe(true);
- expect(glFormInput.attributes().value).toBe(settings[key]);
- });
-
- it.each`
- key
- ${'suppress_header'}
- ${'extra_css'}
- `('does not render field when key is $key', ({ key }) => {
- const glFormInput = wrapper.find(`#sse-front-matter-control-${key}`);
-
- expect(glFormInput.exists()).toBe(false);
- });
-
- it('emits updated settings when nested control updates', () => {
- const elId = `#sse-front-matter-control-title`;
- const glFormInput = wrapper.find(elId);
- const newTitle = 'New title';
-
- glFormInput.vm.$emit('input', newTitle);
-
- const newSettings = { ...settings, title: newTitle };
-
- expect(wrapper.emitted('updateSettings')[0][0]).toMatchObject(newSettings);
- });
-});
diff --git a/spec/frontend/static_site_editor/components/invalid_content_message_spec.js b/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
deleted file mode 100644
index 7e699e9451c..00000000000
--- a/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
-
-describe('~/static_site_editor/components/invalid_content_message.vue', () => {
- let wrapper;
- const findDocumentationButton = () => wrapper.find({ ref: 'documentationButton' });
- const documentationUrl =
- 'https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman';
-
- beforeEach(() => {
- wrapper = shallowMount(InvalidContentMessage);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders the configuration button link', () => {
- expect(findDocumentationButton().exists()).toBe(true);
- expect(findDocumentationButton().attributes('href')).toBe(documentationUrl);
- });
-});
diff --git a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js b/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
deleted file mode 100644
index 9ba7e4a94d1..00000000000
--- a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
+++ /dev/null
@@ -1,92 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
-
-import { returnUrl } from '../mock_data';
-
-describe('Static Site Editor Toolbar', () => {
- let wrapper;
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(PublishToolbar, {
- propsData: {
- hasSettings: false,
- saveable: false,
- ...propsData,
- },
- });
- };
-
- const findReturnUrlLink = () => wrapper.find({ ref: 'returnUrlLink' });
- const findSaveChangesButton = () => wrapper.find({ ref: 'submit' });
- const findEditSettingsButton = () => wrapper.find({ ref: 'settings' });
-
- beforeEach(() => {
- buildWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('does not render Settings button', () => {
- expect(findEditSettingsButton().exists()).toBe(false);
- });
-
- it('renders Submit Changes button', () => {
- expect(findSaveChangesButton().exists()).toBe(true);
- });
-
- it('disables Submit Changes button', () => {
- expect(findSaveChangesButton().attributes('disabled')).toBe('true');
- });
-
- it('does not render the Submit Changes button with a loader', () => {
- expect(findSaveChangesButton().props('loading')).toBe(false);
- });
-
- it('does not render returnUrl link', () => {
- expect(findReturnUrlLink().exists()).toBe(false);
- });
-
- it('renders returnUrl link when returnUrl prop exists', () => {
- buildWrapper({ returnUrl });
-
- expect(findReturnUrlLink().exists()).toBe(true);
- expect(findReturnUrlLink().attributes('href')).toBe(returnUrl);
- });
-
- describe('when providing settings CTA', () => {
- it('enables Submit Changes button', () => {
- buildWrapper({ hasSettings: true });
-
- expect(findEditSettingsButton().exists()).toBe(true);
- });
- });
-
- describe('when saveable', () => {
- it('enables Submit Changes button', () => {
- buildWrapper({ saveable: true });
-
- expect(findSaveChangesButton().attributes('disabled')).toBeFalsy();
- });
- });
-
- describe('when saving changes', () => {
- beforeEach(() => {
- buildWrapper({ savingChanges: true });
- });
-
- it('renders the Submit Changes button with a loading indicator', () => {
- expect(findSaveChangesButton().props('loading')).toBe(true);
- });
- });
-
- it('emits submit event when submit button is clicked', () => {
- buildWrapper({ saveable: true });
-
- findSaveChangesButton().vm.$emit('click');
-
- expect(wrapper.emitted('submit')).toHaveLength(1);
- });
-});
diff --git a/spec/frontend/static_site_editor/components/submit_changes_error_spec.js b/spec/frontend/static_site_editor/components/submit_changes_error_spec.js
deleted file mode 100644
index 82a5c5f624a..00000000000
--- a/spec/frontend/static_site_editor/components/submit_changes_error_spec.js
+++ /dev/null
@@ -1,48 +0,0 @@
-import { GlButton, GlAlert } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-
-import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
-
-import { submitChangesError as error } from '../mock_data';
-
-describe('Submit Changes Error', () => {
- let wrapper;
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(SubmitChangesError, {
- propsData: {
- ...propsData,
- },
- stubs: {
- GlAlert,
- },
- });
- };
-
- const findRetryButton = () => wrapper.find(GlButton);
- const findAlert = () => wrapper.find(GlAlert);
-
- beforeEach(() => {
- buildWrapper({ error });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders error message', () => {
- expect(findAlert().text()).toContain(error);
- });
-
- it('emits dismiss event when alert emits dismiss event', () => {
- findAlert().vm.$emit('dismiss');
-
- expect(wrapper.emitted('dismiss')).toHaveLength(1);
- });
-
- it('emits retry event when retry button is clicked', () => {
- findRetryButton().vm.$emit('click');
-
- expect(wrapper.emitted('retry')).toHaveLength(1);
- });
-});
diff --git a/spec/frontend/static_site_editor/components/unsaved_changes_confirm_dialog_spec.js b/spec/frontend/static_site_editor/components/unsaved_changes_confirm_dialog_spec.js
deleted file mode 100644
index 9b8b22da693..00000000000
--- a/spec/frontend/static_site_editor/components/unsaved_changes_confirm_dialog_spec.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import UnsavedChangesConfirmDialog from '~/static_site_editor/components/unsaved_changes_confirm_dialog.vue';
-
-describe('static_site_editor/components/unsaved_changes_confirm_dialog', () => {
- let wrapper;
- let event;
- let returnValueSetter;
-
- const buildWrapper = (propsData = {}) => {
- wrapper = shallowMount(UnsavedChangesConfirmDialog, {
- propsData,
- });
- };
-
- beforeEach(() => {
- event = new Event('beforeunload');
-
- jest.spyOn(event, 'preventDefault');
- returnValueSetter = jest.spyOn(event, 'returnValue', 'set');
- });
-
- afterEach(() => {
- event.preventDefault.mockRestore();
- returnValueSetter.mockRestore();
- wrapper.destroy();
- });
-
- it('displays confirmation dialog when modified = true', () => {
- buildWrapper({ modified: true });
- window.dispatchEvent(event);
-
- expect(event.preventDefault).toHaveBeenCalled();
- expect(returnValueSetter).toHaveBeenCalledWith('');
- });
-
- it('does not display confirmation dialog when modified = false', () => {
- buildWrapper();
- window.dispatchEvent(event);
-
- expect(event.preventDefault).not.toHaveBeenCalled();
- expect(returnValueSetter).not.toHaveBeenCalled();
- });
-});
diff --git a/spec/frontend/static_site_editor/graphql/resolvers/file_spec.js b/spec/frontend/static_site_editor/graphql/resolvers/file_spec.js
deleted file mode 100644
index 83ad23f7dcf..00000000000
--- a/spec/frontend/static_site_editor/graphql/resolvers/file_spec.js
+++ /dev/null
@@ -1,25 +0,0 @@
-import fileResolver from '~/static_site_editor/graphql/resolvers/file';
-import loadSourceContent from '~/static_site_editor/services/load_source_content';
-
-import {
- projectId,
- sourcePath,
- sourceContentTitle as title,
- sourceContentYAML as content,
-} from '../../mock_data';
-
-jest.mock('~/static_site_editor/services/load_source_content', () => jest.fn());
-
-describe('static_site_editor/graphql/resolvers/file', () => {
- it('returns file content and title when fetching file successfully', () => {
- loadSourceContent.mockResolvedValueOnce({ title, content });
-
- return fileResolver({ fullPath: projectId }, { path: sourcePath }).then((file) => {
- expect(file).toEqual({
- __typename: 'File',
- title,
- content,
- });
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js b/spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js
deleted file mode 100644
index 0670b240a3f..00000000000
--- a/spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import appDataQuery from '~/static_site_editor/graphql/queries/app_data.query.graphql';
-import hasSubmittedChanges from '~/static_site_editor/graphql/resolvers/has_submitted_changes';
-
-describe('static_site_editor/graphql/resolvers/has_submitted_changes', () => {
- it('updates the cache with the data passed in input', () => {
- const cachedData = { appData: { original: 'foo' } };
- const newValue = { input: { hasSubmittedChanges: true } };
-
- const cache = {
- readQuery: jest.fn().mockReturnValue(cachedData),
- writeQuery: jest.fn(),
- };
- hasSubmittedChanges(null, newValue, { cache });
-
- expect(cache.readQuery).toHaveBeenCalledWith({ query: appDataQuery });
- expect(cache.writeQuery).toHaveBeenCalledWith({
- query: appDataQuery,
- data: {
- appData: {
- __typename: 'AppData',
- original: 'foo',
- hasSubmittedChanges: true,
- },
- },
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/graphql/resolvers/submit_content_changes_spec.js b/spec/frontend/static_site_editor/graphql/resolvers/submit_content_changes_spec.js
deleted file mode 100644
index a0529f5f945..00000000000
--- a/spec/frontend/static_site_editor/graphql/resolvers/submit_content_changes_spec.js
+++ /dev/null
@@ -1,37 +0,0 @@
-import savedContentMetaQuery from '~/static_site_editor/graphql/queries/saved_content_meta.query.graphql';
-import submitContentChangesResolver from '~/static_site_editor/graphql/resolvers/submit_content_changes';
-import submitContentChanges from '~/static_site_editor/services/submit_content_changes';
-
-import {
- projectId as project,
- sourcePath,
- username,
- sourceContentYAML as content,
- savedContentMeta,
-} from '../../mock_data';
-
-jest.mock('~/static_site_editor/services/submit_content_changes', () => jest.fn());
-
-describe('static_site_editor/graphql/resolvers/submit_content_changes', () => {
- it('writes savedContentMeta query with the data returned by the submitContentChanges service', () => {
- const cache = { writeQuery: jest.fn() };
-
- submitContentChanges.mockResolvedValueOnce(savedContentMeta);
-
- return submitContentChangesResolver(
- {},
- { input: { path: sourcePath, project, sourcePath, content, username } },
- { cache },
- ).then(() => {
- expect(cache.writeQuery).toHaveBeenCalledWith({
- query: savedContentMetaQuery,
- data: {
- savedContentMeta: {
- __typename: 'SavedContentMeta',
- ...savedContentMeta,
- },
- },
- });
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
deleted file mode 100644
index 8d64e1799b8..00000000000
--- a/spec/frontend/static_site_editor/mock_data.js
+++ /dev/null
@@ -1,91 +0,0 @@
-export const sourceContentHeaderYAML = `---
-layout: handbook-page-toc
-title: Handbook
-twitter_image: /images/tweets/handbook-gitlab.png
-suppress_header: true
-extra_css:
- - sales-and-free-trial-common.css
- - form-to-resource.css
----`;
-export const sourceContentHeaderObjYAML = {
- layout: 'handbook-page-toc',
- title: 'Handbook',
- twitter_image: '/images/tweets/handbook-gitlab.png',
- suppress_header: true,
- extra_css: ['sales-and-free-trial-common.css', 'form-to-resource.css'],
-};
-export const sourceContentSpacing = `\n`;
-export const sourceContentBody = `## On this page
-{:.no_toc .hidden-md .hidden-lg}
-
-- TOC
-{:toc .hidden-md .hidden-lg}
-
-![image](path/to/image1.png)`;
-export const sourceContentYAML = `${sourceContentHeaderYAML}${sourceContentSpacing}${sourceContentBody}`;
-export const sourceContentTitle = 'Handbook';
-
-export const username = 'gitlabuser';
-export const projectId = '123456';
-export const project = 'user1/project1';
-export const returnUrl = 'https://www.gitlab.com';
-export const sourcePath = 'foobar.md.html';
-export const mergeRequestMeta = {
- title: `Update ${sourcePath} file`,
- description: 'Copy update',
-};
-export const savedContentMeta = {
- branch: {
- label: 'foobar',
- url: 'foobar/-/tree/foobar',
- },
- commit: {
- label: 'c1461b08',
- url: 'foobar/-/c1461b08',
- },
- mergeRequest: {
- label: '123',
- url: 'foobar/-/merge_requests/123',
- },
-};
-export const mergeRequestTemplates = [
- { key: 'Template1', name: 'Template 1', content: 'This is template 1!' },
- { key: 'Template2', name: 'Template 2', content: 'This is template 2!' },
-];
-
-export const submitChangesError = 'Could not save changes';
-export const commitBranchResponse = {
- web_url: '/tree/root-main-patch-88195',
-};
-export const commitMultipleResponse = {
- short_id: 'ed899a2f4b5',
- web_url: '/commit/ed899a2f4b5',
-};
-export const createMergeRequestResponse = {
- iid: '123',
- web_url: '/merge_requests/123',
-};
-
-export const trackingCategory = 'projects:static_site_editor:show';
-
-export const images = new Map([
- ['path/to/image1.png', 'image1-content'],
- ['path/to/image2.png', 'image2-content'],
-]);
-
-export const mounts = [
- {
- source: 'default/source/',
- target: '',
- },
- {
- source: 'source/with/target',
- target: 'target',
- },
-];
-
-export const branch = 'main';
-
-export const baseUrl = '/user1/project1/-/sse/main%2Ftest.md';
-
-export const imageRoot = 'source/images/';
diff --git a/spec/frontend/static_site_editor/pages/home_spec.js b/spec/frontend/static_site_editor/pages/home_spec.js
deleted file mode 100644
index 6571d295c36..00000000000
--- a/spec/frontend/static_site_editor/pages/home_spec.js
+++ /dev/null
@@ -1,301 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import EditArea from '~/static_site_editor/components/edit_area.vue';
-import EditMetaModal from '~/static_site_editor/components/edit_meta_modal.vue';
-import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
-import SkeletonLoader from '~/static_site_editor/components/skeleton_loader.vue';
-import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
-import { TRACKING_ACTION_INITIALIZE_EDITOR } from '~/static_site_editor/constants';
-import hasSubmittedChangesMutation from '~/static_site_editor/graphql/mutations/has_submitted_changes.mutation.graphql';
-import submitContentChangesMutation from '~/static_site_editor/graphql/mutations/submit_content_changes.mutation.graphql';
-import Home from '~/static_site_editor/pages/home.vue';
-import { SUCCESS_ROUTE } from '~/static_site_editor/router/constants';
-
-import {
- project,
- returnUrl,
- sourceContentYAML as content,
- sourceContentTitle as title,
- sourcePath,
- username,
- mergeRequestMeta,
- savedContentMeta,
- submitChangesError,
- trackingCategory,
- images,
- mounts,
- branch,
- baseUrl,
- imageRoot,
-} from '../mock_data';
-
-describe('static_site_editor/pages/home', () => {
- let wrapper;
- let store;
- let $apollo;
- let $router;
- let mutateMock;
- let trackingSpy;
- const defaultAppData = {
- isSupportedContent: true,
- hasSubmittedChanges: false,
- returnUrl,
- project,
- username,
- sourcePath,
- mounts,
- branch,
- baseUrl,
- imageUploadPath: imageRoot,
- };
- const hasSubmittedChangesMutationPayload = {
- data: {
- appData: { ...defaultAppData, hasSubmittedChanges: true },
- },
- };
-
- const buildApollo = (queries = {}) => {
- mutateMock = jest.fn();
-
- $apollo = {
- queries: {
- sourceContent: {
- loading: false,
- },
- ...queries,
- },
- mutate: mutateMock,
- };
- };
-
- const buildRouter = () => {
- $router = {
- push: jest.fn(),
- };
- };
-
- const buildWrapper = (data = {}) => {
- wrapper = shallowMount(Home, {
- store,
- mocks: {
- $apollo,
- $router,
- },
- data() {
- return {
- appData: { ...defaultAppData },
- sourceContent: { title, content },
- ...data,
- };
- },
- });
- };
-
- const findEditArea = () => wrapper.find(EditArea);
- const findEditMetaModal = () => wrapper.find(EditMetaModal);
- const findInvalidContentMessage = () => wrapper.find(InvalidContentMessage);
- const findSkeletonLoader = () => wrapper.find(SkeletonLoader);
- const findSubmitChangesError = () => wrapper.find(SubmitChangesError);
-
- beforeEach(() => {
- buildApollo();
- buildRouter();
-
- document.body.dataset.page = trackingCategory;
- trackingSpy = mockTracking(document.body.dataset.page, undefined, jest.spyOn);
- });
-
- afterEach(() => {
- wrapper.destroy();
- unmockTracking();
- wrapper = null;
- $apollo = null;
- });
-
- describe('when content is loaded', () => {
- beforeEach(() => {
- buildWrapper();
- });
-
- it('renders edit area', () => {
- expect(findEditArea().exists()).toBe(true);
- });
-
- it('provides source content, returnUrl, and isSavingChanges to the edit area', () => {
- expect(findEditArea().props()).toMatchObject({
- title,
- mounts,
- content,
- returnUrl,
- savingChanges: false,
- });
- });
- });
-
- it('does not render edit area when content is not loaded', () => {
- buildWrapper({ sourceContent: null });
-
- expect(findEditArea().exists()).toBe(false);
- });
-
- it('renders skeleton loader when content is not loading', () => {
- buildApollo({
- sourceContent: {
- loading: true,
- },
- });
- buildWrapper();
-
- expect(findSkeletonLoader().exists()).toBe(true);
- });
-
- it('does not render skeleton loader when content is not loading', () => {
- buildApollo({
- sourceContent: {
- loading: false,
- },
- });
- buildWrapper();
-
- expect(findSkeletonLoader().exists()).toBe(false);
- });
-
- it('displays invalid content message when content is not supported', () => {
- buildWrapper({ appData: { ...defaultAppData, isSupportedContent: false } });
-
- expect(findInvalidContentMessage().exists()).toBe(true);
- });
-
- it('does not display invalid content message when content is supported', () => {
- buildWrapper();
-
- expect(findInvalidContentMessage().exists()).toBe(false);
- });
-
- it('renders an EditMetaModal component', () => {
- buildWrapper();
-
- expect(findEditMetaModal().exists()).toBe(true);
- });
-
- describe('when preparing submission', () => {
- it('calls the show method when the edit-area submit event is emitted', async () => {
- buildWrapper();
-
- const mockInstance = { show: jest.fn() };
- wrapper.vm.$refs.editMetaModal = mockInstance;
-
- findEditArea().vm.$emit('submit', { content });
-
- await nextTick();
- expect(mockInstance.show).toHaveBeenCalled();
- });
- });
-
- describe('when submitting changes fails', () => {
- const setupMutateMock = () => {
- mutateMock
- .mockResolvedValueOnce(hasSubmittedChangesMutationPayload)
- .mockRejectedValueOnce(new Error(submitChangesError));
- };
-
- beforeEach(async () => {
- setupMutateMock();
-
- buildWrapper({ content });
- findEditMetaModal().vm.$emit('primary', mergeRequestMeta);
-
- await nextTick();
- });
-
- it('displays submit changes error message', () => {
- expect(findSubmitChangesError().exists()).toBe(true);
- });
-
- it('retries submitting changes when retry button is clicked', () => {
- setupMutateMock();
-
- findSubmitChangesError().vm.$emit('retry');
-
- expect(mutateMock).toHaveBeenCalled();
- });
-
- it('hides submit changes error message when dismiss button is clicked', async () => {
- findSubmitChangesError().vm.$emit('dismiss');
-
- await nextTick();
- expect(findSubmitChangesError().exists()).toBe(false);
- });
- });
-
- describe('when submitting changes succeeds', () => {
- const newContent = `new ${content}`;
- const formattedMarkdown = `formatted ${content}`;
-
- beforeEach(async () => {
- mutateMock.mockResolvedValueOnce(hasSubmittedChangesMutationPayload).mockResolvedValueOnce({
- data: {
- submitContentChanges: savedContentMeta,
- },
- });
-
- buildWrapper();
-
- findEditMetaModal().vm.show = jest.fn();
-
- findEditArea().vm.$emit('submit', { content: newContent, images, formattedMarkdown });
-
- findEditMetaModal().vm.$emit('primary', mergeRequestMeta);
-
- await nextTick();
- });
-
- it('dispatches hasSubmittedChanges mutation', () => {
- expect(mutateMock).toHaveBeenNthCalledWith(1, {
- mutation: hasSubmittedChangesMutation,
- variables: {
- input: {
- hasSubmittedChanges: true,
- },
- },
- });
- });
-
- it('dispatches submitContentChanges mutation', () => {
- expect(mutateMock).toHaveBeenNthCalledWith(2, {
- mutation: submitContentChangesMutation,
- variables: {
- input: {
- content: newContent,
- formattedMarkdown,
- project,
- sourcePath,
- targetBranch: branch,
- username,
- images,
- mergeRequestMeta,
- },
- },
- });
- });
-
- it('transitions to the SUCCESS route', () => {
- expect($router.push).toHaveBeenCalledWith(SUCCESS_ROUTE);
- });
- });
-
- it('does not display submit changes error when an error does not exist', () => {
- buildWrapper();
-
- expect(findSubmitChangesError().exists()).toBe(false);
- });
-
- it('tracks when editor is initialized on the mounted lifecycle hook', () => {
- buildWrapper();
- expect(trackingSpy).toHaveBeenCalledWith(
- document.body.dataset.page,
- TRACKING_ACTION_INITIALIZE_EDITOR,
- );
- });
-});
diff --git a/spec/frontend/static_site_editor/pages/success_spec.js b/spec/frontend/static_site_editor/pages/success_spec.js
deleted file mode 100644
index fbdc2c435a0..00000000000
--- a/spec/frontend/static_site_editor/pages/success_spec.js
+++ /dev/null
@@ -1,131 +0,0 @@
-import { GlButton, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Success from '~/static_site_editor/pages/success.vue';
-import { HOME_ROUTE } from '~/static_site_editor/router/constants';
-import { savedContentMeta, returnUrl, sourcePath } from '../mock_data';
-
-describe('~/static_site_editor/pages/success.vue', () => {
- const mergeRequestsIllustrationPath = 'illustrations/merge_requests.svg';
- let wrapper;
- let router;
-
- const buildRouter = () => {
- router = {
- push: jest.fn(),
- };
- };
-
- const buildWrapper = (data = {}, appData = {}) => {
- wrapper = shallowMount(Success, {
- mocks: {
- $router: router,
- },
- stubs: {
- GlButton,
- GlEmptyState,
- GlLoadingIcon,
- },
- propsData: {
- mergeRequestsIllustrationPath,
- },
- data() {
- return {
- savedContentMeta,
- appData: {
- returnUrl,
- sourcePath,
- hasSubmittedChanges: true,
- ...appData,
- },
- ...data,
- };
- },
- });
- };
-
- const findReturnUrlButton = () => wrapper.find(GlButton);
- const findEmptyState = () => wrapper.find(GlEmptyState);
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
-
- beforeEach(() => {
- buildRouter();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('when savedContentMeta is valid', () => {
- it('renders empty state with a link to the created merge request', () => {
- buildWrapper();
-
- expect(findEmptyState().exists()).toBe(true);
- expect(findEmptyState().props()).toMatchObject({
- primaryButtonText: 'View merge request',
- primaryButtonLink: savedContentMeta.mergeRequest.url,
- title: 'Your merge request has been created',
- svgPath: mergeRequestsIllustrationPath,
- svgHeight: 146,
- });
- });
-
- it('displays merge request instructions in the empty state', () => {
- buildWrapper();
-
- expect(findEmptyState().text()).toContain(
- 'To see your changes live you will need to do the following things:',
- );
- expect(findEmptyState().text()).toContain('1. Add a clear title to describe the change.');
- expect(findEmptyState().text()).toContain(
- '2. Add a description to explain why the change is being made.',
- );
- expect(findEmptyState().text()).toContain(
- '3. Assign a person to review and accept the merge request.',
- );
- });
-
- it('displays return to site button', () => {
- buildWrapper();
-
- expect(findReturnUrlButton().text()).toBe('Return to site');
- expect(findReturnUrlButton().attributes().href).toBe(returnUrl);
- });
-
- it('displays source path', () => {
- buildWrapper();
-
- expect(wrapper.text()).toContain(`Update ${sourcePath} file`);
- });
- });
-
- describe('when savedContentMeta is invalid', () => {
- it('renders empty state with a loader', () => {
- buildWrapper({ savedContentMeta: null });
-
- expect(findEmptyState().exists()).toBe(true);
- expect(findEmptyState().props()).toMatchObject({
- title: 'Creating your merge request',
- svgPath: mergeRequestsIllustrationPath,
- });
- expect(findLoadingIcon().exists()).toBe(true);
- });
-
- it('displays helper info in the empty state', () => {
- buildWrapper({ savedContentMeta: null });
-
- expect(findEmptyState().text()).toContain(
- 'You can set an assignee to get your changes reviewed and deployed once your merge request is created',
- );
- expect(findEmptyState().text()).toContain(
- 'A link to view the merge request will appear once ready',
- );
- });
-
- it('redirects to the HOME route when content has not been submitted', () => {
- buildWrapper({ savedContentMeta: null }, { hasSubmittedChanges: false });
-
- expect(router.push).toHaveBeenCalledWith(HOME_ROUTE);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/editor_service_spec.js b/spec/frontend/static_site_editor/rich_content_editor/editor_service_spec.js
deleted file mode 100644
index cd0d09c085f..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/editor_service_spec.js
+++ /dev/null
@@ -1,214 +0,0 @@
-import buildCustomRenderer from '~/static_site_editor/rich_content_editor/services/build_custom_renderer';
-import buildHTMLToMarkdownRenderer from '~/static_site_editor/rich_content_editor/services/build_html_to_markdown_renderer';
-import {
- generateToolbarItem,
- addCustomEventListener,
- removeCustomEventListener,
- registerHTMLToMarkdownRenderer,
- addImage,
- insertVideo,
- getMarkdown,
- getEditorOptions,
-} from '~/static_site_editor/rich_content_editor/services/editor_service';
-import sanitizeHTML from '~/static_site_editor/rich_content_editor/services/sanitize_html';
-
-jest.mock('~/static_site_editor/rich_content_editor/services/build_html_to_markdown_renderer');
-jest.mock('~/static_site_editor/rich_content_editor/services/build_custom_renderer');
-jest.mock('~/static_site_editor/rich_content_editor/services/sanitize_html');
-
-describe('Editor Service', () => {
- let mockInstance;
- let event;
- let handler;
- const parseHtml = (str) => {
- const wrapper = document.createElement('div');
- wrapper.innerHTML = str;
- return wrapper.firstChild;
- };
-
- beforeEach(() => {
- mockInstance = {
- eventManager: { addEventType: jest.fn(), removeEventHandler: jest.fn(), listen: jest.fn() },
- editor: {
- exec: jest.fn(),
- isWysiwygMode: jest.fn(),
- getSquire: jest.fn(),
- insertText: jest.fn(),
- },
- invoke: jest.fn(),
- toMarkOptions: {
- renderer: {
- constructor: {
- factory: jest.fn(),
- },
- },
- },
- };
- event = 'someCustomEvent';
- handler = jest.fn();
- });
-
- describe('generateToolbarItem', () => {
- const config = {
- icon: 'bold',
- command: 'some-command',
- tooltip: 'Some Tooltip',
- event: 'some-event',
- };
-
- const generatedItem = generateToolbarItem(config);
-
- it('generates the correct command', () => {
- expect(generatedItem.options.command).toBe(config.command);
- });
-
- it('generates the correct event', () => {
- expect(generatedItem.options.event).toBe(config.event);
- });
-
- it('generates a divider when isDivider is set to true', () => {
- const isDivider = true;
-
- expect(generateToolbarItem({ isDivider })).toBe('divider');
- });
- });
-
- describe('addCustomEventListener', () => {
- it('registers an event type on the instance and adds an event handler', () => {
- addCustomEventListener(mockInstance, event, handler);
-
- expect(mockInstance.eventManager.addEventType).toHaveBeenCalledWith(event);
- expect(mockInstance.eventManager.listen).toHaveBeenCalledWith(event, handler);
- });
- });
-
- describe('removeCustomEventListener', () => {
- it('removes an event handler from the instance', () => {
- removeCustomEventListener(mockInstance, event, handler);
-
- expect(mockInstance.eventManager.removeEventHandler).toHaveBeenCalledWith(event, handler);
- });
- });
-
- describe('addImage', () => {
- const file = new File([], 'some-file.jpg');
- const mockImage = { imageUrl: 'some/url.png', altText: 'some alt text' };
-
- it('calls the insertElement method on the squire instance when in WYSIWYG mode', () => {
- jest.spyOn(URL, 'createObjectURL');
- mockInstance.editor.isWysiwygMode.mockReturnValue(true);
- mockInstance.editor.getSquire.mockReturnValue({ insertElement: jest.fn() });
-
- addImage(mockInstance, mockImage, file);
-
- expect(mockInstance.editor.getSquire().insertElement).toHaveBeenCalled();
- expect(global.URL.createObjectURL).toHaveBeenLastCalledWith(file);
- });
-
- it('calls the insertText method on the instance when in Markdown mode', () => {
- mockInstance.editor.isWysiwygMode.mockReturnValue(false);
- addImage(mockInstance, mockImage, file);
-
- expect(mockInstance.editor.insertText).toHaveBeenCalledWith('![some alt text](some/url.png)');
- });
- });
-
- describe('insertVideo', () => {
- const mockUrl = 'some/url';
- const htmlString = `<figure contenteditable="false" class="gl-relative gl-h-0 video_container"><iframe class="gl-absolute gl-top-0 gl-left-0 gl-w-full gl-h-full" width="560" height="315" frameborder="0" src="some/url"></iframe></figure>`;
- const mockInsertElement = jest.fn();
-
- beforeEach(() =>
- mockInstance.editor.getSquire.mockReturnValue({ insertElement: mockInsertElement }),
- );
-
- describe('WYSIWYG mode', () => {
- it('calls the insertElement method on the squire instance with an iFrame element', () => {
- mockInstance.editor.isWysiwygMode.mockReturnValue(true);
-
- insertVideo(mockInstance, mockUrl);
-
- expect(mockInstance.editor.getSquire().insertElement).toHaveBeenCalledWith(
- parseHtml(htmlString),
- );
- });
- });
-
- describe('Markdown mode', () => {
- it('calls the insertText method on the editor instance with the iFrame element HTML', () => {
- mockInstance.editor.isWysiwygMode.mockReturnValue(false);
-
- insertVideo(mockInstance, mockUrl);
-
- expect(mockInstance.editor.insertText).toHaveBeenCalledWith(htmlString);
- });
- });
- });
-
- describe('getMarkdown', () => {
- it('calls the invoke method on the instance', () => {
- getMarkdown(mockInstance);
-
- expect(mockInstance.invoke).toHaveBeenCalledWith('getMarkdown');
- });
- });
-
- describe('registerHTMLToMarkdownRenderer', () => {
- let baseRenderer;
- const htmlToMarkdownRenderer = {};
- const extendedRenderer = {};
-
- beforeEach(() => {
- baseRenderer = mockInstance.toMarkOptions.renderer;
- buildHTMLToMarkdownRenderer.mockReturnValueOnce(htmlToMarkdownRenderer);
- baseRenderer.constructor.factory.mockReturnValueOnce(extendedRenderer);
-
- registerHTMLToMarkdownRenderer(mockInstance);
- });
-
- it('builds a new instance of the HTML to Markdown renderer', () => {
- expect(buildHTMLToMarkdownRenderer).toHaveBeenCalledWith(baseRenderer);
- });
-
- it('extends base renderer with the HTML to Markdown renderer', () => {
- expect(baseRenderer.constructor.factory).toHaveBeenCalledWith(
- baseRenderer,
- htmlToMarkdownRenderer,
- );
- });
-
- it('replaces the default renderer with extended renderer', () => {
- expect(mockInstance.toMarkOptions.renderer).toBe(extendedRenderer);
- });
- });
-
- describe('getEditorOptions', () => {
- const externalOptions = {
- customRenderers: {},
- };
- const renderer = {};
-
- beforeEach(() => {
- buildCustomRenderer.mockReturnValueOnce(renderer);
- });
-
- it('generates a configuration object with a custom HTML renderer and toolbarItems', () => {
- expect(getEditorOptions()).toHaveProp('customHTMLRenderer', renderer);
- expect(getEditorOptions()).toHaveProp('toolbarItems');
- });
-
- it('passes external renderers to the buildCustomRenderers function', () => {
- getEditorOptions(externalOptions);
- expect(buildCustomRenderer).toHaveBeenCalledWith(externalOptions.customRenderers);
- });
-
- it('uses the internal sanitizeHTML service for HTML sanitization', () => {
- const options = getEditorOptions();
- const html = '<div></div>';
-
- options.customHTMLSanitizer(html);
-
- expect(sanitizeHTML).toHaveBeenCalledWith(html);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/modals/add_image/add_image_modal_spec.js b/spec/frontend/static_site_editor/rich_content_editor/modals/add_image/add_image_modal_spec.js
deleted file mode 100644
index c8c9f45618d..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/modals/add_image/add_image_modal_spec.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { GlModal, GlTabs } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { IMAGE_TABS } from '~/static_site_editor/rich_content_editor/constants';
-import AddImageModal from '~/static_site_editor/rich_content_editor/modals/add_image/add_image_modal.vue';
-import UploadImageTab from '~/static_site_editor/rich_content_editor/modals/add_image/upload_image_tab.vue';
-
-describe('Add Image Modal', () => {
- let wrapper;
- const propsData = { imageRoot: 'path/to/root/' };
-
- const findModal = () => wrapper.find(GlModal);
- const findTabs = () => wrapper.find(GlTabs);
- const findUploadImageTab = () => wrapper.find(UploadImageTab);
- const findUrlInput = () => wrapper.find({ ref: 'urlInput' });
- const findDescriptionInput = () => wrapper.find({ ref: 'descriptionInput' });
-
- beforeEach(() => {
- wrapper = shallowMount(AddImageModal, { propsData });
- });
-
- describe('when content is loaded', () => {
- it('renders a modal component', () => {
- expect(findModal().exists()).toBe(true);
- });
-
- it('renders a Tabs component', () => {
- expect(findTabs().exists()).toBe(true);
- });
-
- it('renders an upload image tab', () => {
- expect(findUploadImageTab().exists()).toBe(true);
- });
-
- it('renders an input to add an image URL', () => {
- expect(findUrlInput().exists()).toBe(true);
- });
-
- it('renders an input to add an image description', () => {
- expect(findDescriptionInput().exists()).toBe(true);
- });
- });
-
- describe('add image', () => {
- describe('Upload', () => {
- it('validates the file', () => {
- const preventDefault = jest.fn();
- const description = 'some description';
- const file = { name: 'some_file.png' };
-
- wrapper.vm.$refs.uploadImageTab = { validateFile: jest.fn() };
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ file, description, tabIndex: IMAGE_TABS.UPLOAD_TAB });
-
- findModal().vm.$emit('ok', { preventDefault });
-
- expect(wrapper.vm.$refs.uploadImageTab.validateFile).toHaveBeenCalled();
- });
- });
-
- describe('URL', () => {
- it('emits an addImage event when a valid URL is specified', () => {
- const preventDefault = jest.fn();
- const mockImage = { imageUrl: '/some/valid/url.png', description: 'some description' };
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ ...mockImage, tabIndex: IMAGE_TABS.URL_TAB });
-
- findModal().vm.$emit('ok', { preventDefault });
- expect(preventDefault).not.toHaveBeenCalled();
- expect(wrapper.emitted('addImage')).toEqual([
- [{ imageUrl: mockImage.imageUrl, altText: mockImage.description }],
- ]);
- });
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/modals/add_image/upload_image_tab_spec.js b/spec/frontend/static_site_editor/rich_content_editor/modals/add_image/upload_image_tab_spec.js
deleted file mode 100644
index 11b73d58259..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/modals/add_image/upload_image_tab_spec.js
+++ /dev/null
@@ -1,41 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import UploadImageTab from '~/static_site_editor/rich_content_editor/modals/add_image/upload_image_tab.vue';
-
-describe('Upload Image Tab', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = shallowMount(UploadImageTab);
- });
-
- afterEach(() => wrapper.destroy());
-
- const triggerInputEvent = (size) => {
- const file = { size, name: 'file-name.png' };
- const mockEvent = new Event('input');
-
- Object.defineProperty(mockEvent, 'target', { value: { files: [file] } });
-
- wrapper.find({ ref: 'fileInput' }).element.dispatchEvent(mockEvent);
-
- return file;
- };
-
- describe('onInput', () => {
- it.each`
- size | fileError
- ${2000000000} | ${'Maximum file size is 2MB. Please select a smaller file.'}
- ${200} | ${null}
- `('validates the file correctly', ({ size, fileError }) => {
- triggerInputEvent(size);
-
- expect(wrapper.vm.fileError).toBe(fileError);
- });
- });
-
- it('emits input event when file is valid', () => {
- const file = triggerInputEvent(200);
-
- expect(wrapper.emitted('input')).toEqual([[file]]);
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/modals/insert_video_modal_spec.js b/spec/frontend/static_site_editor/rich_content_editor/modals/insert_video_modal_spec.js
deleted file mode 100644
index 392d31bf039..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/modals/insert_video_modal_spec.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import { GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import InsertVideoModal from '~/static_site_editor/rich_content_editor/modals/insert_video_modal.vue';
-
-describe('Insert Video Modal', () => {
- let wrapper;
-
- const findModal = () => wrapper.find(GlModal);
- const findUrlInput = () => wrapper.find({ ref: 'urlInput' });
-
- const triggerInsertVideo = (url) => {
- const preventDefault = jest.fn();
- findUrlInput().vm.$emit('input', url);
- findModal().vm.$emit('primary', { preventDefault });
- };
-
- beforeEach(() => {
- wrapper = shallowMount(InsertVideoModal);
- });
-
- afterEach(() => wrapper.destroy());
-
- describe('when content is loaded', () => {
- it('renders a modal component', () => {
- expect(findModal().exists()).toBe(true);
- });
-
- it('renders an input to add a URL', () => {
- expect(findUrlInput().exists()).toBe(true);
- });
- });
-
- describe('insert video', () => {
- it.each`
- url | emitted
- ${'https://www.youtube.com/embed/someId'} | ${[['https://www.youtube.com/embed/someId']]}
- ${'https://www.youtube.com/watch?v=1234'} | ${[['https://www.youtube.com/embed/1234']]}
- ${'::youtube.com/invalid/url'} | ${undefined}
- `('formats the url correctly', ({ url, emitted }) => {
- triggerInsertVideo(url);
- expect(wrapper.emitted('insertVideo')).toEqual(emitted);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_integration_spec.js b/spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_integration_spec.js
deleted file mode 100644
index 6c02ec506c6..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_integration_spec.js
+++ /dev/null
@@ -1,69 +0,0 @@
-import Editor from '@toast-ui/editor';
-import buildMarkdownToHTMLRenderer from '~/static_site_editor/rich_content_editor/services/build_custom_renderer';
-import { registerHTMLToMarkdownRenderer } from '~/static_site_editor/rich_content_editor/services/editor_service';
-
-describe('static_site_editor/rich_content_editor', () => {
- let editor;
-
- const buildEditor = () => {
- editor = new Editor({
- el: document.body,
- customHTMLRenderer: buildMarkdownToHTMLRenderer(),
- });
-
- registerHTMLToMarkdownRenderer(editor);
- };
-
- beforeEach(() => {
- buildEditor();
- });
-
- describe('HTML to Markdown', () => {
- it('uses "-" character list marker in unordered lists', () => {
- editor.setHtml('<ul><li>List item 1</li><li>List item 2</li></ul>');
-
- const markdown = editor.getMarkdown();
-
- expect(markdown).toBe('- List item 1\n- List item 2');
- });
-
- it('does not increment the list marker in ordered lists', () => {
- editor.setHtml('<ol><li>List item 1</li><li>List item 2</li></ol>');
-
- const markdown = editor.getMarkdown();
-
- expect(markdown).toBe('1. List item 1\n1. List item 2');
- });
-
- it('indents lists using four spaces', () => {
- editor.setHtml('<ul><li>List item 1</li><ul><li>List item 2</li></ul></ul>');
-
- const markdown = editor.getMarkdown();
-
- expect(markdown).toBe('- List item 1\n - List item 2');
- });
-
-    it('uses ** for strong and _ for emphasis text', () => {
- editor.setHtml('<strong>strong text</strong><i>emphasis text</i>');
-
- const markdown = editor.getMarkdown();
-
- expect(markdown).toBe('**strong text**_emphasis text_');
- });
- });
-
- describe('Markdown to HTML', () => {
- it.each`
- input | output
- ${'markdown with _emphasized\ntext_'} | ${'<p>markdown with <em>emphasized text</em></p>\n'}
- ${'markdown with **strong\ntext**'} | ${'<p>markdown with <strong>strong text</strong></p>\n'}
- `(
-      'does not transform softbreaks inside emphasis (_) and strong (**) nodes into <br/> tags',
- ({ input, output }) => {
- editor.setMarkdown(input);
-
- expect(editor.getHtml()).toBe(output);
- },
- );
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_spec.js b/spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_spec.js
deleted file mode 100644
index 3b0d2993a5d..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/rich_content_editor_spec.js
+++ /dev/null
@@ -1,222 +0,0 @@
-import { Editor, mockEditorApi } from '@toast-ui/vue-editor';
-import { shallowMount } from '@vue/test-utils';
-import {
- EDITOR_TYPES,
- EDITOR_HEIGHT,
- EDITOR_PREVIEW_STYLE,
- CUSTOM_EVENTS,
-} from '~/static_site_editor/rich_content_editor/constants';
-import AddImageModal from '~/static_site_editor/rich_content_editor/modals/add_image/add_image_modal.vue';
-import InsertVideoModal from '~/static_site_editor/rich_content_editor/modals/insert_video_modal.vue';
-import RichContentEditor from '~/static_site_editor/rich_content_editor/rich_content_editor.vue';
-
-import {
- addCustomEventListener,
- removeCustomEventListener,
- addImage,
- insertVideo,
- registerHTMLToMarkdownRenderer,
- getEditorOptions,
- getMarkdown,
-} from '~/static_site_editor/rich_content_editor/services/editor_service';
-
-jest.mock('~/static_site_editor/rich_content_editor/services/editor_service', () => ({
- addCustomEventListener: jest.fn(),
- removeCustomEventListener: jest.fn(),
- addImage: jest.fn(),
- insertVideo: jest.fn(),
- registerHTMLToMarkdownRenderer: jest.fn(),
- getEditorOptions: jest.fn(),
- getMarkdown: jest.fn(),
-}));
-
-describe('Rich Content Editor', () => {
- let wrapper;
-
- const content = '## Some Markdown';
- const imageRoot = 'path/to/root/';
- const findEditor = () => wrapper.find({ ref: 'editor' });
- const findAddImageModal = () => wrapper.find(AddImageModal);
- const findInsertVideoModal = () => wrapper.find(InsertVideoModal);
-
- const buildWrapper = async () => {
- wrapper = shallowMount(RichContentEditor, {
- propsData: { content, imageRoot },
- stubs: {
- ToastEditor: Editor,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('when content is loaded', () => {
- const editorOptions = {};
-
- beforeEach(() => {
- getEditorOptions.mockReturnValueOnce(editorOptions);
- buildWrapper();
- });
-
- it('renders an editor', () => {
- expect(findEditor().exists()).toBe(true);
- });
-
- it('renders the correct content', () => {
- expect(findEditor().props().initialValue).toBe(content);
- });
-
- it('provides options generated by the getEditorOptions service', () => {
- expect(findEditor().props().options).toBe(editorOptions);
- });
-
- it('has the correct preview style', () => {
- expect(findEditor().props().previewStyle).toBe(EDITOR_PREVIEW_STYLE);
- });
-
- it('has the correct initial edit type', () => {
- expect(findEditor().props().initialEditType).toBe(EDITOR_TYPES.wysiwyg);
- });
-
- it('has the correct height', () => {
- expect(findEditor().props().height).toBe(EDITOR_HEIGHT);
- });
- });
-
- describe('when content is changed', () => {
- beforeEach(() => {
- buildWrapper();
- });
-
- it('emits an input event with the changed content', () => {
- const changedMarkdown = '## Changed Markdown';
- getMarkdown.mockReturnValueOnce(changedMarkdown);
-
- findEditor().vm.$emit('change');
-
- expect(wrapper.emitted().input[0][0]).toBe(changedMarkdown);
- });
- });
-
- describe('when content is reset', () => {
- beforeEach(() => {
- buildWrapper();
- });
-
- it('should reset the content via setMarkdown', () => {
- const newContent = 'Just the body content excluding the front matter for example';
- const mockInstance = { invoke: jest.fn() };
- wrapper.vm.$refs.editor = mockInstance;
-
- wrapper.vm.resetInitialValue(newContent);
-
- expect(mockInstance.invoke).toHaveBeenCalledWith('setMarkdown', newContent);
- });
- });
-
- describe('when editor is loaded', () => {
- const formattedMarkdown = 'formatted markdown';
-
- beforeEach(() => {
- mockEditorApi.getMarkdown.mockReturnValueOnce(formattedMarkdown);
- buildWrapper();
- });
-
- afterEach(() => {
- mockEditorApi.getMarkdown.mockReset();
- });
-
- it('adds the CUSTOM_EVENTS.openAddImageModal custom event listener', () => {
- expect(addCustomEventListener).toHaveBeenCalledWith(
- wrapper.vm.editorApi,
- CUSTOM_EVENTS.openAddImageModal,
- wrapper.vm.onOpenAddImageModal,
- );
- });
-
- it('adds the CUSTOM_EVENTS.openInsertVideoModal custom event listener', () => {
- expect(addCustomEventListener).toHaveBeenCalledWith(
- wrapper.vm.editorApi,
- CUSTOM_EVENTS.openInsertVideoModal,
- wrapper.vm.onOpenInsertVideoModal,
- );
- });
-
- it('registers HTML to markdown renderer', () => {
- expect(registerHTMLToMarkdownRenderer).toHaveBeenCalledWith(wrapper.vm.editorApi);
- });
-
- it('emits load event with the markdown formatted by Toast UI', () => {
- mockEditorApi.getMarkdown.mockReturnValueOnce(formattedMarkdown);
- expect(mockEditorApi.getMarkdown).toHaveBeenCalled();
- expect(wrapper.emitted('load')[0]).toEqual([{ formattedMarkdown }]);
- });
- });
-
- describe('when editor is destroyed', () => {
- beforeEach(() => {
- buildWrapper();
- });
-
- it('removes the CUSTOM_EVENTS.openAddImageModal custom event listener', () => {
- wrapper.vm.$destroy();
-
- expect(removeCustomEventListener).toHaveBeenCalledWith(
- wrapper.vm.editorApi,
- CUSTOM_EVENTS.openAddImageModal,
- wrapper.vm.onOpenAddImageModal,
- );
- });
-
- it('removes the CUSTOM_EVENTS.openInsertVideoModal custom event listener', () => {
- wrapper.vm.$destroy();
-
- expect(removeCustomEventListener).toHaveBeenCalledWith(
- wrapper.vm.editorApi,
- CUSTOM_EVENTS.openInsertVideoModal,
- wrapper.vm.onOpenInsertVideoModal,
- );
- });
- });
-
- describe('add image modal', () => {
- beforeEach(() => {
- buildWrapper();
- });
-
- it('renders an addImageModal component', () => {
- expect(findAddImageModal().exists()).toBe(true);
- });
-
- it('calls the onAddImage method when the addImage event is emitted', () => {
- const mockImage = { imageUrl: 'some/url.png', altText: 'some description' };
- const mockInstance = { exec: jest.fn() };
- wrapper.vm.$refs.editor = mockInstance;
-
- findAddImageModal().vm.$emit('addImage', mockImage);
- expect(addImage).toHaveBeenCalledWith(mockInstance, mockImage, undefined);
- });
- });
-
- describe('insert video modal', () => {
- beforeEach(() => {
- buildWrapper();
- });
-
- it('renders an insertVideoModal component', () => {
- expect(findInsertVideoModal().exists()).toBe(true);
- });
-
- it('calls the onInsertVideo method when the insertVideo event is emitted', () => {
- const mockUrl = 'https://www.youtube.com/embed/someId';
- const mockInstance = { exec: jest.fn() };
- wrapper.vm.$refs.editor = mockInstance;
-
- findInsertVideoModal().vm.$emit('insertVideo', mockUrl);
- expect(insertVideo).toHaveBeenCalledWith(mockInstance, mockUrl);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/build_custom_renderer_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/build_custom_renderer_spec.js
deleted file mode 100644
index 202e13e8bff..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/build_custom_renderer_spec.js
+++ /dev/null
@@ -1,32 +0,0 @@
-import buildCustomHTMLRenderer from '~/static_site_editor/rich_content_editor/services/build_custom_renderer';
-
-describe('Build Custom Renderer Service', () => {
- describe('buildCustomHTMLRenderer', () => {
-    it('should return an object with the default renderer functions when called without arguments', () => {
- expect(buildCustomHTMLRenderer()).toEqual(
- expect.objectContaining({
- htmlBlock: expect.any(Function),
- htmlInline: expect.any(Function),
- heading: expect.any(Function),
- item: expect.any(Function),
- paragraph: expect.any(Function),
- text: expect.any(Function),
- softbreak: expect.any(Function),
- }),
- );
- });
-
- it('should return an object with both custom and default renderer functions when passed customRenderers', () => {
- const mockHtmlCustomRenderer = jest.fn();
- const customRenderers = {
- html: [mockHtmlCustomRenderer],
- };
-
- expect(buildCustomHTMLRenderer(customRenderers)).toEqual(
- expect.objectContaining({
- html: expect.any(Function),
- }),
- );
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/build_html_to_markdown_renderer_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
deleted file mode 100644
index c9cba3e8689..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
+++ /dev/null
@@ -1,218 +0,0 @@
-import buildHTMLToMarkdownRenderer from '~/static_site_editor/rich_content_editor/services/build_html_to_markdown_renderer';
-import { attributeDefinition } from './renderers/mock_data';
-
-describe('rich_content_editor/services/html_to_markdown_renderer', () => {
- let baseRenderer;
- let htmlToMarkdownRenderer;
- let fakeNode;
-
- beforeEach(() => {
- baseRenderer = {
- trim: jest.fn((input) => `trimmed ${input}`),
- getSpaceCollapsedText: jest.fn((input) => `space collapsed ${input}`),
- getSpaceControlled: jest.fn((input) => `space controlled ${input}`),
- convert: jest.fn(),
- };
-
- fakeNode = { nodeValue: 'mock_node', dataset: {} };
- });
-
- afterEach(() => {
- htmlToMarkdownRenderer = null;
- });
-
- describe('TEXT_NODE visitor', () => {
- it('composes getSpaceControlled, getSpaceCollapsedText, and trim services', () => {
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer);
-
- expect(htmlToMarkdownRenderer.TEXT_NODE(fakeNode)).toBe(
- `space controlled trimmed space collapsed ${fakeNode.nodeValue}`,
- );
- });
- });
-
- describe('LI OL, LI UL visitor', () => {
- const oneLevelNestedList = '\n * List item 1\n * List item 2';
- const twoLevelNestedList = '\n * List item 1\n * List item 2';
- const spaceInContentList = '\n * List item 1\n * List item 2';
-
- it.each`
- list | indentSpaces | result
- ${oneLevelNestedList} | ${2} | ${'\n * List item 1\n * List item 2'}
- ${oneLevelNestedList} | ${3} | ${'\n * List item 1\n * List item 2'}
- ${oneLevelNestedList} | ${6} | ${'\n * List item 1\n * List item 2'}
- ${twoLevelNestedList} | ${4} | ${'\n * List item 1\n * List item 2'}
- ${spaceInContentList} | ${1} | ${'\n * List item 1\n * List item 2'}
- `('changes the list indentation to $indentSpaces spaces', ({ list, indentSpaces, result }) => {
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
- subListIndentSpaces: indentSpaces,
- });
-
- baseRenderer.convert.mockReturnValueOnce(list);
-
- expect(htmlToMarkdownRenderer['LI OL, LI UL'](fakeNode, list)).toBe(result);
- expect(baseRenderer.convert).toHaveBeenCalledWith(fakeNode, list);
- });
- });
-
- describe('UL LI visitor', () => {
- it.each`
- listItem | unorderedListBulletChar | result | bulletChar
- ${'* list item'} | ${undefined} | ${'- list item'} | ${'default'}
- ${' - list item'} | ${'*'} | ${' * list item'} | ${'*'}
- ${' * list item'} | ${'-'} | ${' - list item'} | ${'-'}
- `(
- 'uses $bulletChar bullet char in unordered list items when $unorderedListBulletChar is set in config',
- ({ listItem, unorderedListBulletChar, result }) => {
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
- unorderedListBulletChar,
- });
- baseRenderer.convert.mockReturnValueOnce(listItem);
-
- expect(htmlToMarkdownRenderer['UL LI'](fakeNode, listItem)).toBe(result);
- expect(baseRenderer.convert).toHaveBeenCalledWith(fakeNode, listItem);
- },
- );
-
- it('detects attribute definitions and attaches them to the list item', () => {
- const listItem = '- list item';
- const result = `${listItem}\n${attributeDefinition}\n`;
-
- fakeNode.dataset.attributeDefinition = attributeDefinition;
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer);
- baseRenderer.convert.mockReturnValueOnce(`${listItem}\n`);
-
- expect(htmlToMarkdownRenderer['UL LI'](fakeNode, listItem)).toBe(result);
- });
- });
-
- describe('OL LI visitor', () => {
- it.each`
- listItem | result | incrementListMarker | action
- ${'2. list item'} | ${'1. list item'} | ${false} | ${'increments'}
- ${' 3. list item'} | ${' 1. list item'} | ${false} | ${'increments'}
- ${' 123. list item'} | ${' 1. list item'} | ${false} | ${'increments'}
- ${'3. list item'} | ${'3. list item'} | ${true} | ${'does not increment'}
- `(
-      '$action a list item counter when incrementListMarker is $incrementListMarker',
- ({ listItem, result, incrementListMarker }) => {
- const subContent = null;
-
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
- incrementListMarker,
- });
- baseRenderer.convert.mockReturnValueOnce(listItem);
-
- expect(htmlToMarkdownRenderer['OL LI'](fakeNode, subContent)).toBe(result);
- expect(baseRenderer.convert).toHaveBeenCalledWith(fakeNode, subContent);
- },
- );
- });
-
- describe('STRONG, B visitor', () => {
- it.each`
- input | strongCharacter | result
- ${'**strong text**'} | ${'_'} | ${'__strong text__'}
- ${'__strong text__'} | ${'*'} | ${'**strong text**'}
- `(
- 'converts $input to $result when strong character is $strongCharacter',
- ({ input, strongCharacter, result }) => {
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
- strong: strongCharacter,
- });
-
- baseRenderer.convert.mockReturnValueOnce(input);
-
- expect(htmlToMarkdownRenderer['STRONG, B'](fakeNode, input)).toBe(result);
- expect(baseRenderer.convert).toHaveBeenCalledWith(fakeNode, input);
- },
- );
- });
-
- describe('EM, I visitor', () => {
- it.each`
- input | emphasisCharacter | result
- ${'*strong text*'} | ${'_'} | ${'_strong text_'}
- ${'_strong text_'} | ${'*'} | ${'*strong text*'}
- `(
- 'converts $input to $result when emphasis character is $emphasisCharacter',
- ({ input, emphasisCharacter, result }) => {
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
- emphasis: emphasisCharacter,
- });
-
- baseRenderer.convert.mockReturnValueOnce(input);
-
- expect(htmlToMarkdownRenderer['EM, I'](fakeNode, input)).toBe(result);
- expect(baseRenderer.convert).toHaveBeenCalledWith(fakeNode, input);
- },
- );
- });
-
- describe('H1, H2, H3, H4, H5, H6 visitor', () => {
- it('detects attribute definitions and attaches them to the heading', () => {
- const heading = 'heading text';
- const result = `${heading.trimRight()}\n${attributeDefinition}\n\n`;
-
- fakeNode.dataset.attributeDefinition = attributeDefinition;
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer);
- baseRenderer.convert.mockReturnValueOnce(`${heading}\n\n`);
-
- expect(htmlToMarkdownRenderer['H1, H2, H3, H4, H5, H6'](fakeNode, heading)).toBe(result);
- });
- });
-
- describe('PRE CODE', () => {
- let node;
- const subContent = 'sub content';
- const originalConverterResult = 'base result';
-
- beforeEach(() => {
- node = document.createElement('PRE');
-
- node.innerText = 'reference definition content';
- node.dataset.sseReferenceDefinition = true;
-
- baseRenderer.convert.mockReturnValueOnce(originalConverterResult);
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer);
- });
-
-    it('returns raw text when the pre node has the sse-reference-definition data attribute', () => {
- expect(htmlToMarkdownRenderer['PRE CODE'](node, subContent)).toBe(
- `\n\n${node.innerText}\n\n`,
- );
- });
-
-    it('returns the base result when the pre node does not have the sse-reference-definition data attribute', () => {
- delete node.dataset.sseReferenceDefinition;
-
- expect(htmlToMarkdownRenderer['PRE CODE'](node, subContent)).toBe(originalConverterResult);
- });
- });
-
- describe('IMG', () => {
- const originalSrc = 'path/to/image.png';
- const alt = 'alt text';
- let node;
-
- beforeEach(() => {
- node = document.createElement('img');
- node.alt = alt;
- node.src = originalSrc;
- });
-
-    it('returns an image with its original src if the `original-src` attribute is present', () => {
- node.dataset.originalSrc = originalSrc;
- node.src = 'modified/path/to/image.png';
-
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer);
-
- expect(htmlToMarkdownRenderer.IMG(node)).toBe(`![${alt}](${originalSrc})`);
- });
-
-    it('falls back to `src` if no `original-src` is specified on the image', () => {
- htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer);
- expect(htmlToMarkdownRenderer.IMG(node)).toBe(`![${alt}](${originalSrc})`);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token_spec.js
deleted file mode 100644
index ef3ff052cb2..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token_spec.js
+++ /dev/null
@@ -1,88 +0,0 @@
-import {
- buildTextToken,
- buildUneditableOpenTokens,
- buildUneditableCloseToken,
- buildUneditableCloseTokens,
- buildUneditableBlockTokens,
- buildUneditableInlineTokens,
- buildUneditableHtmlAsTextTokens,
-} from '~/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token';
-
-import {
- originInlineToken,
- originToken,
- uneditableOpenTokens,
- uneditableCloseToken,
- uneditableCloseTokens,
- uneditableBlockTokens,
- uneditableInlineTokens,
- uneditableTokens,
-} from './mock_data';
-
-describe('Build Uneditable Token renderer helper', () => {
- describe('buildTextToken', () => {
- it('returns an object literal representing a text token', () => {
- const text = originToken.content;
- expect(buildTextToken(text)).toStrictEqual(originToken);
- });
- });
-
- describe('buildUneditableOpenTokens', () => {
- it('returns a 2-item array of tokens with the originToken appended to an open token', () => {
- const result = buildUneditableOpenTokens(originToken);
-
- expect(result).toHaveLength(2);
- expect(result).toStrictEqual(uneditableOpenTokens);
- });
- });
-
- describe('buildUneditableCloseToken', () => {
- it('returns an object literal representing the uneditable close token', () => {
- expect(buildUneditableCloseToken()).toStrictEqual(uneditableCloseToken);
- });
- });
-
- describe('buildUneditableCloseTokens', () => {
- it('returns a 2-item array of tokens with the originToken prepended to a close token', () => {
- const result = buildUneditableCloseTokens(originToken);
-
- expect(result).toHaveLength(2);
- expect(result).toStrictEqual(uneditableCloseTokens);
- });
- });
-
- describe('buildUneditableBlockTokens', () => {
- it('returns a 3-item array of tokens with the originToken wrapped in the middle of block tokens', () => {
- const result = buildUneditableBlockTokens(originToken);
-
- expect(result).toHaveLength(3);
- expect(result).toStrictEqual(uneditableTokens);
- });
- });
-
- describe('buildUneditableInlineTokens', () => {
- it('returns a 3-item array of tokens with the originInlineToken wrapped in the middle of inline tokens', () => {
- const result = buildUneditableInlineTokens(originInlineToken);
-
- expect(result).toHaveLength(3);
- expect(result).toStrictEqual(uneditableInlineTokens);
- });
- });
-
- describe('buildUneditableHtmlAsTextTokens', () => {
- it('returns a 3-item array of tokens with the htmlBlockNode wrapped as a text token in the middle of block tokens', () => {
- const htmlBlockNode = {
- type: 'htmlBlock',
- literal: '<div data-tomark-pass ><h1>Some header</h1><p>Some paragraph</p></div>',
- };
- const result = buildUneditableHtmlAsTextTokens(htmlBlockNode);
- const { type, content } = result[1];
-
- expect(type).toBe('text');
- expect(content).not.toMatch(/ data-tomark-pass /);
-
- expect(result).toHaveLength(3);
- expect(result).toStrictEqual(uneditableBlockTokens);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/mock_data.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/mock_data.js
deleted file mode 100644
index 407072fb596..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/mock_data.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// Node spec helpers
-
-export const buildMockTextNode = (literal) => ({ literal, type: 'text' });
-
-export const normalTextNode = buildMockTextNode('This is just normal text.');
-
-// Token spec helpers
-
-const buildMockUneditableOpenToken = (type) => {
- return {
- type: 'openTag',
- tagName: type,
- attributes: { contenteditable: false },
- classNames: [
- 'gl-px-4 gl-py-2 gl-my-5 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',
- ],
- };
-};
-
-const buildMockTextToken = (content) => {
- return {
- type: 'text',
- tagName: null,
- content,
- };
-};
-
-const buildMockUneditableCloseToken = (type) => ({ type: 'closeTag', tagName: type });
-
-export const originToken = buildMockTextToken('{:.no_toc .hidden-md .hidden-lg}');
-const uneditableOpenToken = buildMockUneditableOpenToken('div');
-export const uneditableOpenTokens = [uneditableOpenToken, originToken];
-export const uneditableCloseToken = buildMockUneditableCloseToken('div');
-export const uneditableCloseTokens = [originToken, uneditableCloseToken];
-export const uneditableTokens = [...uneditableOpenTokens, uneditableCloseToken];
-
-export const originInlineToken = {
- type: 'text',
- content: '<i>Inline</i> content',
-};
-
-export const uneditableInlineTokens = [
- buildMockUneditableOpenToken('a'),
- originInlineToken,
- buildMockUneditableCloseToken('a'),
-];
-
-export const uneditableBlockTokens = [
- uneditableOpenToken,
- buildMockTextToken('<div><h1>Some header</h1><p>Some paragraph</p></div>'),
- uneditableCloseToken,
-];
-
-export const attributeDefinition = '{:.no_toc .hidden-md .hidden-lg}';
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_attribute_definition_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_attribute_definition_spec.js
deleted file mode 100644
index 6d96dd3bbca..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_attribute_definition_spec.js
+++ /dev/null
@@ -1,25 +0,0 @@
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_attribute_definition';
-import { attributeDefinition } from './mock_data';
-
-describe('rich_content_editor/renderers/render_attribute_definition', () => {
- describe('canRender', () => {
- it.each`
- input | result
- ${{ literal: attributeDefinition }} | ${true}
- ${{ literal: `FOO${attributeDefinition}` }} | ${false}
- ${{ literal: `${attributeDefinition}BAR` }} | ${false}
- ${{ literal: 'foobar' }} | ${false}
- `('returns $result when input is $input', ({ input, result }) => {
- expect(renderer.canRender(input)).toBe(result);
- });
- });
-
- describe('render', () => {
- it('returns an empty HTML comment', () => {
- expect(renderer.render()).toEqual({
- type: 'html',
- content: '<!-- sse-attribute-definition -->',
- });
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_embedded_ruby_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_embedded_ruby_spec.js
deleted file mode 100644
index 29e2b5b3b16..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_embedded_ruby_spec.js
+++ /dev/null
@@ -1,24 +0,0 @@
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_embedded_ruby_text';
-import { renderUneditableLeaf } from '~/static_site_editor/rich_content_editor/services/renderers/render_utils';
-
-import { buildMockTextNode, normalTextNode } from './mock_data';
-
-const embeddedRubyTextNode = buildMockTextNode('<%= partial("some/path") %>');
-
-describe('Render Embedded Ruby Text renderer', () => {
- describe('canRender', () => {
- it('should return true when the argument `literal` has embedded ruby syntax', () => {
- expect(renderer.canRender(embeddedRubyTextNode)).toBe(true);
- });
-
- it('should return false when the argument `literal` lacks embedded ruby syntax', () => {
- expect(renderer.canRender(normalTextNode)).toBe(false);
- });
- });
-
- describe('render', () => {
- it('should delegate rendering to the renderUneditableLeaf util', () => {
- expect(renderer.render).toBe(renderUneditableLeaf);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_font_awesome_html_inline_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_font_awesome_html_inline_spec.js
deleted file mode 100644
index 0fda847b688..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_font_awesome_html_inline_spec.js
+++ /dev/null
@@ -1,33 +0,0 @@
-import { buildUneditableInlineTokens } from '~/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token';
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_font_awesome_html_inline';
-
-import { normalTextNode } from './mock_data';
-
-const fontAwesomeInlineHtmlNode = {
- firstChild: null,
- literal: '<i class="far fa-paper-plane" id="biz-tech-icons">',
- type: 'html',
-};
-
-describe('Render Font Awesome Inline HTML renderer', () => {
- describe('canRender', () => {
- it('should return true when the argument `literal` has font awesome inline html syntax', () => {
- expect(renderer.canRender(fontAwesomeInlineHtmlNode)).toBe(true);
- });
-
- it('should return false when the argument `literal` lacks font awesome inline html syntax', () => {
- expect(renderer.canRender(normalTextNode)).toBe(false);
- });
- });
-
- describe('render', () => {
- it('should return uneditable inline tokens', () => {
- const token = { type: 'text', tagName: null, content: fontAwesomeInlineHtmlNode.literal };
- const context = { origin: () => token };
-
- expect(renderer.render(fontAwesomeInlineHtmlNode, context)).toStrictEqual(
- buildUneditableInlineTokens(token),
- );
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_heading_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_heading_spec.js
deleted file mode 100644
index cf4a90885df..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_heading_spec.js
+++ /dev/null
@@ -1,12 +0,0 @@
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_heading';
-import * as renderUtils from '~/static_site_editor/rich_content_editor/services/renderers/render_utils';
-
-describe('rich_content_editor/renderers/render_heading', () => {
- it('canRender delegates to renderUtils.willAlwaysRender', () => {
- expect(renderer.canRender).toBe(renderUtils.willAlwaysRender);
- });
-
- it('render delegates to renderUtils.renderWithAttributeDefinitions', () => {
- expect(renderer.render).toBe(renderUtils.renderWithAttributeDefinitions);
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_html_block_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_html_block_spec.js
deleted file mode 100644
index 9c937ac22f4..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_html_block_spec.js
+++ /dev/null
@@ -1,37 +0,0 @@
-import { buildUneditableHtmlAsTextTokens } from '~/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token';
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_html_block';
-
-describe('rich_content_editor/services/renderers/render_html_block', () => {
- const htmlBlockNode = {
- literal: '<div><h1>Heading</h1><p>Paragraph.</p></div>',
- type: 'htmlBlock',
- };
-
- describe('canRender', () => {
- it.each`
- input | result
- ${htmlBlockNode} | ${true}
- ${{ literal: '<iframe></iframe>', type: 'htmlBlock' }} | ${true}
- ${{ literal: '<iframe src="https://www.youtube.com"></iframe>', type: 'htmlBlock' }} | ${false}
- ${{ literal: '<iframe></iframe>', type: 'text' }} | ${false}
- `('returns $result when input=$input', ({ input, result }) => {
- expect(renderer.canRender(input)).toBe(result);
- });
- });
-
- describe('render', () => {
- const htmlBlockNodeToMark = {
- firstChild: null,
- literal: '<div data-to-mark ></div>',
- type: 'htmlBlock',
- };
-
- it.each`
- node
- ${htmlBlockNode}
- ${htmlBlockNodeToMark}
- `('should return uneditable tokens wrapping the $node as a token', ({ node }) => {
- expect(renderer.render(node)).toStrictEqual(buildUneditableHtmlAsTextTokens(node));
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js
deleted file mode 100644
index 15fb2c3a430..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_instance_text_spec.js
+++ /dev/null
@@ -1,55 +0,0 @@
-import { buildUneditableInlineTokens } from '~/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token';
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_identifier_instance_text';
-
-import { buildMockTextNode, normalTextNode } from './mock_data';
-
-const mockTextStart = 'Majority example ';
-const mockTextMiddle = '[environment terraform plans][terraform]';
-const mockTextEnd = '.';
-const identifierInstanceStartTextNode = buildMockTextNode(mockTextStart);
-const identifierInstanceEndTextNode = buildMockTextNode(mockTextEnd);
-
-describe('Render Identifier Instance Text renderer', () => {
- describe('canRender', () => {
- it.each`
- node | target
- ${normalTextNode} | ${false}
- ${identifierInstanceStartTextNode} | ${false}
- ${identifierInstanceEndTextNode} | ${false}
- ${buildMockTextNode(mockTextMiddle)} | ${true}
- ${buildMockTextNode('Minority example [environment terraform plans][]')} | ${true}
- ${buildMockTextNode('Minority example [environment terraform plans]')} | ${true}
- `(
- 'should return $target when the $node validates against identifier instance syntax',
- ({ node, target }) => {
- expect(renderer.canRender(node)).toBe(target);
- },
- );
- });
-
- describe('render', () => {
- it.each`
- start | middle | end
- ${mockTextStart} | ${mockTextMiddle} | ${mockTextEnd}
- ${mockTextStart} | ${'[environment terraform plans][]'} | ${mockTextEnd}
- ${mockTextStart} | ${'[environment terraform plans]'} | ${mockTextEnd}
- `(
- 'should return inline editable, uneditable, and editable tokens in sequence',
- ({ start, middle, end }) => {
- const buildMockTextToken = (content) => ({ type: 'text', tagName: null, content });
-
- const startToken = buildMockTextToken(start);
- const middleToken = buildMockTextToken(middle);
- const endToken = buildMockTextToken(end);
-
- const content = `${start}${middle}${end}`;
- const contentToken = buildMockTextToken(content);
- const contentNode = buildMockTextNode(content);
- const context = { origin: jest.fn().mockReturnValueOnce(contentToken) };
- expect(renderer.render(contentNode, context)).toStrictEqual(
- [startToken, buildUneditableInlineTokens(middleToken), endToken].flat(),
- );
- },
- );
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
deleted file mode 100644
index ddc96ed6832..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
+++ /dev/null
@@ -1,84 +0,0 @@
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_identifier_paragraph';
-
-import { buildMockTextNode } from './mock_data';
-
-const buildMockParagraphNode = (literal) => {
- return {
- firstChild: buildMockTextNode(literal),
- type: 'paragraph',
- };
-};
-
-const normalParagraphNode = buildMockParagraphNode(
- 'This is just normal paragraph. It has multiple sentences.',
-);
-const identifierParagraphNode = buildMockParagraphNode(
- `[another-identifier]: https://example.com "This example has a title" [identifier]: http://example1.com [this link]: http://example.org`,
-);
-
-describe('rich_content_editor/renderers_render_identifier_paragraph', () => {
- describe('canRender', () => {
- it.each`
- node | paragraph | target
- ${identifierParagraphNode} | ${'[Some text]: https://link.com'} | ${true}
- ${normalParagraphNode} | ${'Normal non-identifier text. Another sentence.'} | ${false}
- `(
- 'should return $target when the $node matches $paragraph syntax',
- ({ node, paragraph, target }) => {
- const context = {
- entering: true,
- getChildrenText: jest.fn().mockReturnValueOnce(paragraph),
- };
-
- expect(renderer.canRender(node, context)).toBe(target);
- },
- );
- });
-
- describe('render', () => {
- let context;
- let result;
-
- beforeEach(() => {
- const node = {
- firstChild: {
- type: 'text',
- literal: '[Some text]: https://link.com',
- next: {
- type: 'linebreak',
- next: {
- type: 'text',
- literal: '[identifier]: http://example1.com "title"',
- },
- },
- },
- };
- context = { skipChildren: jest.fn() };
- result = renderer.render(node, context);
- });
-
- it('renders the reference definitions as a code block', () => {
- expect(result).toEqual([
- {
- type: 'openTag',
- tagName: 'pre',
- classNames: ['code-block', 'language-markdown'],
- attributes: {
- 'data-sse-reference-definition': true,
- },
- },
- { type: 'openTag', tagName: 'code' },
- {
- type: 'text',
- content: '[Some text]: https://link.com\n[identifier]: http://example1.com "title"',
- },
- { type: 'closeTag', tagName: 'code' },
- { type: 'closeTag', tagName: 'pre' },
- ]);
- });
-
- it('skips the reference definition node children from rendering', () => {
- expect(context.skipChildren).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_list_item_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_list_item_spec.js
deleted file mode 100644
index 1e8e62b9dd2..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_list_item_spec.js
+++ /dev/null
@@ -1,12 +0,0 @@
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_list_item';
-import * as renderUtils from '~/static_site_editor/rich_content_editor/services/renderers/render_utils';
-
-describe('rich_content_editor/renderers/render_list_item', () => {
- it('canRender delegates to renderUtils.willAlwaysRender', () => {
- expect(renderer.canRender).toBe(renderUtils.willAlwaysRender);
- });
-
- it('render delegates to renderUtils.renderWithAttributeDefinitions', () => {
- expect(renderer.render).toBe(renderUtils.renderWithAttributeDefinitions);
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_softbreak_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_softbreak_spec.js
deleted file mode 100644
index d8d1e6ff295..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_softbreak_spec.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import renderer from '~/static_site_editor/rich_content_editor/services/renderers/render_softbreak';
-
-describe('Render softbreak renderer', () => {
- describe('canRender', () => {
- it.each`
- node | parentType | result
- ${{ parent: { type: 'emph' } }} | ${'emph'} | ${true}
- ${{ parent: { type: 'strong' } }} | ${'strong'} | ${true}
- ${{ parent: { type: 'paragraph' } }} | ${'paragraph'} | ${false}
-    `('returns $result when node parent type is $parentType', ({ node, result }) => {
- expect(renderer.canRender(node)).toBe(result);
- });
- });
-
- describe('render', () => {
- it('returns text node with a break line', () => {
- expect(renderer.render()).toEqual({
- type: 'text',
- content: ' ',
- });
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_utils_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_utils_spec.js
deleted file mode 100644
index 49b8936a9f7..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/renderers/render_utils_spec.js
+++ /dev/null
@@ -1,109 +0,0 @@
-import {
- buildUneditableBlockTokens,
- buildUneditableOpenTokens,
-} from '~/static_site_editor/rich_content_editor/services/renderers/build_uneditable_token';
-import {
- renderUneditableLeaf,
- renderUneditableBranch,
- renderWithAttributeDefinitions,
- willAlwaysRender,
-} from '~/static_site_editor/rich_content_editor/services/renderers/render_utils';
-
-import { originToken, uneditableCloseToken, attributeDefinition } from './mock_data';
-
-describe('rich_content_editor/renderers/render_utils', () => {
- describe('renderUneditableLeaf', () => {
- it('should return uneditable block tokens around an origin token', () => {
- const context = { origin: jest.fn().mockReturnValueOnce(originToken) };
- const result = renderUneditableLeaf({}, context);
-
- expect(result).toStrictEqual(buildUneditableBlockTokens(originToken));
- });
- });
-
- describe('renderUneditableBranch', () => {
- let origin;
-
- beforeEach(() => {
- origin = jest.fn().mockReturnValueOnce(originToken);
- });
-
- it('should return uneditable block open token followed by the origin token when entering', () => {
- const context = { entering: true, origin };
- const result = renderUneditableBranch({}, context);
-
- expect(result).toStrictEqual(buildUneditableOpenTokens(originToken));
- });
-
- it('should return uneditable block closing token when exiting', () => {
- const context = { entering: false, origin };
- const result = renderUneditableBranch({}, context);
-
- expect(result).toStrictEqual(uneditableCloseToken);
- });
- });
-
- describe('willAlwaysRender', () => {
- it('always returns true', () => {
- expect(willAlwaysRender()).toBe(true);
- });
- });
-
- describe('renderWithAttributeDefinitions', () => {
- let openTagToken;
- let closeTagToken;
- let node;
- const attributes = {
- 'data-attribute-definition': attributeDefinition,
- };
-
- beforeEach(() => {
- openTagToken = { type: 'openTag' };
- closeTagToken = { type: 'closeTag' };
- node = {
- next: {
- firstChild: {
- literal: attributeDefinition,
- },
- },
- };
- });
-
- describe('when token type is openTag', () => {
- it('attaches attributes when attributes exist in the node’s next sibling', () => {
- const context = { origin: () => openTagToken };
-
- expect(renderWithAttributeDefinitions(node, context)).toEqual({
- ...openTagToken,
- attributes,
- });
- });
-
- it('attaches attributes when attributes exist in the node’s children', () => {
- const context = { origin: () => openTagToken };
- node = {
- firstChild: {
- firstChild: {
- next: {
- next: {
- literal: attributeDefinition,
- },
- },
- },
- },
- };
-
- expect(renderWithAttributeDefinitions(node, context)).toEqual({
- ...openTagToken,
- attributes,
- });
- });
- });
-
- it('does not attach attributes when token type is "closeTag"', () => {
- const context = { origin: () => closeTagToken };
-
- expect(renderWithAttributeDefinitions({}, context)).toBe(closeTagToken);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/services/sanitize_html_spec.js b/spec/frontend/static_site_editor/rich_content_editor/services/sanitize_html_spec.js
deleted file mode 100644
index 2f2d3beb53d..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/services/sanitize_html_spec.js
+++ /dev/null
@@ -1,11 +0,0 @@
-import sanitizeHTML from '~/static_site_editor/rich_content_editor/services/sanitize_html';
-
-describe('rich_content_editor/services/sanitize_html', () => {
- it.each`
- input | result
- ${'<iframe src="https://www.youtube.com"></iframe>'} | ${'<iframe src="https://www.youtube.com"></iframe>'}
- ${'<iframe src="https://gitlab.com"></iframe>'} | ${''}
- `('removes iframes if the iframe source origin is not allowed', ({ input, result }) => {
- expect(sanitizeHTML(input)).toBe(result);
- });
-});
diff --git a/spec/frontend/static_site_editor/rich_content_editor/toolbar_item_spec.js b/spec/frontend/static_site_editor/rich_content_editor/toolbar_item_spec.js
deleted file mode 100644
index c9dcf9cfe2e..00000000000
--- a/spec/frontend/static_site_editor/rich_content_editor/toolbar_item_spec.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import ToolbarItem from '~/static_site_editor/rich_content_editor/toolbar_item.vue';
-
-describe('Toolbar Item', () => {
- let wrapper;
-
- const findIcon = () => wrapper.find(GlIcon);
- const findButton = () => wrapper.find('button');
-
- const buildWrapper = (propsData) => {
- wrapper = shallowMount(ToolbarItem, {
- propsData,
- directives: {
- GlTooltip: createMockDirective(),
- },
- });
- };
-
- describe.each`
- icon | tooltip
- ${'heading'} | ${'Headings'}
- ${'bold'} | ${'Add bold text'}
- ${'italic'} | ${'Add italic text'}
- ${'strikethrough'} | ${'Add strikethrough text'}
- ${'quote'} | ${'Insert a quote'}
- ${'link'} | ${'Add a link'}
- ${'doc-code'} | ${'Insert a code block'}
- ${'list-bulleted'} | ${'Add a bullet list'}
- ${'list-numbered'} | ${'Add a numbered list'}
- ${'list-task'} | ${'Add a task list'}
- ${'list-indent'} | ${'Indent'}
- ${'list-outdent'} | ${'Outdent'}
- ${'dash'} | ${'Add a line'}
- ${'table'} | ${'Add a table'}
-    ${'image'}           | ${'Insert an image'}
- ${'code'} | ${'Insert inline code'}
- `('toolbar item component', ({ icon, tooltip }) => {
- beforeEach(() => buildWrapper({ icon, tooltip }));
-
- it('renders a toolbar button', () => {
- expect(findButton().exists()).toBe(true);
- });
-
- it('renders the correct tooltip', () => {
- const buttonTooltip = getBinding(wrapper.element, 'gl-tooltip');
- expect(buttonTooltip).toBeDefined();
- expect(buttonTooltip.value.title).toBe(tooltip);
- });
-
- it(`renders the ${icon} icon`, () => {
- expect(findIcon().exists()).toBe(true);
- expect(findIcon().props().name).toBe(icon);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/services/formatter_spec.js b/spec/frontend/static_site_editor/services/formatter_spec.js
deleted file mode 100644
index 9e9c4bbd171..00000000000
--- a/spec/frontend/static_site_editor/services/formatter_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import formatter from '~/static_site_editor/services/formatter';
-
-describe('static_site_editor/services/formatter', () => {
- const source = `Some text
-<br>
-
-And some more text
-
-
-<br>
-
-
-And even more text`;
- const sourceWithoutBrTags = `Some text
-
-And some more text
-
-
-
-
-And even more text`;
-
- it('removes extraneous <br> tags', () => {
- expect(formatter(source)).toMatch(sourceWithoutBrTags);
- });
-
- describe('ordered lists with incorrect content indentation', () => {
- it.each`
- input | result
- ${'12. ordered list item\n13.Next ordered list item'} | ${'12. ordered list item\n13.Next ordered list item'}
- ${'12. ordered list item\n - Next ordered list item'} | ${'12. ordered list item\n - Next ordered list item'}
- ${'12. ordered list item\n - Next ordered list item'} | ${'12. ordered list item\n - Next ordered list item'}
- ${'12. ordered list item\n Next ordered list item'} | ${'12. ordered list item\n Next ordered list item'}
- ${'1. ordered list item\n Next ordered list item'} | ${'1. ordered list item\n Next ordered list item'}
- `('\ntransforms\n$input \nto\n$result', ({ input, result }) => {
- expect(formatter(input)).toBe(result);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/services/front_matterify_spec.js b/spec/frontend/static_site_editor/services/front_matterify_spec.js
deleted file mode 100644
index ec3752b30c6..00000000000
--- a/spec/frontend/static_site_editor/services/front_matterify_spec.js
+++ /dev/null
@@ -1,54 +0,0 @@
-import { frontMatterify, stringify } from '~/static_site_editor/services/front_matterify';
-import {
- sourceContentYAML as content,
- sourceContentHeaderObjYAML as yamlFrontMatterObj,
- sourceContentSpacing as spacing,
- sourceContentBody as body,
-} from '../mock_data';
-
-describe('static_site_editor/services/front_matterify', () => {
- const frontMatterifiedContent = {
- source: content,
- matter: yamlFrontMatterObj,
- hasMatter: true,
- spacing,
- content: body,
- delimiter: '---',
- type: 'yaml',
- };
- const frontMatterifiedBody = {
- source: body,
- matter: null,
- hasMatter: false,
- spacing: null,
- content: body,
- delimiter: null,
- type: null,
- };
-
- describe('frontMatterify', () => {
- it.each`
- frontMatterified | target
- ${frontMatterify(content)} | ${frontMatterifiedContent}
- ${frontMatterify(body)} | ${frontMatterifiedBody}
- `('returns $target from $frontMatterified', ({ frontMatterified, target }) => {
- expect(frontMatterified).toEqual(target);
- });
-
- it('should throw when matter is invalid', () => {
- const invalidContent = `---\nkey: val\nkeyNoVal\n---\n${body}`;
-
- expect(() => frontMatterify(invalidContent)).toThrow();
- });
- });
-
- describe('stringify', () => {
- it.each`
- stringified | target
- ${stringify(frontMatterifiedContent)} | ${content}
- ${stringify(frontMatterifiedBody)} | ${body}
- `('returns $target from $stringified', ({ stringified, target }) => {
- expect(stringified).toBe(target);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/services/generate_branch_name_spec.js b/spec/frontend/static_site_editor/services/generate_branch_name_spec.js
deleted file mode 100644
index 7e437506a16..00000000000
--- a/spec/frontend/static_site_editor/services/generate_branch_name_spec.js
+++ /dev/null
@@ -1,22 +0,0 @@
-import { BRANCH_SUFFIX_COUNT } from '~/static_site_editor/constants';
-import generateBranchName from '~/static_site_editor/services/generate_branch_name';
-
-import { username, branch as targetBranch } from '../mock_data';
-
-describe('generateBranchName', () => {
- const timestamp = 12345678901234;
-
- beforeEach(() => {
- jest.spyOn(Date, 'now').mockReturnValueOnce(timestamp);
- });
-
- it('generates a name that includes the username and target branch', () => {
- expect(generateBranchName(username, targetBranch)).toMatch(`${username}-${targetBranch}`);
- });
-
- it(`adds the first ${BRANCH_SUFFIX_COUNT} numbers of the current timestamp`, () => {
- expect(generateBranchName(username, targetBranch)).toMatch(
- timestamp.toString().substring(BRANCH_SUFFIX_COUNT),
- );
- });
-});
diff --git a/spec/frontend/static_site_editor/services/load_source_content_spec.js b/spec/frontend/static_site_editor/services/load_source_content_spec.js
deleted file mode 100644
index 98d437698c4..00000000000
--- a/spec/frontend/static_site_editor/services/load_source_content_spec.js
+++ /dev/null
@@ -1,36 +0,0 @@
-import Api from '~/api';
-
-import loadSourceContent from '~/static_site_editor/services/load_source_content';
-
-import {
- sourceContentYAML as sourceContent,
- sourceContentTitle,
- projectId,
- sourcePath,
-} from '../mock_data';
-
-describe('loadSourceContent', () => {
- describe('requesting source content succeeds', () => {
- let result;
-
- beforeEach(() => {
- jest.spyOn(Api, 'getRawFile').mockResolvedValue({ data: sourceContent });
-
- return loadSourceContent({ projectId, sourcePath }).then((_result) => {
- result = _result;
- });
- });
-
- it('calls getRawFile API with project id and source path', () => {
- expect(Api.getRawFile).toHaveBeenCalledWith(projectId, sourcePath);
- });
-
- it('extracts page title from source content', () => {
- expect(result.title).toBe(sourceContentTitle);
- });
-
- it('returns raw content', () => {
- expect(result.content).toBe(sourceContent);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/services/parse_source_file_spec.js b/spec/frontend/static_site_editor/services/parse_source_file_spec.js
deleted file mode 100644
index fdd11297e09..00000000000
--- a/spec/frontend/static_site_editor/services/parse_source_file_spec.js
+++ /dev/null
@@ -1,101 +0,0 @@
-import parseSourceFile from '~/static_site_editor/services/parse_source_file';
-import {
- sourceContentYAML as content,
- sourceContentHeaderYAML as yamlFrontMatter,
- sourceContentHeaderObjYAML as yamlFrontMatterObj,
- sourceContentBody as body,
-} from '../mock_data';
-
-describe('static_site_editor/services/parse_source_file', () => {
- const contentComplex = [content, content, content].join('');
- const complexBody = [body, content, content].join('');
- const edit = 'and more';
- const newContent = `${content} ${edit}`;
- const newContentComplex = `${contentComplex} ${edit}`;
-
- describe('unmodified front matter', () => {
- it.each`
- parsedSource
- ${parseSourceFile(content)}
- ${parseSourceFile(contentComplex)}
-    `('returns the YAML front matter object when matter() is queried', ({ parsedSource }) => {
- expect(parsedSource.matter()).toEqual(yamlFrontMatterObj);
- });
- });
-
- describe('unmodified content', () => {
- it.each`
- parsedSource
- ${parseSourceFile(content)}
- ${parseSourceFile(contentComplex)}
- `('returns false by default', ({ parsedSource }) => {
- expect(parsedSource.isModified()).toBe(false);
- });
-
- it.each`
- parsedSource | isBody | target
- ${parseSourceFile(content)} | ${undefined} | ${content}
- ${parseSourceFile(content)} | ${false} | ${content}
- ${parseSourceFile(content)} | ${true} | ${body}
- ${parseSourceFile(contentComplex)} | ${undefined} | ${contentComplex}
- ${parseSourceFile(contentComplex)} | ${false} | ${contentComplex}
- ${parseSourceFile(contentComplex)} | ${true} | ${complexBody}
- `(
-      'returns only the $target content when the `isBody` argument is $isBody',
- ({ parsedSource, isBody, target }) => {
- expect(parsedSource.content(isBody)).toBe(target);
- },
- );
- });
-
- describe('modified front matter', () => {
- const newYamlFrontMatter = '---\nnewKey: newVal\n---';
- const newYamlFrontMatterObj = { newKey: 'newVal' };
- const contentWithNewFrontMatter = content.replace(yamlFrontMatter, newYamlFrontMatter);
- const contentComplexWithNewFrontMatter = contentComplex.replace(
- yamlFrontMatter,
- newYamlFrontMatter,
- );
-
- it.each`
- parsedSource | targetContent
- ${parseSourceFile(content)} | ${contentWithNewFrontMatter}
- ${parseSourceFile(contentComplex)} | ${contentComplexWithNewFrontMatter}
- `(
- 'returns the correct front matter and modified content',
- ({ parsedSource, targetContent }) => {
- expect(parsedSource.matter()).toMatchObject(yamlFrontMatterObj);
-
- parsedSource.syncMatter(newYamlFrontMatterObj);
-
- expect(parsedSource.matter()).toMatchObject(newYamlFrontMatterObj);
- expect(parsedSource.content()).toBe(targetContent);
- },
- );
- });
-
- describe('modified content', () => {
- const newBody = `${body} ${edit}`;
- const newComplexBody = `${complexBody} ${edit}`;
-
- it.each`
- parsedSource | hasMatter | isModified | targetRaw | targetBody
- ${parseSourceFile(content)} | ${true} | ${false} | ${content} | ${body}
- ${parseSourceFile(content)} | ${true} | ${true} | ${newContent} | ${newBody}
- ${parseSourceFile(contentComplex)} | ${true} | ${false} | ${contentComplex} | ${complexBody}
- ${parseSourceFile(contentComplex)} | ${true} | ${true} | ${newContentComplex} | ${newComplexBody}
- ${parseSourceFile(body)} | ${false} | ${false} | ${body} | ${body}
- ${parseSourceFile(body)} | ${false} | ${true} | ${newBody} | ${newBody}
- `(
- 'returns $isModified after a $targetRaw sync',
- ({ parsedSource, hasMatter, isModified, targetRaw, targetBody }) => {
- parsedSource.syncContent(targetRaw);
-
- expect(parsedSource.hasMatter()).toBe(hasMatter);
- expect(parsedSource.isModified()).toBe(isModified);
- expect(parsedSource.content()).toBe(targetRaw);
- expect(parsedSource.content(true)).toBe(targetBody);
- },
- );
- });
-});
diff --git a/spec/frontend/static_site_editor/services/renderers/render_image_spec.js b/spec/frontend/static_site_editor/services/renderers/render_image_spec.js
deleted file mode 100644
index d3298aa0b26..00000000000
--- a/spec/frontend/static_site_editor/services/renderers/render_image_spec.js
+++ /dev/null
@@ -1,96 +0,0 @@
-import imageRenderer from '~/static_site_editor/services/renderers/render_image';
-import { mounts, project, branch, baseUrl } from '../../mock_data';
-
-describe('static_site_editor/services/renderers/render_image', () => {
- let renderer;
- let imageRepository;
-
- beforeEach(() => {
-    imageRepository = { get: () => null };
-    renderer = imageRenderer.build(mounts, project, branch, baseUrl, imageRepository);
- });
-
- describe('build', () => {
- it('builds a renderer object containing `canRender` and `render` functions', () => {
- expect(renderer).toHaveProperty('canRender', expect.any(Function));
- expect(renderer).toHaveProperty('render', expect.any(Function));
- });
- });
-
- describe('canRender', () => {
- it.each`
- input | result
- ${{ type: 'image' }} | ${true}
- ${{ type: 'text' }} | ${false}
- ${{ type: 'htmlBlock' }} | ${false}
- `('returns $result when input is $input', ({ input, result }) => {
- expect(renderer.canRender(input)).toBe(result);
- });
- });
-
- describe('render', () => {
- let skipChildren;
- let context;
- let node;
-
- beforeEach(() => {
- skipChildren = jest.fn();
- context = { skipChildren };
- node = {
- firstChild: {
- type: 'img',
- literal: 'Some Image',
- },
- };
- });
-
- it.each`
- destination | isAbsolute | src
- ${'http://test.host/absolute/path/to/image.png'} | ${true} | ${'http://test.host/absolute/path/to/image.png'}
- ${'/relative/path/to/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/default/source/relative/path/to/image.png'}
- ${'/target/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/source/with/target/image.png'}
- ${'relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/relative/to/current/image.png'}
- ${'./relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/./relative/to/current/image.png'}
- ${'../relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/../relative/to/current/image.png'}
- `('returns an image with the correct attributes', ({ destination, isAbsolute, src }) => {
- node.destination = destination;
-
- const result = renderer.render(node, context);
-
- expect(result).toEqual({
- type: 'openTag',
- tagName: 'img',
- selfClose: true,
- attributes: {
- 'data-original-src': !isAbsolute ? destination : '',
- src,
- alt: 'Some Image',
- },
- });
-
- expect(skipChildren).toHaveBeenCalled();
- });
-
-    it('renders an image using the base64 content as the source when a cached image is found in the repository', () => {
- const imageContent = 'some-content';
- const originalSrc = 'path/to/image.png';
-
- imageRepository.get = () => imageContent;
- renderer = imageRenderer.build(mounts, project, branch, baseUrl, imageRepository);
- node.destination = originalSrc;
-
- const result = renderer.render(node, context);
-
- expect(result).toEqual({
- type: 'openTag',
- tagName: 'img',
- selfClose: true,
- attributes: {
- 'data-original-src': originalSrc,
- src: `data:image;base64,${imageContent}`,
- alt: 'Some Image',
- },
- });
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
deleted file mode 100644
index 757611166d7..00000000000
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ /dev/null
@@ -1,261 +0,0 @@
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
-import Api from '~/api';
-import { convertObjectPropsToSnakeCase } from '~/lib/utils/common_utils';
-
-import {
- SUBMIT_CHANGES_BRANCH_ERROR,
- SUBMIT_CHANGES_COMMIT_ERROR,
- SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
- TRACKING_ACTION_CREATE_COMMIT,
- TRACKING_ACTION_CREATE_MERGE_REQUEST,
- SERVICE_PING_TRACKING_ACTION_CREATE_COMMIT,
- SERVICE_PING_TRACKING_ACTION_CREATE_MERGE_REQUEST,
- DEFAULT_FORMATTING_CHANGES_COMMIT_MESSAGE,
- DEFAULT_FORMATTING_CHANGES_COMMIT_DESCRIPTION,
-} from '~/static_site_editor/constants';
-import generateBranchName from '~/static_site_editor/services/generate_branch_name';
-import submitContentChanges from '~/static_site_editor/services/submit_content_changes';
-
-import {
- username,
- projectId,
- commitBranchResponse,
- commitMultipleResponse,
- createMergeRequestResponse,
- mergeRequestMeta,
- sourcePath,
- branch as targetBranch,
- sourceContentYAML as content,
- trackingCategory,
- images,
-} from '../mock_data';
-
-jest.mock('~/static_site_editor/services/generate_branch_name');
-
-describe('submitContentChanges', () => {
- const sourceBranch = 'branch-name';
- let trackingSpy;
- let origPage;
-
- const buildPayload = (overrides = {}) => ({
- username,
- projectId,
- sourcePath,
- targetBranch,
- content,
- images,
- mergeRequestMeta,
- ...overrides,
- });
-
- beforeEach(() => {
- jest.spyOn(Api, 'createBranch').mockResolvedValue({ data: commitBranchResponse });
- jest.spyOn(Api, 'commitMultiple').mockResolvedValue({ data: commitMultipleResponse });
- jest
- .spyOn(Api, 'createProjectMergeRequest')
- .mockResolvedValue({ data: createMergeRequestResponse });
-
- generateBranchName.mockReturnValue(sourceBranch);
-
- origPage = document.body.dataset.page;
- document.body.dataset.page = trackingCategory;
- trackingSpy = mockTracking(document.body.dataset.page, undefined, jest.spyOn);
- });
-
- afterEach(() => {
- document.body.dataset.page = origPage;
- unmockTracking();
- });
-
- it('creates a branch named after the username and target branch', () => {
- return submitContentChanges(buildPayload()).then(() => {
- expect(Api.createBranch).toHaveBeenCalledWith(projectId, {
- ref: targetBranch,
- branch: sourceBranch,
- });
- });
- });
-
- it('notifies error when branch could not be created', () => {
- Api.createBranch.mockRejectedValueOnce();
-
- return expect(submitContentChanges(buildPayload())).rejects.toThrow(
- SUBMIT_CHANGES_BRANCH_ERROR,
- );
- });
-
- describe('committing markdown formatting changes', () => {
- const formattedMarkdown = `formatted ${content}`;
- const commitPayload = {
- branch: sourceBranch,
- commit_message: `${DEFAULT_FORMATTING_CHANGES_COMMIT_MESSAGE}\n\n${DEFAULT_FORMATTING_CHANGES_COMMIT_DESCRIPTION}`,
- actions: [
- {
- action: 'update',
- file_path: sourcePath,
- content: formattedMarkdown,
- },
- ],
- };
-
- it('commits markdown formatting changes in a separate commit', () => {
- return submitContentChanges(buildPayload({ formattedMarkdown })).then(() => {
- expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, commitPayload);
- });
- });
-
- it('does not commit markdown formatting changes when there are none', () => {
- return submitContentChanges(buildPayload()).then(() => {
- expect(Api.commitMultiple.mock.calls).toHaveLength(1);
- expect(Api.commitMultiple.mock.calls[0][1]).not.toMatchObject({
- actions: commitPayload.actions,
- });
- });
- });
- });
-
- it('commits the content changes to the branch when creating branch succeeds', () => {
- return submitContentChanges(buildPayload()).then(() => {
- expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
- branch: sourceBranch,
- commit_message: mergeRequestMeta.title,
- actions: [
- {
- action: 'update',
- file_path: sourcePath,
- content,
- },
- {
- action: 'create',
- content: 'image1-content',
- encoding: 'base64',
- file_path: 'path/to/image1.png',
- },
- ],
- });
- });
- });
-
- it('does not commit an image if it has been removed from the content', () => {
- const contentWithoutImages = '## Content without images';
- const payload = buildPayload({ content: contentWithoutImages });
- return submitContentChanges(payload).then(() => {
- expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
- branch: sourceBranch,
- commit_message: mergeRequestMeta.title,
- actions: [
- {
- action: 'update',
- file_path: sourcePath,
- content: contentWithoutImages,
- },
- ],
- });
- });
- });
-
- it('notifies error when content could not be committed', () => {
- Api.commitMultiple.mockRejectedValueOnce();
-
- return expect(submitContentChanges(buildPayload())).rejects.toThrow(
- SUBMIT_CHANGES_COMMIT_ERROR,
- );
- });
-
- it('creates a merge request when committing changes succeeds', () => {
- return submitContentChanges(buildPayload()).then(() => {
- const { title, description } = mergeRequestMeta;
- expect(Api.createProjectMergeRequest).toHaveBeenCalledWith(
- projectId,
- convertObjectPropsToSnakeCase({
- title,
- description,
- targetBranch,
- sourceBranch,
- }),
- );
- });
- });
-
- it('notifies error when merge request could not be created', () => {
- Api.createProjectMergeRequest.mockRejectedValueOnce();
-
- return expect(submitContentChanges(buildPayload())).rejects.toThrow(
- SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
- );
- });
-
- describe('when changes are submitted successfully', () => {
- let result;
-
- beforeEach(() => {
- return submitContentChanges(buildPayload()).then((_result) => {
- result = _result;
- });
- });
-
- it('returns the branch name', () => {
- expect(result).toMatchObject({ branch: { label: sourceBranch } });
- });
-
- it('returns commit short id and web url', () => {
- expect(result).toMatchObject({
- commit: {
- label: commitMultipleResponse.short_id,
- url: commitMultipleResponse.web_url,
- },
- });
- });
-
- it('returns merge request iid and web url', () => {
- expect(result).toMatchObject({
- mergeRequest: {
- label: createMergeRequestResponse.iid,
- url: createMergeRequestResponse.web_url,
- },
- });
- });
- });
-
- describe('sends the correct tracking event', () => {
- beforeEach(() => {
- return submitContentChanges(buildPayload());
- });
-
- it('for committing changes', () => {
- expect(trackingSpy).toHaveBeenCalledWith(
- document.body.dataset.page,
- TRACKING_ACTION_CREATE_COMMIT,
- );
- });
-
- it('for creating a merge request', () => {
- expect(trackingSpy).toHaveBeenCalledWith(
- document.body.dataset.page,
- TRACKING_ACTION_CREATE_MERGE_REQUEST,
- );
- });
- });
-
- describe('sends the correct Service Ping tracking event', () => {
- beforeEach(() => {
- jest.spyOn(Api, 'trackRedisCounterEvent').mockResolvedValue({ data: '' });
- });
-
-    it('for committing changes', () => {
- return submitContentChanges(buildPayload()).then(() => {
- expect(Api.trackRedisCounterEvent).toHaveBeenCalledWith(
- SERVICE_PING_TRACKING_ACTION_CREATE_COMMIT,
- );
- });
- });
-
- it('for creating a merge request', () => {
- return submitContentChanges(buildPayload()).then(() => {
- expect(Api.trackRedisCounterEvent).toHaveBeenCalledWith(
- SERVICE_PING_TRACKING_ACTION_CREATE_MERGE_REQUEST,
- );
- });
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/services/templater_spec.js b/spec/frontend/static_site_editor/services/templater_spec.js
deleted file mode 100644
index cb3a0a0c106..00000000000
--- a/spec/frontend/static_site_editor/services/templater_spec.js
+++ /dev/null
@@ -1,112 +0,0 @@
-/* eslint-disable no-useless-escape */
-import templater from '~/static_site_editor/services/templater';
-
-describe('templater', () => {
- const source = `Below this line is a simple ERB (single-line erb block) example.
-
-<% some erb code %>
-
-Below this line is a complex ERB (multi-line erb block) example.
-
-<% if apptype.maturity && (apptype.maturity != "planned") %>
- <% maturity = "This application type is at the \"#{apptype.maturity}\" level of maturity." %>
-<% end %>
-
-Below this line is a non-erb (single-line HTML) markup example that also has erb.
-
-<a href="<%= compensation_roadmap.role_path %>"><%= compensation_roadmap.role_path %></a>
-
-Below this line is a non-erb (multi-line HTML block) markup example that also has erb.
-
-<ul>
-<% compensation_roadmap.recommendation.recommendations.each do |recommendation| %>
- <li><%= recommendation %></li>
-<% end %>
-</ul>
-
-Below this line is a block of HTML.
-
-<div>
- <h1>Heading</h1>
- <p>Some paragraph...</p>
-</div>
-
-Below this line is a codeblock of the same HTML that should be ignored and preserved.
-
-\`\`\` html
-<div>
- <h1>Heading</h1>
- <p>Some paragraph...</p>
-</div>
-\`\`\`
-
-Below this line is an iframe that should be ignored and preserved
-
-<iframe></iframe>
-`;
- const sourceTemplated = `Below this line is a simple ERB (single-line erb block) example.
-
-\`\`\` sse
-<% some erb code %>
-\`\`\`
-
-Below this line is a complex ERB (multi-line erb block) example.
-
-\`\`\` sse
-<% if apptype.maturity && (apptype.maturity != "planned") %>
- <% maturity = "This application type is at the \"#{apptype.maturity}\" level of maturity." %>
-<% end %>
-\`\`\`
-
-Below this line is a non-erb (single-line HTML) markup example that also has erb.
-
-\`\`\` sse
-<a href="<%= compensation_roadmap.role_path %>"><%= compensation_roadmap.role_path %></a>
-\`\`\`
-
-Below this line is a non-erb (multi-line HTML block) markup example that also has erb.
-
-\`\`\` sse
-<ul>
-<% compensation_roadmap.recommendation.recommendations.each do |recommendation| %>
- <li><%= recommendation %></li>
-<% end %>
-</ul>
-\`\`\`
-
-Below this line is a block of HTML.
-
-\`\`\` sse
-<div>
- <h1>Heading</h1>
- <p>Some paragraph...</p>
-</div>
-\`\`\`
-
-Below this line is a codeblock of the same HTML that should be ignored and preserved.
-
-\`\`\` html
-<div>
- <h1>Heading</h1>
- <p>Some paragraph...</p>
-</div>
-\`\`\`
-
-Below this line is an iframe that should be ignored and preserved
-
-<iframe></iframe>
-`;
-
- it.each`
- fn | initial | target
- ${'wrap'} | ${source} | ${sourceTemplated}
- ${'wrap'} | ${sourceTemplated} | ${sourceTemplated}
- ${'unwrap'} | ${sourceTemplated} | ${source}
- ${'unwrap'} | ${source} | ${source}
- `(
-    'wraps $initial in templated sse codeblocks if $fn is wrap, unwraps otherwise',
- ({ fn, initial, target }) => {
- expect(templater[fn](initial)).toMatch(target);
- },
- );
-});
diff --git a/spec/frontend/tags/components/delete_tag_modal_spec.js b/spec/frontend/tags/components/delete_tag_modal_spec.js
new file mode 100644
index 00000000000..b1726a2c0ef
--- /dev/null
+++ b/spec/frontend/tags/components/delete_tag_modal_spec.js
@@ -0,0 +1,138 @@
+import { GlButton, GlModal, GlFormInput, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import DeleteTagModal from '~/tags/components/delete_tag_modal.vue';
+import eventHub from '~/tags/event_hub';
+
+let wrapper;
+
+const tagName = 'test-tag';
+const path = '/path/to/tag';
+const isProtected = false;
+
+const createComponent = (data = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(DeleteTagModal, {
+ data() {
+ return {
+ tagName,
+ path,
+ isProtected,
+ ...data,
+ };
+ },
+ stubs: {
+ GlModal: stubComponent(GlModal, {
+ template:
+ '<div><slot name="modal-title"></slot><slot></slot><slot name="modal-footer"></slot></div>',
+ }),
+ GlButton,
+ GlFormInput,
+ GlSprintf,
+ },
+ }),
+ );
+};
+
+const findModal = () => wrapper.findComponent(GlModal);
+const findModalMessage = () => wrapper.findByTestId('modal-message');
+const findDeleteButton = () => wrapper.findByTestId('delete-tag-confirmation-button');
+const findCancelButton = () => wrapper.findByTestId('delete-tag-cancel-button');
+const findFormInput = () => wrapper.findComponent(GlFormInput);
+const findForm = () => wrapper.find('form');
+
+describe('Delete tag modal', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Deleting a regular tag', () => {
+ const expectedTitle = 'Delete tag. Are you ABSOLUTELY SURE?';
+ const expectedMessage = "You're about to permanently delete the tag test-tag.";
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the modal correctly', () => {
+ expect(findModal().props('title')).toBe(expectedTitle);
+ expect(findModalMessage().text()).toMatchInterpolatedText(expectedMessage);
+ expect(findCancelButton().text()).toBe('Cancel, keep tag');
+ expect(findDeleteButton().text()).toBe('Yes, delete tag');
+ expect(findForm().attributes('action')).toBe(path);
+ });
+
+ it('submits the form when the delete button is clicked', () => {
+ const submitFormSpy = jest.spyOn(wrapper.vm.$refs.form, 'submit');
+
+ findDeleteButton().trigger('click');
+
+ expect(findForm().attributes('action')).toBe(path);
+ expect(submitFormSpy).toHaveBeenCalled();
+ });
+
+    it('calls show on the modal when an `openModal` event is received through the event hub', async () => {
+ const showSpy = jest.spyOn(wrapper.vm.$refs.modal, 'show');
+
+ eventHub.$emit('openModal', {
+ isProtected,
+ tagName,
+ path,
+ });
+
+ expect(showSpy).toHaveBeenCalled();
+ });
+
+ it('calls hide on the modal when cancel button is clicked', () => {
+ const closeModalSpy = jest.spyOn(wrapper.vm.$refs.modal, 'hide');
+
+ findCancelButton().trigger('click');
+
+ expect(closeModalSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('Deleting a protected tag (for owner or maintainer)', () => {
+ const expectedTitleProtected = 'Delete protected tag. Are you ABSOLUTELY SURE?';
+ const expectedMessageProtected =
+ "You're about to permanently delete the protected tag test-tag.";
+ const expectedConfirmationText =
+ 'After you confirm and select Yes, delete protected tag, you cannot recover this tag. Please type the following to confirm: test-tag';
+
+ beforeEach(() => {
+ createComponent({ isProtected: true });
+ });
+
+ describe('rendering the modal correctly for a protected tag', () => {
+ it('sets the modal title for a protected tag', () => {
+ expect(findModal().props('title')).toBe(expectedTitleProtected);
+ });
+
+ it('renders the correct text in the modal message', () => {
+ expect(findModalMessage().text()).toMatchInterpolatedText(expectedMessageProtected);
+ });
+
+ it('renders the protected tag name confirmation form with expected text and action', () => {
+ expect(findForm().text()).toMatchInterpolatedText(expectedConfirmationText);
+ expect(findForm().attributes('action')).toBe(path);
+ });
+
+ it('renders the buttons with the correct button text', () => {
+ expect(findCancelButton().text()).toBe('Cancel, keep tag');
+ expect(findDeleteButton().text()).toBe('Yes, delete protected tag');
+ });
+ });
+
+ it('opens with the delete button disabled and enables it when tag name is confirmed', async () => {
+ expect(findDeleteButton().props('disabled')).toBe(true);
+
+ findFormInput().vm.$emit('input', tagName);
+
+ await waitForPromises();
+
+ expect(findDeleteButton().props('disabled')).not.toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/tags/init_delete_tag_modal_spec.js b/spec/frontend/tags/init_delete_tag_modal_spec.js
new file mode 100644
index 00000000000..537df4fac52
--- /dev/null
+++ b/spec/frontend/tags/init_delete_tag_modal_spec.js
@@ -0,0 +1,23 @@
+import Vue from 'vue';
+import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
+import initDeleteTagModal from '../../../app/assets/javascripts/tags/init_delete_tag_modal';
+
+describe('initDeleteTagModal', () => {
+ beforeEach(() => {
+ setHTMLFixture('<div class="js-delete-tag-modal"></div>');
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
+ it('should mount the delete tag modal', () => {
+ expect(initDeleteTagModal()).toBeInstanceOf(Vue);
+ expect(document.querySelector('.js-delete-tag-modal')).toBeNull();
+ });
+
+ it('should return false if the mounting element is missing', () => {
+ document.querySelector('.js-delete-tag-modal').remove();
+ expect(initDeleteTagModal()).toBe(false);
+ });
+});
diff --git a/spec/frontend/terraform/components/states_table_actions_spec.js b/spec/frontend/terraform/components/states_table_actions_spec.js
index d01f6af9023..40b7448d78d 100644
--- a/spec/frontend/terraform/components/states_table_actions_spec.js
+++ b/spec/frontend/terraform/components/states_table_actions_spec.js
@@ -69,6 +69,7 @@ describe('StatesTableActions', () => {
wrapper = shallowMount(StateActions, {
apolloProvider,
propsData,
+ provide: { projectPath: 'path/to/project' },
mocks: { $toast: { show: toast } },
stubs: { GlDropdown, GlModal, GlSprintf },
});
diff --git a/spec/frontend/terraform/components/states_table_spec.js b/spec/frontend/terraform/components/states_table_spec.js
index fa9c8320b4f..16ffd2b7013 100644
--- a/spec/frontend/terraform/components/states_table_spec.js
+++ b/spec/frontend/terraform/components/states_table_spec.js
@@ -2,6 +2,8 @@ import { GlBadge, GlLoadingIcon, GlTooltip } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { useFakeDate } from 'helpers/fake_date';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import StatesTable from '~/terraform/components/states_table.vue';
import StateActions from '~/terraform/components/states_table_actions.vue';
@@ -104,11 +106,31 @@ describe('StatesTable', () => {
updatedAt: '2020-10-10T00:00:00Z',
latestVersion: null,
},
+ {
+ _showDetails: false,
+ errorMessages: [],
+ name: 'state-6',
+ loadingLock: false,
+ loadingRemove: false,
+ lockedAt: null,
+ lockedByUser: null,
+ updatedAt: '2020-10-10T00:00:00Z',
+ deletedAt: '2022-02-02T00:00:00Z',
+ latestVersion: null,
+ },
],
};
const createComponent = async (propsData = defaultProps) => {
- wrapper = mount(StatesTable, { propsData });
+ wrapper = extendedWrapper(
+ mount(StatesTable, {
+ propsData,
+ provide: { projectPath: 'path/to/project' },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ }),
+ );
await nextTick();
};
@@ -124,27 +146,28 @@ describe('StatesTable', () => {
});
it.each`
- name | toolTipText | locked | loading | lineNumber
+ name | toolTipText | hasBadge | loading | lineNumber
${'state-1'} | ${'Locked by user-1 2 days ago'} | ${true} | ${false} | ${0}
${'state-2'} | ${'Locking state'} | ${false} | ${true} | ${1}
${'state-3'} | ${'Unlocking state'} | ${false} | ${true} | ${2}
${'state-4'} | ${'Locked by Unknown User 5 days ago'} | ${true} | ${false} | ${3}
${'state-5'} | ${'Removing'} | ${false} | ${true} | ${4}
+ ${'state-6'} | ${'Deletion in progress'} | ${true} | ${false} | ${5}
`(
'displays the name and locked information "$name" for line "$lineNumber"',
- ({ name, toolTipText, locked, loading, lineNumber }) => {
+ ({ name, toolTipText, hasBadge, loading, lineNumber }) => {
const states = wrapper.findAll('[data-testid="terraform-states-table-name"]');
-
const state = states.at(lineNumber);
- const toolTip = state.find(GlTooltip);
expect(state.text()).toContain(name);
- expect(state.find(GlBadge).exists()).toBe(locked);
+ expect(state.find(GlBadge).exists()).toBe(hasBadge);
expect(state.find(GlLoadingIcon).exists()).toBe(loading);
- expect(toolTip.exists()).toBe(locked);
- if (locked) {
- expect(toolTip.text()).toMatchInterpolatedText(toolTipText);
+ if (hasBadge) {
+ const badge = wrapper.findByTestId(`state-badge-${name}`);
+
+ expect(getBinding(badge.element, 'gl-tooltip')).toBeDefined();
+ expect(badge.attributes('title')).toMatchInterpolatedText(toolTipText);
}
},
);
diff --git a/spec/frontend/terraform/components/terraform_list_spec.js b/spec/frontend/terraform/components/terraform_list_spec.js
index c8b4cd564d9..cfd82768098 100644
--- a/spec/frontend/terraform/components/terraform_list_spec.js
+++ b/spec/frontend/terraform/components/terraform_list_spec.js
@@ -16,6 +16,9 @@ describe('TerraformList', () => {
const propsData = {
emptyStateImage: '/path/to/image',
+ };
+
+ const provide = {
projectPath: 'path/to/project',
};
@@ -47,6 +50,7 @@ describe('TerraformList', () => {
wrapper = shallowMount(TerraformList, {
apolloProvider,
propsData,
+ provide,
stubs: {
GlTab,
},
diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js
index fa598716645..1544fed5240 100644
--- a/spec/frontend/user_popovers_spec.js
+++ b/spec/frontend/user_popovers_spec.js
@@ -22,16 +22,17 @@ describe('User Popovers', () => {
const link = document.createElement('a');
link.classList.add('js-user-link');
- link.setAttribute('data-user', '1');
+ link.dataset.user = '1';
return link;
};
+ const findPopovers = () => {
+ return Array.from(document.querySelectorAll('[data-testid="user-popover"]'));
+ };
const dummyUser = { name: 'root', username: 'root', is_followed: false };
const dummyUserStatus = { message: 'active' };
- let popovers;
-
const triggerEvent = (eventName, el) => {
const event = new MouseEvent(eventName, {
bubbles: true,
@@ -54,56 +55,73 @@ describe('User Popovers', () => {
.mockImplementation((userId) => userStatusCacheSpy(userId));
jest.spyOn(UsersCache, 'updateById');
- popovers = initUserPopovers(document.querySelectorAll(selector));
+ initUserPopovers((popoverInstance) => {
+ const mountingRoot = document.createElement('div');
+ document.body.appendChild(mountingRoot);
+ popoverInstance.$mount(mountingRoot);
+ });
});
afterEach(() => {
resetHTMLFixture();
});
- it('initializes a popover for each user link with a user id', () => {
- const linksWithUsers = findFixtureLinks();
+ describe('shows a placeholder popover on hover', () => {
+ let linksWithUsers;
+ beforeEach(() => {
+ linksWithUsers = findFixtureLinks();
+ linksWithUsers.forEach((el) => {
+ triggerEvent('mouseover', el);
+ });
+ });
- expect(linksWithUsers.length).toBe(popovers.length);
- });
+ it('for initial links', () => {
+ expect(findPopovers().length).toBe(linksWithUsers.length);
+ });
- it('adds popovers to user links added to the DOM tree after the initial call', async () => {
- document.body.appendChild(createUserLink());
- document.body.appendChild(createUserLink());
+ it('for elements added after initial load', async () => {
+ const addedLinks = [createUserLink(), createUserLink()];
+ addedLinks.forEach((link) => {
+ document.body.appendChild(link);
+ });
- const linksWithUsers = findFixtureLinks();
+ jest.runOnlyPendingTimers();
- expect(linksWithUsers.length).toBe(popovers.length + 2);
+ addedLinks.forEach((link) => {
+ triggerEvent('mouseover', link);
+ });
+
+ expect(findPopovers().length).toBe(linksWithUsers.length + addedLinks.length);
+ });
});
- it('does not initialize the user popovers twice for the same element', () => {
- const newPopovers = initUserPopovers(document.querySelectorAll(selector));
- const samePopovers = popovers.every((popover, index) => newPopovers[index] === popover);
+ it('does not initialize the user popovers twice for the same element', async () => {
+ const [firstUserLink] = findFixtureLinks();
+ triggerEvent('mouseover', firstUserLink);
+ jest.runOnlyPendingTimers();
+ triggerEvent('mouseleave', firstUserLink);
+ jest.runOnlyPendingTimers();
+ triggerEvent('mouseover', firstUserLink);
+ jest.runOnlyPendingTimers();
- expect(samePopovers).toBe(true);
+ expect(findPopovers().length).toBe(1);
});
- describe('when user link emits mouseenter event', () => {
+ describe('when user link emits mouseenter event with empty user cache', () => {
let userLink;
beforeEach(() => {
UsersCache.retrieveById.mockReset();
- userLink = document.querySelector(selector);
-
- triggerEvent('mouseenter', userLink);
- });
+ [userLink] = findFixtureLinks();
- it('removes title attribute from user links', () => {
- expect(userLink.getAttribute('title')).toBeFalsy();
- expect(userLink.dataset.originalTitle).toBeFalsy();
+ triggerEvent('mouseover', userLink);
});
- it('populates popovers with preloaded user data', () => {
+ it('populates popover with preloaded user data', () => {
const { name, userId, username } = userLink.dataset;
- const [firstPopover] = popovers;
- expect(firstPopover.$props.user).toEqual(
+ expect(userLink.user).toEqual(
expect.objectContaining({
name,
userId,
@@ -111,6 +129,21 @@ describe('User Popovers', () => {
}),
);
});
+ });
+
+ describe('when user link emits mouseenter event', () => {
+ let userLink;
+
+ beforeEach(() => {
+ [userLink] = findFixtureLinks();
+
+ triggerEvent('mouseover', userLink);
+ });
+
+ it('removes title attribute from user links', () => {
+ expect(userLink.getAttribute('title')).toBeFalsy();
+ expect(userLink.dataset.originalTitle).toBeFalsy();
+ });
it('fetches user info and status from the user cache', () => {
const { userId } = userLink.dataset;
@@ -118,42 +151,38 @@ describe('User Popovers', () => {
expect(UsersCache.retrieveById).toHaveBeenCalledWith(userId);
expect(UsersCache.retrieveStatusById).toHaveBeenCalledWith(userId);
});
- });
-
- it('removes aria-describedby attribute from the user link on mouseleave', () => {
- const userLink = document.querySelector(selector);
- userLink.setAttribute('aria-describedby', 'popover');
- triggerEvent('mouseleave', userLink);
+ it('removes aria-describedby attribute from the user link on mouseleave', () => {
+ userLink.setAttribute('aria-describedby', 'popover');
+ triggerEvent('mouseleave', userLink);
- expect(userLink.getAttribute('aria-describedby')).toBe(null);
- });
-
- it('updates toggle follow button and `UsersCache` when toggle follow button is clicked', async () => {
- const [firstPopover] = popovers;
- const withinFirstPopover = within(firstPopover.$el);
- const findFollowButton = () => withinFirstPopover.queryByRole('button', { name: 'Follow' });
- const findUnfollowButton = () => withinFirstPopover.queryByRole('button', { name: 'Unfollow' });
+ expect(userLink.getAttribute('aria-describedby')).toBe(null);
+ });
- const userLink = document.querySelector(selector);
- triggerEvent('mouseenter', userLink);
+ it('updates toggle follow button and `UsersCache` when toggle follow button is clicked', async () => {
+ const [firstPopover] = findPopovers();
+ const withinFirstPopover = within(firstPopover);
+ const findFollowButton = () => withinFirstPopover.queryByRole('button', { name: 'Follow' });
+ const findUnfollowButton = () =>
+ withinFirstPopover.queryByRole('button', { name: 'Unfollow' });
- await waitForPromises();
+ jest.runOnlyPendingTimers();
- const { userId } = document.querySelector(selector).dataset;
+ const { userId } = document.querySelector(selector).dataset;
- triggerEvent('click', findFollowButton());
+ triggerEvent('click', findFollowButton());
- await waitForPromises();
+ await waitForPromises();
- expect(findUnfollowButton()).not.toBe(null);
- expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: true });
+ expect(findUnfollowButton()).not.toBe(null);
+ expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: true });
- triggerEvent('click', findUnfollowButton());
+ triggerEvent('click', findUnfollowButton());
- await waitForPromises();
+ await waitForPromises();
- expect(findFollowButton()).not.toBe(null);
- expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: false });
+ expect(findFollowButton()).not.toBe(null);
+ expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: false });
+ });
});
});
diff --git a/spec/frontend/users_select/test_helper.js b/spec/frontend/users_select/test_helper.js
index 59edde48eab..9231e38ea90 100644
--- a/spec/frontend/users_select/test_helper.js
+++ b/spec/frontend/users_select/test_helper.js
@@ -95,10 +95,10 @@ export const setAssignees = (...users) => {
const input = document.createElement('input');
input.name = 'merge_request[assignee_ids][]';
input.value = user.id.toString();
- input.setAttribute('data-avatar-url', user.avatar_url);
- input.setAttribute('data-name', user.name);
- input.setAttribute('data-username', user.username);
- input.setAttribute('data-can-merge', user.can_merge);
+ input.dataset.avatarUrl = user.avatar_url;
+ input.dataset.name = user.name;
+ input.dataset.username = user.username;
+ input.dataset.canMerge = user.can_merge;
return input;
}),
);
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js b/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
index 4985417ad99..05cd1bb5b3d 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
+++ b/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
@@ -1,5 +1,5 @@
import { nextTick } from 'vue';
-import { GlButton } from '@gitlab/ui';
+import { GlButton, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import createFlash from '~/flash';
import Approvals from '~/vue_merge_request_widget/components/approvals/approvals.vue';
@@ -15,6 +15,7 @@ import eventHub from '~/vue_merge_request_widget/event_hub';
jest.mock('~/flash');
+const RULE_NAME = 'first_rule';
const TEST_HELP_PATH = 'help/path';
const testApprovedBy = () => [1, 7, 10].map((id) => ({ id }));
const testApprovals = () => ({
@@ -26,6 +27,7 @@ const testApprovals = () => ({
user_can_approve: true,
user_has_approved: true,
require_password_to_approve: false,
+ invalid_approvers_rules: [],
});
const testApprovalRulesResponse = () => ({ rules: [{ id: 2 }] });
@@ -41,6 +43,9 @@ describe('MRWidget approvals', () => {
service,
...props,
},
+ stubs: {
+ GlSprintf,
+ },
});
};
@@ -58,6 +63,7 @@ describe('MRWidget approvals', () => {
};
const findSummary = () => wrapper.find(ApprovalsSummary);
const findOptionalSummary = () => wrapper.find(ApprovalsSummaryOptional);
+ const findInvalidRules = () => wrapper.find('[data-testid="invalid-rules"]');
beforeEach(() => {
service = {
@@ -171,7 +177,7 @@ describe('MRWidget approvals', () => {
it('approve action is rendered', () => {
expect(findActionData()).toEqual({
- variant: 'info',
+ variant: 'confirm',
text: 'Approve',
category: 'primary',
});
@@ -192,7 +198,7 @@ describe('MRWidget approvals', () => {
it('approve action (with inverted style) is rendered', () => {
expect(findActionData()).toEqual({
- variant: 'info',
+ variant: 'confirm',
text: 'Approve',
category: 'secondary',
});
@@ -208,7 +214,7 @@ describe('MRWidget approvals', () => {
it('approve additionally action is rendered', () => {
expect(findActionData()).toEqual({
- variant: 'info',
+ variant: 'confirm',
text: 'Approve additionally',
category: 'secondary',
});
@@ -279,9 +285,9 @@ describe('MRWidget approvals', () => {
it('revoke action is rendered', () => {
expect(findActionData()).toEqual({
- variant: 'warning',
+ category: 'primary',
+ variant: 'default',
text: 'Revoke approval',
- category: 'secondary',
});
});
@@ -383,4 +389,36 @@ describe('MRWidget approvals', () => {
});
});
});
+
+ describe('invalid rules', () => {
+ beforeEach(() => {
+ mr.approvals.merge_request_approvers_available = true;
+ createComponent();
+ });
+
+ it('does not render related components', () => {
+ expect(findInvalidRules().exists()).toBe(false);
+ });
+
+ describe('when invalid rules are present', () => {
+ beforeEach(() => {
+ mr.approvals.invalid_approvers_rules = [{ name: RULE_NAME }];
+ createComponent();
+ });
+
+ it('renders related components', () => {
+ const invalidRules = findInvalidRules();
+
+ expect(invalidRules.exists()).toBe(true);
+
+ const invalidRulesText = invalidRules.text();
+
+ expect(invalidRulesText).toContain(RULE_NAME);
+ expect(invalidRulesText).toContain(
+ 'GitLab has approved this rule automatically to unblock the merge request.',
+ );
+ expect(invalidRulesText).toContain('Learn more.');
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_mr_widget/components/approvals/humanized_text_spec.js b/spec/frontend/vue_mr_widget/components/approvals/humanized_text_spec.js
new file mode 100644
index 00000000000..d6776c00b29
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/approvals/humanized_text_spec.js
@@ -0,0 +1,18 @@
+import { humanizeInvalidApproversRules } from '~/vue_merge_request_widget/components/approvals/humanized_text';
+
+const testRules = [{ name: 'Lorem' }, { name: 'Ipsum' }, { name: 'Dolar' }];
+
+describe('humanizeInvalidApproversRules', () => {
+  it('returns text for a single rule', () => {
+ const [singleRule] = testRules;
+ expect(humanizeInvalidApproversRules([singleRule])).toBe('"Lorem"');
+ });
+
+ it('returns empty text when there is no rule', () => {
+ expect(humanizeInvalidApproversRules([])).toBe('');
+ });
+
+  it('returns text for multiple rules', () => {
+ expect(humanizeInvalidApproversRules(testRules)).toBe('"Lorem", "Ipsum" and "Dolar"');
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/extensions/index_spec.js b/spec/frontend/vue_mr_widget/components/extensions/index_spec.js
index 63df63a9b00..dc25596655a 100644
--- a/spec/frontend/vue_mr_widget/components/extensions/index_spec.js
+++ b/spec/frontend/vue_mr_widget/components/extensions/index_spec.js
@@ -21,8 +21,8 @@ describe('MR widget extension registering', () => {
expect.objectContaining({
extends: ExtensionBase,
name: 'Test',
- props: ['helloWorld'],
computed: {
+ helloWorld: expect.any(Function),
test: expect.any(Function),
},
methods: {
diff --git a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js b/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
index 82526af7afa..01fbcb2154f 100644
--- a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
@@ -42,8 +42,8 @@ describe('Merge Request Collapsible Extension', () => {
expect(wrapper.find('[data-testid="collapsed-header"]').text()).toBe('hello there');
});
- it('renders angle-right icon', () => {
- expect(findIcon().props('name')).toBe('angle-right');
+ it('renders chevron-lg-right icon', () => {
+ expect(findIcon().props('name')).toBe('chevron-lg-right');
});
describe('onClick', () => {
@@ -60,8 +60,8 @@ describe('Merge Request Collapsible Extension', () => {
expect(findTitle().text()).toBe('Collapse');
});
- it('renders angle-down icon', () => {
- expect(findIcon().props('name')).toBe('angle-down');
+ it('renders chevron-lg-down icon', () => {
+ expect(findIcon().props('name')).toBe('chevron-lg-down');
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
deleted file mode 100644
index ed6dc598845..00000000000
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ /dev/null
@@ -1,176 +0,0 @@
-import { shallowMount, mount } from '@vue/test-utils';
-import Header from '~/vue_merge_request_widget/components/mr_widget_header.vue';
-
-describe('MRWidgetHeader', () => {
- let wrapper;
-
- const createComponent = (propsData = {}) => {
- wrapper = shallowMount(Header, {
- propsData,
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- gon.relative_url_root = '';
- });
-
- const commonMrProps = {
- divergedCommitsCount: 1,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'main',
- targetBranchPath: '/foo/bar/main',
- statusPath: 'abc',
- };
-
- describe('computed', () => {
- describe('shouldShowCommitsBehindText', () => {
-      it('returns true when there are diverged commits', () => {
- createComponent({
- mr: {
- divergedCommitsCount: 12,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'main',
- statusPath: 'abc',
- },
- });
-
- expect(wrapper.vm.shouldShowCommitsBehindText).toBe(true);
- });
-
-      it('returns false when there are no diverged commits', () => {
- createComponent({
- mr: {
- divergedCommitsCount: 0,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'main',
- statusPath: 'abc',
- },
- });
-
- expect(wrapper.vm.shouldShowCommitsBehindText).toBe(false);
- });
- });
-
- describe('commitsBehindText', () => {
- it('returns singular when there is one commit', () => {
- wrapper = mount(Header, {
- propsData: {
- mr: commonMrProps,
- },
- });
-
- expect(wrapper.find('.diverged-commits-count').element.innerHTML).toBe(
- 'The source branch is <a href="/foo/bar/main" class="gl-link">1 commit behind</a> the target branch',
- );
- });
-
- it('returns plural when there is more than one commit', () => {
- wrapper = mount(Header, {
- propsData: {
- mr: {
- ...commonMrProps,
- divergedCommitsCount: 2,
- },
- },
- });
- expect(wrapper.find('.diverged-commits-count').element.innerHTML).toBe(
- 'The source branch is <a href="/foo/bar/main" class="gl-link">2 commits behind</a> the target branch',
- );
- });
- });
- });
-
- describe('template', () => {
- describe('common elements', () => {
- beforeEach(() => {
- createComponent({
- mr: {
- divergedCommitsCount: 12,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>',
- sourceBranchRemoved: false,
- targetBranchPath: 'foo/bar/commits-path',
- targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'main',
- isOpen: true,
- emailPatchesPath: '/mr/email-patches',
- plainDiffPath: '/mr/plainDiffPath',
- statusPath: 'abc',
- },
- });
- });
-
- it('renders source branch link', () => {
- expect(wrapper.find('.js-source-branch').html()).toContain(
- '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>',
- );
- });
-
- it('renders clipboard button', () => {
-        expect(wrapper.find('[data-testid="mr-widget-copy-clipboard"]').exists()).toBe(true);
- });
-
- it('renders target branch', () => {
- expect(wrapper.find('.js-target-branch').text().trim()).toBe('main');
- });
- });
-
- describe('without diverged commits', () => {
- beforeEach(() => {
- createComponent({
- mr: {
- divergedCommitsCount: 0,
- sourceBranch: 'mr-widget-refactor',
- sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>',
- sourceBranchRemoved: false,
- targetBranchPath: 'foo/bar/commits-path',
- targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'main',
- isOpen: true,
- emailPatchesPath: '/mr/email-patches',
- plainDiffPath: '/mr/plainDiffPath',
- statusPath: 'abc',
- },
- });
- });
-
- it('does not render diverged commits info', () => {
- expect(wrapper.find('.diverged-commits-count').exists()).toBe(false);
- });
- });
-
- describe('with diverged commits', () => {
- beforeEach(() => {
- wrapper = mount(Header, {
- propsData: {
- mr: {
- ...commonMrProps,
- divergedCommitsCount: 12,
- sourceBranchRemoved: false,
- targetBranchPath: 'foo/bar/commits-path',
- targetBranchTreePath: 'foo/bar/tree/path',
- isOpen: true,
- emailPatchesPath: '/mr/email-patches',
- plainDiffPath: '/mr/plainDiffPath',
- },
- },
- });
- });
-
- it('renders diverged commits info', () => {
- expect(wrapper.find('.diverged-commits-count').text().trim()).toBe(
- 'The source branch is 12 commits behind the target branch',
- );
-
- expect(wrapper.find('.diverged-commits-count a').text().trim()).toBe('12 commits behind');
- expect(wrapper.find('.diverged-commits-count a').attributes('href')).toBe(
- wrapper.vm.mr.targetBranchPath,
- );
- });
- });
- });
-});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
index 8e710b6d65f..352bc1a08ea 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
@@ -71,7 +71,7 @@ describe('MRWidgetSuggestPipeline', () => {
const button = findOkBtn();
expect(button.exists()).toBe(true);
- expect(button.classes('btn-info')).toEqual(true);
+ expect(button.classes('btn-confirm')).toEqual(true);
expect(button.attributes('href')).toBe(suggestProps.pipelinePath);
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
index 8efc4d84624..29ee7e0010f 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -193,9 +193,7 @@ describe('MRWidgetMerged', () => {
it('shows button to copy commit SHA to clipboard', () => {
expect(selectors.copyMergeShaButton).not.toBe(null);
- expect(selectors.copyMergeShaButton.getAttribute('data-clipboard-text')).toBe(
- vm.mr.mergeCommitSha,
- );
+ expect(selectors.copyMergeShaButton.dataset.clipboardText).toBe(vm.mr.mergeCommitSha);
});
it('hides button to copy commit SHA if SHA does not exist', async () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index da3a323e8ea..46d90ddc83c 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -87,7 +87,11 @@ const createReadyToMergeResponse = (customMr) => {
});
};
-const createComponent = (customConfig = {}, mergeRequestWidgetGraphql = false) => {
+const createComponent = (
+ customConfig = {},
+ mergeRequestWidgetGraphql = false,
+ restructuredMrWidget = false,
+) => {
wrapper = shallowMount(ReadyToMerge, {
localVue,
propsData: {
@@ -97,6 +101,7 @@ const createComponent = (customConfig = {}, mergeRequestWidgetGraphql = false) =
provide: {
glFeatures: {
mergeRequestWidgetGraphql,
+ restructuredMrWidget,
},
},
stubs: {
@@ -307,6 +312,20 @@ describe('ReadyToMerge', () => {
},
});
+ beforeEach(() => {
+ readyToMergeResponseSpy = jest
+ .fn()
+ .mockResolvedValueOnce(createReadyToMergeResponse({ squash: true, squashOnMerge: true }))
+ .mockResolvedValue(
+ createReadyToMergeResponse({
+ squash: true,
+ squashOnMerge: true,
+ defaultMergeCommitMessage: '',
+ defaultSquashCommitMessage: '',
+ }),
+ );
+ });
+
it('should handle merge when pipeline succeeds', async () => {
createComponent();
@@ -379,6 +398,27 @@ describe('ReadyToMerge', () => {
expect(params.should_remove_source_branch).toBeTruthy();
expect(params.auto_merge_strategy).toBeUndefined();
});
+
+ it('hides edit commit message', async () => {
+ createComponent({}, true, true);
+
+ await waitForPromises();
+
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm.service, 'merge').mockResolvedValue(response('success'));
+
+ await wrapper
+ .findComponent('[data-testid="widget_edit_commit_message"]')
+ .vm.$emit('input', true);
+
+ expect(wrapper.findComponent('[data-testid="edit_commit_message"]').exists()).toBe(true);
+
+ wrapper.vm.handleMergeButtonClick();
+
+ await waitForPromises();
+
+ expect(wrapper.findComponent('[data-testid="edit_commit_message"]').exists()).toBe(false);
+ });
});
describe('initiateRemoveSourceBranchPolling', () => {
diff --git a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
index b7c22b403aa..8f20d6a8fc9 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading, GlSprintf } from '@gitlab/ui';
+import { GlSkeletonLoader, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
@@ -51,7 +51,7 @@ describe('MrWidgetTerraformConainer', () => {
});
     it('displays loading skeleton', () => {
- expect(wrapper.find(GlSkeletonLoading).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
expect(wrapper.find(MrWidgetExpanableSection).exists()).toBe(false);
});
});
@@ -63,7 +63,7 @@ describe('MrWidgetTerraformConainer', () => {
});
it('displays terraform content', () => {
- expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(false);
expect(wrapper.find(MrWidgetExpanableSection).exists()).toBe(true);
expect(findPlans()).toEqual(Object.values(plans));
});
@@ -158,7 +158,7 @@ describe('MrWidgetTerraformConainer', () => {
});
it('stops loading', () => {
- expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(false);
});
it('generates one broken plan', () => {
diff --git a/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js b/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js
index 2bc6860743a..da4b990c078 100644
--- a/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js
+++ b/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js
@@ -9,6 +9,7 @@ import axios from '~/lib/utils/axios_utils';
import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
import httpStatusCodes from '~/lib/utils/http_status';
+import TestCaseDetails from '~/pipelines/components/test_reports/test_case_details.vue';
import { failedReport } from 'jest/reports/mock_data/mock_data';
import mixedResultsTestReports from 'jest/reports/mock_data/new_and_fixed_failures_report.json';
@@ -39,6 +40,7 @@ describe('Test report extension', () => {
const findToggleCollapsedButton = () => wrapper.findByTestId('toggle-button');
const findTertiaryButton = () => wrapper.find(GlButton);
const findAllExtensionListItems = () => wrapper.findAllByTestId('extension-list-item');
+ const findModal = () => wrapper.find(TestCaseDetails);
const createComponent = () => {
wrapper = mountExtended(extensionsContainer, {
@@ -190,4 +192,19 @@ describe('Test report extension', () => {
);
});
});
+
+ describe('modal link', () => {
+ beforeEach(async () => {
+ await createExpandedWidgetWithData();
+
+ wrapper.findByTestId('modal-link').trigger('click');
+ });
+
+ it('opens a modal to display test case details', () => {
+ expect(findModal().exists()).toBe(true);
+ expect(findModal().props('testCase')).toMatchObject(
+ mixedResultsTestReports.suites[0].new_failures[0],
+ );
+ });
+ });
});
diff --git a/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js b/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js
index f8ea6fc23a2..77b3576a3d3 100644
--- a/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js
+++ b/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js
@@ -1,6 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import api from '~/api';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
@@ -14,6 +15,8 @@ import {
invalidPlanWithoutName,
} from '../../components/terraform/mock_data';
+jest.mock('~/api.js');
+
describe('Terraform extension', () => {
let wrapper;
let mock;
@@ -130,20 +133,33 @@ describe('Terraform extension', () => {
}
});
});
+
+ it('responds with the correct telemetry when the deeply nested "Full log" link is clicked', () => {
+ api.trackRedisHllUserEvent.mockClear();
+ api.trackRedisCounterEvent.mockClear();
+
+ findListItem(0).find('[data-testid="extension-actions-button"]').trigger('click');
+
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_terraform_click_full_report',
+ );
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(1);
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_terraform_count_click_full_report',
+ );
+ });
});
describe('polling', () => {
let pollRequest;
- let pollStop;
beforeEach(() => {
pollRequest = jest.spyOn(Poll.prototype, 'makeRequest');
- pollStop = jest.spyOn(Poll.prototype, 'stop');
});
afterEach(() => {
pollRequest.mockRestore();
- pollStop.mockRestore();
});
describe('successful poll', () => {
@@ -155,7 +171,6 @@ describe('Terraform extension', () => {
it('does not make additional requests after poll is successful', () => {
expect(pollRequest).toHaveBeenCalledTimes(1);
- expect(pollStop).toHaveBeenCalledTimes(1);
});
});
@@ -171,7 +186,6 @@ describe('Terraform extension', () => {
it('does not make additional requests after poll is unsuccessful', () => {
expect(pollRequest).toHaveBeenCalledTimes(1);
- expect(pollStop).toHaveBeenCalledTimes(1);
});
});
});
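The new telemetry assertions above work because `jest.mock('~/api.js')` auto-mocks the module: every function on the default export becomes a `jest.fn()`, so the spec can clear and inspect calls around a simulated click. A small sketch of that mechanism (the event name here is illustrative, not one tracked by the widget):

```js
import api from '~/api';

jest.mock('~/api.js');

describe('~/api automock', () => {
  beforeEach(() => {
    // reset call history accumulated by earlier tests
    api.trackRedisHllUserEvent.mockClear();
  });

  it('records calls made through the mocked module', () => {
    expect(jest.isMockFunction(api.trackRedisHllUserEvent)).toBe(true);

    // in the real spec this call happens inside the widget when the
    // "Full log" link is clicked
    api.trackRedisHllUserEvent('some_example_event');

    expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
    expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith('some_example_event');
  });
});
```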
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index 9719e81fe12..6abbb052aef 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -29,8 +29,11 @@ import {
workingExtension,
collapsedDataErrorExtension,
fullDataErrorExtension,
+ fullReportExtension,
+ noTelemetryExtension,
pollingExtension,
pollingErrorExtension,
+ multiPollingExtension,
} from './test_extensions';
jest.mock('~/api.js');
@@ -48,6 +51,8 @@ describe('MrWidgetOptions', () => {
const COLLABORATION_MESSAGE = 'Members who can merge are allowed to add commits';
const findExtensionToggleButton = () =>
wrapper.find('[data-testid="widget-extension"] [data-testid="toggle-button"]');
+ const findExtensionLink = (linkHref) =>
+ wrapper.find(`[data-testid="widget-extension"] [href="${linkHref}"]`);
beforeEach(() => {
gl.mrWidgetData = { ...mockData };
@@ -67,7 +72,7 @@ describe('MrWidgetOptions', () => {
gon.features = {};
});
- const createComponent = (mrData = mockData, options = {}) => {
+ const createComponent = (mrData = mockData, options = {}, glFeatures = {}) => {
if (wrapper) {
wrapper.destroy();
}
@@ -76,6 +81,9 @@ describe('MrWidgetOptions', () => {
propsData: {
mrData: { ...mrData },
},
+ provide: {
+ glFeatures,
+ },
...options,
});
@@ -423,7 +431,7 @@ describe('MrWidgetOptions', () => {
beforeEach(() => {
const favicon = document.createElement('link');
favicon.setAttribute('id', 'favicon');
- favicon.setAttribute('data-original-href', faviconDataUrl);
+ favicon.dataset.originalHref = faviconDataUrl;
document.body.appendChild(favicon);
faviconElement = document.getElementById('favicon');
@@ -621,7 +629,16 @@ describe('MrWidgetOptions', () => {
});
describe('code quality widget', () => {
- it('renders the component', () => {
+ beforeEach(() => {
+ jest.spyOn(document, 'dispatchEvent');
+ });
+ it('renders the component when refactorCodeQualityExtension is false', () => {
+ createComponent(mockData, {}, { refactorCodeQualityExtension: false });
+ expect(wrapper.find('.js-codequality-widget').exists()).toBe(true);
+ });
+
+ it('does not render the component when refactorCodeQualityExtension is true', () => {
+ createComponent(mockData, {}, { refactorCodeQualityExtension: true });
- expect(wrapper.find('.js-codequality-widget').exists()).toBe(true);
+ expect(wrapper.find('.js-codequality-widget').exists()).toBe(false);
});
});
@@ -911,18 +928,6 @@ describe('MrWidgetOptions', () => {
expect(wrapper.text()).toContain('Test extension summary count: 1');
});
- it('triggers trackRedisHllUserEvent API call', async () => {
- await waitForPromises();
-
- wrapper
- .find('[data-testid="widget-extension"] [data-testid="toggle-button"]')
- .trigger('click');
-
- await nextTick();
-
- expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith('test_expand_event');
- });
-
it('renders full data', async () => {
await waitForPromises();
@@ -982,31 +987,98 @@ describe('MrWidgetOptions', () => {
describe('mock polling extension', () => {
let pollRequest;
- let pollStop;
+
+ const findWidgetTestExtension = () => wrapper.find('[data-testid="widget-extension"]');
beforeEach(() => {
pollRequest = jest.spyOn(Poll.prototype, 'makeRequest');
- pollStop = jest.spyOn(Poll.prototype, 'stop');
+
+ registeredExtensions.extensions = [];
});
afterEach(() => {
pollRequest.mockRestore();
- pollStop.mockRestore();
registeredExtensions.extensions = [];
+
+ // Clear all left-over timeouts that may be registered in the poll class
+ let id = window.setTimeout(() => {}, 0);
+
+ while (id > 0) {
+ window.clearTimeout(id);
+ id -= 1;
+ }
});
- describe('success', () => {
- beforeEach(() => {
- registerExtension(pollingExtension);
+ describe('success - multi polling', () => {
+ it('sets data when polling is complete', async () => {
+ registerExtension(
+ multiPollingExtension([
+ () =>
+ Promise.resolve({
+ headers: { 'poll-interval': 0 },
+ status: 200,
+ data: { reports: 'parsed' },
+ }),
+ () =>
+ Promise.resolve({
+ status: 200,
+ data: { reports: 'parsed' },
+ }),
+ ]),
+ );
- createComponent();
+ await createComponent();
+ expect(findWidgetTestExtension().html()).toContain(
+ 'Multi polling test extension reports: parsed, count: 2',
+ );
});
- it('does not make additional requests after poll is successful', () => {
+ it('shows loading state until polling is complete', async () => {
+ registerExtension(
+ multiPollingExtension([
+ () =>
+ Promise.resolve({
+ headers: { 'poll-interval': 1 },
+ status: 204,
+ }),
+ () =>
+ Promise.resolve({
+ status: 200,
+ data: { reports: 'parsed' },
+ }),
+ ]),
+ );
+
+ await createComponent();
+ expect(findWidgetTestExtension().html()).toContain('Test extension loading...');
+ });
+ });
+
+ describe('success', () => {
+ it('does not make additional requests after poll is successful', async () => {
+ registerExtension(pollingExtension);
+ await createComponent();
// called two times due to parent component polling (mount) and extension polling
expect(pollRequest).toHaveBeenCalledTimes(2);
- expect(pollStop).toHaveBeenCalledTimes(1);
+ });
+
+ it('keeps polling when poll-interval header is provided', async () => {
+ registerExtension({
+ ...pollingExtension,
+ methods: {
+ ...pollingExtension.methods,
+ fetchCollapsedData() {
+ return Promise.resolve({
+ data: {},
+ headers: { 'poll-interval': 1 },
+ status: 204,
+ });
+ },
+ },
+ });
+ await createComponent();
+ expect(findWidgetTestExtension().html()).toContain('Test extension loading...');
});
});
@@ -1024,7 +1096,6 @@ describe('MrWidgetOptions', () => {
it('does not make additional requests after poll has failed', () => {
// called two times due to parent component polling (mount) and extension polling
expect(pollRequest).toHaveBeenCalledTimes(2);
- expect(pollStop).toHaveBeenCalledTimes(1);
});
it('captures sentry error and displays error when poll has failed', () => {
@@ -1080,4 +1151,119 @@ describe('MrWidgetOptions', () => {
itHandlesTheException();
});
});
+
+ describe('telemetry', () => {
+ afterEach(() => {
+ registeredExtensions.extensions = [];
+ });
+
+ it('triggers view events when mounted', () => {
+ registerExtension(workingExtension());
+ createComponent();
+
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_view',
+ );
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(1);
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_count_view',
+ );
+ });
+
+ describe('expand button', () => {
+ it('triggers expand events when clicked', async () => {
+ registerExtension(workingExtension());
+ createComponent();
+
+ await waitForPromises();
+
+ api.trackRedisHllUserEvent.mockClear();
+ api.trackRedisCounterEvent.mockClear();
+
+ findExtensionToggleButton().trigger('click');
+
+ // The default working extension is a "warning" type, which generates a second - more specific - telemetry event for expansions
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(2);
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_expand',
+ );
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_expand_warning',
+ );
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(2);
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_count_expand',
+ );
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_count_expand_warning',
+ );
+ });
+
+ it.each`
+ widgetName | nonStandardEvent
+ ${'WidgetCodeQuality'} | ${'i_testing_code_quality_widget_total'}
+ ${'WidgetTerraform'} | ${'i_testing_terraform_widget_total'}
+ ${'WidgetIssues'} | ${'i_testing_load_performance_widget_total'}
+ ${'WidgetTestReport'} | ${'i_testing_summary_widget_total'}
+ `(
+ "sends non-standard events for the '$widgetName' widget",
+ async ({ widgetName, nonStandardEvent }) => {
+ const definition = {
+ ...workingExtension(),
+ name: widgetName,
+ };
+
+ registerExtension(definition);
+ createComponent();
+
+ await waitForPromises();
+
+ api.trackRedisHllUserEvent.mockClear();
+
+ findExtensionToggleButton().trigger('click');
+
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(nonStandardEvent);
+ },
+ );
+ });
+
+ it('triggers the "full report clicked" events when the appropriate button is clicked', () => {
+ registerExtension(fullReportExtension);
+ createComponent();
+
+ api.trackRedisHllUserEvent.mockClear();
+ api.trackRedisCounterEvent.mockClear();
+
+ findExtensionLink('testref').trigger('click');
+
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
+ expect(api.trackRedisHllUserEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_click_full_report',
+ );
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledTimes(1);
+ expect(api.trackRedisCounterEvent).toHaveBeenCalledWith(
+ 'i_merge_request_widget_test_extension_count_click_full_report',
+ );
+ });
+
+ describe('when disabled', () => {
+ afterEach(() => {
+ registeredExtensions.extensions = [];
+ });
+
+ it("doesn't emit any telemetry events", async () => {
+ registerExtension(noTelemetryExtension);
+ createComponent();
+
+ await waitForPromises();
+
+ findExtensionToggleButton().trigger('click');
+ findExtensionLink('testref').trigger('click'); // The "full report" link
+
+ expect(api.trackRedisHllUserEvent).not.toHaveBeenCalled();
+ expect(api.trackRedisCounterEvent).not.toHaveBeenCalled();
+ });
+ });
+ });
});
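The new `afterEach` above clears every timer that polling may have left behind by relying on timeout IDs being small, monotonically increasing integers in jsdom: requesting one more ID gives an upper bound, and everything at or below it can be cleared. Pulled out as a helper, the idea looks like this (the helper name is illustrative):

```js
// Clear any timeouts still registered, e.g. by a Poll instance, so they
// cannot fire after the suite has torn down its fixtures.
const clearLeftoverTimeouts = () => {
  // setTimeout returns an ID greater than every ID handed out so far
  let id = window.setTimeout(() => {}, 0);

  while (id > 0) {
    window.clearTimeout(id);
    id -= 1;
  }
};

afterEach(() => {
  clearLeftoverTimeouts();
});
```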
diff --git a/spec/frontend/vue_mr_widget/test_extensions.js b/spec/frontend/vue_mr_widget/test_extensions.js
index 6344636873f..76644e0be77 100644
--- a/spec/frontend/vue_mr_widget/test_extensions.js
+++ b/spec/frontend/vue_mr_widget/test_extensions.js
@@ -4,11 +4,14 @@ export const workingExtension = (shouldCollapse = true) => ({
name: 'WidgetTestExtension',
props: ['targetProjectFullPath'],
expandEvent: 'test_expand_event',
+ i18n: {
+ loading: 'Test extension loading...',
+ },
computed: {
- summary({ count, targetProjectFullPath }) {
+ summary({ count, targetProjectFullPath } = {}) {
return `Test extension summary count: ${count} & ${targetProjectFullPath}`;
},
- statusIcon({ count }) {
+ statusIcon({ count } = {}) {
return count > 0 ? EXTENSION_ICONS.warning : EXTENSION_ICONS.success;
},
shouldCollapse() {
@@ -106,6 +109,50 @@ export const pollingExtension = {
enablePolling: true,
};
+export const fullReportExtension = {
+ ...workingExtension(),
+ computed: {
+ ...workingExtension().computed,
+ tertiaryButtons() {
+ return [
+ {
+ text: 'test',
+ href: `testref`,
+ target: '_blank',
+ fullReport: true,
+ },
+ ];
+ },
+ },
+};
+
+export const noTelemetryExtension = {
+ ...fullReportExtension,
+ telemetry: false,
+};
+
+export const multiPollingExtension = (endpointsToBePolled) => ({
+ name: 'WidgetTestMultiPollingExtension',
+ props: [],
+ i18n: {
+ loading: 'Test extension loading...',
+ },
+ computed: {
+ summary(data) {
+ return `Multi polling test extension reports: ${data?.[0]?.reports}, count: ${data.length}`;
+ },
+ statusIcon(data) {
+ return data?.[0]?.reports === 'parsed' ? EXTENSION_ICONS.success : EXTENSION_ICONS.warning;
+ },
+ },
+ enablePolling: true,
+ methods: {
+ fetchMultiData() {
+ return endpointsToBePolled;
+ },
+ },
+});
+
export const pollingErrorExtension = {
...collapsedDataErrorExtension,
enablePolling: true,
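The `multiPollingExtension` factory added above takes an array of endpoint functions and exposes plain functions as `computed` members, so the fixture can also be exercised directly. A brief sketch, assuming it is imported from a spec living alongside this file, as mr_widget_options_spec.js does:

```js
import { multiPollingExtension } from './test_extensions';

describe('multiPollingExtension fixture', () => {
  it('summarises the data returned by each polled endpoint', () => {
    const extension = multiPollingExtension([
      // axios-like response; a zero poll-interval header stops polling
      () =>
        Promise.resolve({ status: 200, headers: { 'poll-interval': 0 }, data: { reports: 'parsed' } }),
    ]);

    expect(extension.methods.fetchMultiData()).toHaveLength(1);
    expect(extension.computed.summary([{ reports: 'parsed' }])).toBe(
      'Multi polling test extension reports: parsed, count: 1',
    );
  });
});
```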
diff --git a/spec/frontend/vue_shared/alert_details/alert_details_spec.js b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
index 7aa54a1c55a..ce51af31a70 100644
--- a/spec/frontend/vue_shared/alert_details/alert_details_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
@@ -201,28 +201,6 @@ describe('AlertDetails', () => {
});
});
- describe('Threat Monitoring details', () => {
- it('should not render the metrics tab', () => {
- mountComponent({
- data: { alert: mockAlert },
- provide: { isThreatMonitoringPage: true },
- });
- expect(findMetricsTab().exists()).toBe(false);
- });
-
- it('should display "View incident" button that links the issues page when incident exists', () => {
- const iid = '3';
- mountComponent({
- data: { alert: { ...mockAlert, issue: { iid } }, sidebarStatus: false },
- provide: { isThreatMonitoringPage: true },
- });
-
- expect(findViewIncidentBtn().exists()).toBe(true);
- expect(findViewIncidentBtn().attributes('href')).toBe(joinPaths(projectIssuesPath, iid));
- expect(findCreateIncidentBtn().exists()).toBe(false);
- });
- });
-
describe('Create incident from alert', () => {
it('should display "View incident" button that links the incident page when incident exists', () => {
const iid = '3';
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index 44b4c0398cd..30e15595193 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -12,7 +12,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
right="true"
size="medium"
text="Clone"
- variant="info"
+ variant="confirm"
>
<div
class="pb-2 mx-1"
@@ -24,41 +24,38 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<div
class="mx-3"
>
- <div
- readonly="readonly"
+ <b-input-group-stub
+ readonly=""
+ tag="div"
>
- <b-input-group-stub
+ <!---->
+
+ <b-form-input-stub
+ class="gl-form-input"
+ debounce="0"
+ formatter="[Function]"
+ readonly="true"
+ type="text"
+ value="ssh://foo.bar"
+ />
+
+ <b-input-group-append-stub
tag="div"
>
- <!---->
-
- <b-form-input-stub
- class="gl-form-input"
- debounce="0"
- formatter="[Function]"
- readonly="true"
- type="text"
- value="ssh://foo.bar"
+ <gl-button-stub
+ aria-label="Copy URL"
+ buttontextclasses=""
+ category="primary"
+ class="d-inline-flex"
+ data-clipboard-text="ssh://foo.bar"
+ data-qa-selector="copy_ssh_url_button"
+ icon="copy-to-clipboard"
+ size="medium"
+ title="Copy URL"
+ variant="default"
/>
-
- <b-input-group-append-stub
- tag="div"
- >
- <gl-button-stub
- aria-label="Copy URL"
- buttontextclasses=""
- category="primary"
- class="d-inline-flex"
- data-clipboard-text="ssh://foo.bar"
- data-qa-selector="copy_ssh_url_button"
- icon="copy-to-clipboard"
- size="medium"
- title="Copy URL"
- variant="default"
- />
- </b-input-group-append-stub>
- </b-input-group-stub>
- </div>
+ </b-input-group-append-stub>
+ </b-input-group-stub>
</div>
<gl-dropdown-section-header-stub>
@@ -68,41 +65,38 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<div
class="mx-3"
>
- <div
- readonly="readonly"
+ <b-input-group-stub
+ readonly=""
+ tag="div"
>
- <b-input-group-stub
+ <!---->
+
+ <b-form-input-stub
+ class="gl-form-input"
+ debounce="0"
+ formatter="[Function]"
+ readonly="true"
+ type="text"
+ value="http://foo.bar"
+ />
+
+ <b-input-group-append-stub
tag="div"
>
- <!---->
-
- <b-form-input-stub
- class="gl-form-input"
- debounce="0"
- formatter="[Function]"
- readonly="true"
- type="text"
- value="http://foo.bar"
+ <gl-button-stub
+ aria-label="Copy URL"
+ buttontextclasses=""
+ category="primary"
+ class="d-inline-flex"
+ data-clipboard-text="http://foo.bar"
+ data-qa-selector="copy_http_url_button"
+ icon="copy-to-clipboard"
+ size="medium"
+ title="Copy URL"
+ variant="default"
/>
-
- <b-input-group-append-stub
- tag="div"
- >
- <gl-button-stub
- aria-label="Copy URL"
- buttontextclasses=""
- category="primary"
- class="d-inline-flex"
- data-clipboard-text="http://foo.bar"
- data-qa-selector="copy_http_url_button"
- icon="copy-to-clipboard"
- size="medium"
- title="Copy URL"
- variant="default"
- />
- </b-input-group-append-stub>
- </b-input-group-stub>
- </div>
+ </b-input-group-append-stub>
+ </b-input-group-stub>
</div>
</div>
</gl-dropdown-stub>
diff --git a/spec/frontend/vue_shared/components/ci_icon_spec.js b/spec/frontend/vue_shared/components/ci_icon_spec.js
index 6d52db7ae65..1b502f9587c 100644
--- a/spec/frontend/vue_shared/components/ci_icon_spec.js
+++ b/spec/frontend/vue_shared/components/ci_icon_spec.js
@@ -5,6 +5,8 @@ import ciIcon from '~/vue_shared/components/ci_icon.vue';
describe('CI Icon component', () => {
let wrapper;
+ const findIconWrapper = () => wrapper.find('[data-testid="ci-icon-wrapper"]');
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -23,6 +25,52 @@ describe('CI Icon component', () => {
expect(wrapper.find(GlIcon).exists()).toBe(true);
});
+ describe('active icons', () => {
+ it.each`
+ isActive | cssClass
+ ${true} | ${'active'}
+ ${false} | ${'active'}
+ `('active should be $isActive', ({ isActive, cssClass }) => {
+ wrapper = shallowMount(ciIcon, {
+ propsData: {
+ status: {
+ icon: 'status_success',
+ },
+ isActive,
+ },
+ });
+
+ if (isActive) {
+ expect(findIconWrapper().classes()).toContain(cssClass);
+ } else {
+ expect(findIconWrapper().classes()).not.toContain(cssClass);
+ }
+ });
+ });
+
+ describe('interactive icons', () => {
+ it.each`
+ isInteractive | cssClass
+ ${true} | ${'interactive'}
+ ${false} | ${'interactive'}
+ `('interactive should be $isInteractive', ({ isInteractive, cssClass }) => {
+ wrapper = shallowMount(ciIcon, {
+ propsData: {
+ status: {
+ icon: 'status_success',
+ },
+ isInteractive,
+ },
+ });
+
+ if (isInteractive) {
+ expect(findIconWrapper().classes()).toContain(cssClass);
+ } else {
+ expect(findIconWrapper().classes()).not.toContain(cssClass);
+ }
+ });
+ });
+
describe('rendering a status', () => {
it.each`
icon | group | cssClass
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/color_item_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/color_item_spec.js
new file mode 100644
index 00000000000..fe614f03119
--- /dev/null
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/color_item_spec.js
@@ -0,0 +1,35 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { hexToRgb } from '~/lib/utils/color_utils';
+import ColorItem from '~/vue_shared/components/color_select_dropdown/color_item.vue';
+import { color } from './mock_data';
+
+describe('ColorItem', () => {
+ let wrapper;
+
+ const propsData = color;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ColorItem, {
+ propsData,
+ });
+ };
+
+ const findColorItem = () => wrapper.findByTestId('color-item');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the correct title', () => {
+ expect(wrapper.text()).toBe(propsData.title);
+ });
+
+ it('renders the correct background color for the color item', () => {
+ const convertedColor = hexToRgb(propsData.color).join(', ');
+ expect(findColorItem().attributes('style')).toBe(`background-color: rgb(${convertedColor});`);
+ });
+});
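The assertion above works because jsdom normalises an inline hex `background-color` to its `rgb()` form, so the spec rebuilds that string from `hexToRgb`. The arithmetic for the mock color, as a quick check (assuming `hexToRgb` returns the three channel values as an array, as its use with `.join()` suggests):

```js
import { hexToRgb } from '~/lib/utils/color_utils';

it('rebuilds the rgb() string for the mock color #217645', () => {
  // 0x21 = 33, 0x76 = 118, 0x45 = 69
  expect(`rgb(${hexToRgb('#217645').join(', ')})`).toBe('rgb(33, 118, 69)');
});
```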
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
new file mode 100644
index 00000000000..93b59800c27
--- /dev/null
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
@@ -0,0 +1,192 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
+import DropdownContents from '~/vue_shared/components/color_select_dropdown/dropdown_contents.vue';
+import DropdownValue from '~/vue_shared/components/color_select_dropdown/dropdown_value.vue';
+import epicColorQuery from '~/vue_shared/components/color_select_dropdown/graphql/epic_color.query.graphql';
+import updateEpicColorMutation from '~/vue_shared/components/color_select_dropdown/graphql/epic_update_color.mutation.graphql';
+import ColorSelectRoot from '~/vue_shared/components/color_select_dropdown/color_select_root.vue';
+import { DROPDOWN_VARIANT } from '~/vue_shared/components/color_select_dropdown/constants';
+import { colorQueryResponse, updateColorMutationResponse, color } from './mock_data';
+
+jest.mock('~/flash');
+
+Vue.use(VueApollo);
+
+const successfulQueryHandler = jest.fn().mockResolvedValue(colorQueryResponse);
+const successfulMutationHandler = jest.fn().mockResolvedValue(updateColorMutationResponse);
+const errorQueryHandler = jest.fn().mockRejectedValue('Error fetching epic color.');
+const errorMutationHandler = jest.fn().mockRejectedValue('An error occurred while updating color.');
+
+const defaultProps = {
+ allowEdit: true,
+ iid: '1',
+ fullPath: 'workspace-1',
+};
+
+describe('LabelsSelectRoot', () => {
+ let wrapper;
+
+ const findSidebarEditableItem = () => wrapper.findComponent(SidebarEditableItem);
+ const findDropdownValue = () => wrapper.findComponent(DropdownValue);
+ const findDropdownContents = () => wrapper.findComponent(DropdownContents);
+
+ const createComponent = ({
+ queryHandler = successfulQueryHandler,
+ mutationHandler = successfulMutationHandler,
+ propsData,
+ } = {}) => {
+ const mockApollo = createMockApollo([
+ [epicColorQuery, queryHandler],
+ [updateEpicColorMutation, mutationHandler],
+ ]);
+
+ wrapper = shallowMount(ColorSelectRoot, {
+ apolloProvider: mockApollo,
+ propsData: {
+ ...defaultProps,
+ ...propsData,
+ },
+ provide: {
+ canUpdate: true,
+ },
+ stubs: {
+ SidebarEditableItem,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ const defaultClasses = ['labels-select-wrapper', 'gl-relative'];
+
+ it.each`
+ variant | cssClass
+ ${'sidebar'} | ${defaultClasses}
+ ${'embedded'} | ${[...defaultClasses, 'is-embedded']}
+ `(
+ 'renders component root element with CSS class `$cssClass` when variant is "$variant"',
+ async ({ variant, cssClass }) => {
+ createComponent({
+ propsData: { variant },
+ });
+
+ expect(wrapper.classes()).toEqual(cssClass);
+ },
+ );
+ });
+
+ describe('if the variant is `sidebar`', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders SidebarEditableItem component', () => {
+ expect(findSidebarEditableItem().exists()).toBe(true);
+ });
+
+ it('renders correct props for the SidebarEditableItem component', () => {
+ expect(findSidebarEditableItem().props()).toMatchObject({
+ title: wrapper.vm.$options.i18n.widgetTitle,
+ canEdit: defaultProps.allowEdit,
+ loading: true,
+ });
+ });
+
+ describe('when colors are loaded', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('passes false `loading` prop to sidebar editable item', () => {
+ expect(findSidebarEditableItem().props('loading')).toBe(false);
+ });
+
+ it('renders dropdown value component when query colors is resolved', () => {
+ expect(findDropdownValue().props('selectedColor')).toMatchObject(color);
+ });
+ });
+ });
+
+ describe('if the variant is `embedded`', () => {
+ beforeEach(() => {
+ createComponent({ propsData: { iid: undefined, variant: DROPDOWN_VARIANT.Embedded } });
+ });
+
+ it('renders DropdownContents component', () => {
+ expect(findDropdownContents().exists()).toBe(true);
+ });
+
+ it('renders correct props for the DropdownContents component', () => {
+ expect(findDropdownContents().props()).toMatchObject({
+ variant: DROPDOWN_VARIANT.Embedded,
+ dropdownTitle: wrapper.vm.$options.i18n.assignColor,
+ dropdownButtonText: wrapper.vm.$options.i18n.dropdownButtonText,
+ });
+ });
+
+ it('handles DropdownContents setColor', () => {
+ findDropdownContents().vm.$emit('setColor', color);
+ expect(wrapper.emitted('updateSelectedColor')).toEqual([[color]]);
+ });
+ });
+
+ describe('when epicColorQuery errored', () => {
+ beforeEach(async () => {
+ createComponent({ queryHandler: errorQueryHandler });
+ await waitForPromises();
+ });
+
+ it('creates flash with error message', () => {
+ expect(createFlash).toHaveBeenCalledWith({
+ captureError: true,
+ message: 'Error fetching epic color.',
+ });
+ });
+ });
+
+ it('emits `updateSelectedColor` event on dropdown contents `setColor` event if iid is not set', () => {
+ createComponent({ propsData: { iid: undefined } });
+
+ findDropdownContents().vm.$emit('setColor', color);
+ expect(wrapper.emitted('updateSelectedColor')).toEqual([[color]]);
+ });
+
+ describe('when updating color for epic', () => {
+ beforeEach(() => {
+ createComponent();
+ findDropdownContents().vm.$emit('setColor', color);
+ });
+
+ it('sets the loading state', () => {
+ expect(findSidebarEditableItem().props('loading')).toBe(true);
+ });
+
+ it('updates color correctly after successful mutation', async () => {
+ await waitForPromises();
+ expect(findDropdownValue().props('selectedColor').color).toEqual(
+ updateColorMutationResponse.data.updateIssuableColor.issuable.color,
+ );
+ });
+
+ it('displays an error if mutation was rejected', async () => {
+ createComponent({ mutationHandler: errorMutationHandler });
+ findDropdownContents().vm.$emit('setColor', color);
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ captureError: true,
+ error: expect.anything(),
+ message: 'An error occurred while updating color.',
+ });
+ });
+ });
+});
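The spec above wires the component to Apollo through `createMockApollo`, which pairs each GraphQL document with a `jest.fn()` resolver; resolving, rejecting, or swapping handlers selects the path under test. A trimmed-down sketch of that wiring, assuming it sits next to the spec's `mock_data.js`, with the assertion kept to the handler itself:

```js
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import epicColorQuery from '~/vue_shared/components/color_select_dropdown/graphql/epic_color.query.graphql';
import ColorSelectRoot from '~/vue_shared/components/color_select_dropdown/color_select_root.vue';
import { colorQueryResponse } from './mock_data';

Vue.use(VueApollo);

it('asks the mocked Apollo client for the epic color on mount', async () => {
  const queryHandler = jest.fn().mockResolvedValue(colorQueryResponse);

  shallowMount(ColorSelectRoot, {
    apolloProvider: createMockApollo([[epicColorQuery, queryHandler]]),
    propsData: { allowEdit: true, iid: '1', fullPath: 'workspace-1' },
    provide: { canUpdate: true },
  });

  await waitForPromises();

  expect(queryHandler).toHaveBeenCalled();
});
```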
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_color_view_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_color_view_spec.js
new file mode 100644
index 00000000000..303824c77b3
--- /dev/null
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_color_view_spec.js
@@ -0,0 +1,43 @@
+import { GlDropdownForm } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import DropdownContentsColorView from '~/vue_shared/components/color_select_dropdown/dropdown_contents_color_view.vue';
+import ColorItem from '~/vue_shared/components/color_select_dropdown/color_item.vue';
+import { ISSUABLE_COLORS } from '~/vue_shared/components/color_select_dropdown/constants';
+import { color as defaultColor } from './mock_data';
+
+const propsData = {
+ selectedColor: defaultColor,
+};
+
+describe('DropdownContentsColorView', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(DropdownContentsColorView, {
+ propsData,
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findColors = () => wrapper.findAllComponents(ColorItem);
+ const findColorList = () => wrapper.findComponent(GlDropdownForm);
+
+ it('renders color list', async () => {
+ expect(findColorList().exists()).toBe(true);
+ expect(findColors()).toHaveLength(ISSUABLE_COLORS.length);
+ });
+
+ it.each(ISSUABLE_COLORS)('emits an `input` event with %o on click on the option %#', (color) => {
+ const colorIndex = ISSUABLE_COLORS.indexOf(color);
+ findColors().at(colorIndex).trigger('click');
+
+ expect(wrapper.emitted('input')[0][0]).toMatchObject(color);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js
new file mode 100644
index 00000000000..74f50b878e2
--- /dev/null
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_contents_spec.js
@@ -0,0 +1,113 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { DROPDOWN_VARIANT } from '~/vue_shared/components/color_select_dropdown/constants';
+import DropdownContents from '~/vue_shared/components/color_select_dropdown/dropdown_contents.vue';
+import DropdownContentsColorView from '~/vue_shared/components/color_select_dropdown/dropdown_contents_color_view.vue';
+
+import { color } from './mock_data';
+
+const showDropdown = jest.fn();
+const focusInput = jest.fn();
+
+const defaultProps = {
+ dropdownTitle: '',
+ selectedColor: color,
+ dropdownButtonText: '',
+ variant: '',
+ isVisible: false,
+};
+
+const GlDropdownStub = {
+ template: `
+ <div>
+ <slot name="header"></slot>
+ <slot></slot>
+ </div>
+ `,
+ methods: {
+ show: showDropdown,
+ hide: jest.fn(),
+ },
+};
+
+const DropdownHeaderStub = {
+ template: `
+ <div>Hello, I am a header</div>
+ `,
+ methods: {
+ focusInput,
+ },
+};
+
+describe('DropdownContent', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = shallowMount(DropdownContents, {
+ propsData: {
+ ...defaultProps,
+ ...propsData,
+ },
+ stubs: {
+ GlDropdown: GlDropdownStub,
+ DropdownHeader: DropdownHeaderStub,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findColorView = () => wrapper.findComponent(DropdownContentsColorView);
+ const findDropdownHeader = () => wrapper.findComponent(DropdownHeaderStub);
+ const findDropdown = () => wrapper.findComponent(GlDropdownStub);
+
+ it('calls dropdown `show` method on `isVisible` prop change', async () => {
+ createComponent();
+ await wrapper.setProps({
+ isVisible: true,
+ });
+
+ expect(showDropdown).toHaveBeenCalledTimes(1);
+ });
+
+ it('does not emit `setColor` event on dropdown hide if color did not change', () => {
+ createComponent();
+ findDropdown().vm.$emit('hide');
+
+ expect(wrapper.emitted('setColor')).toBeUndefined();
+ });
+
+ it('emits `setColor` event on dropdown hide if color changed on non-sidebar widget', async () => {
+ createComponent({ propsData: { variant: DROPDOWN_VARIANT.Embedded } });
+ const updatedColor = {
+ title: 'Blue-gray',
+ color: '#6699cc',
+ };
+ findColorView().vm.$emit('input', updatedColor);
+ await nextTick();
+ findDropdown().vm.$emit('hide');
+
+ expect(wrapper.emitted('setColor')).toEqual([[updatedColor]]);
+ });
+
+ it('emits `setColor` event on visibility change if color changed on sidebar widget', async () => {
+ createComponent({ propsData: { variant: DROPDOWN_VARIANT.Sidebar, isVisible: true } });
+ const updatedColor = {
+ title: 'Blue-gray',
+ color: '#6699cc',
+ };
+ findColorView().vm.$emit('input', updatedColor);
+ wrapper.setProps({ isVisible: false });
+ await nextTick();
+
+ expect(wrapper.emitted('setColor')).toEqual([[updatedColor]]);
+ });
+
+ it('renders header', () => {
+ createComponent();
+
+ expect(findDropdownHeader().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_header_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_header_spec.js
new file mode 100644
index 00000000000..d203d78477f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_header_spec.js
@@ -0,0 +1,40 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import DropdownHeader from '~/vue_shared/components/color_select_dropdown/dropdown_header.vue';
+
+const propsData = {
+ dropdownTitle: 'Epic color',
+};
+
+describe('DropdownHeader', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(DropdownHeader, { propsData });
+ };
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the correct title', () => {
+ expect(wrapper.text()).toBe(propsData.dropdownTitle);
+ });
+
+ it('renders a close button', () => {
+ expect(findButton().attributes('aria-label')).toBe('Close');
+ });
+
+ it('emits `closeDropdown` event on button click', () => {
+ expect(wrapper.emitted('closeDropdown')).toBeUndefined();
+ findButton().vm.$emit('click');
+
+ expect(wrapper.emitted('closeDropdown')).toEqual([[]]);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js
new file mode 100644
index 00000000000..f22592dd604
--- /dev/null
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/dropdown_value_spec.js
@@ -0,0 +1,46 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import ColorItem from '~/vue_shared/components/color_select_dropdown/color_item.vue';
+import DropdownValue from '~/vue_shared/components/color_select_dropdown/dropdown_value.vue';
+
+import { color } from './mock_data';
+
+const propsData = {
+ selectedColor: color,
+};
+
+describe('DropdownValue', () => {
+ let wrapper;
+
+ const findColorItems = () => wrapper.findAllComponents(ColorItem);
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(DropdownValue, { propsData });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when there is a color set', () => {
+ it('renders the color', () => {
+ expect(findColorItems()).toHaveLength(2);
+ });
+
+ it.each`
+ index | cssClass
+ ${0} | ${['gl-font-base', 'gl-line-height-24']}
+ ${1} | ${['hide-collapsed']}
+ `(
+ 'passes correct props to the ColorItem with CSS class `$cssClass`',
+ async ({ index, cssClass }) => {
+ expect(findColorItems().at(index).props()).toMatchObject(propsData.selectedColor);
+ expect(findColorItems().at(index).classes()).toEqual(cssClass);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/mock_data.js b/spec/frontend/vue_shared/components/color_select_dropdown/mock_data.js
new file mode 100644
index 00000000000..097f47cc731
--- /dev/null
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/mock_data.js
@@ -0,0 +1,30 @@
+export const color = {
+ color: '#217645',
+ title: 'Green',
+};
+
+export const colorQueryResponse = {
+ data: {
+ workspace: {
+ id: 'gid://gitlab/Workspace/1',
+ issuable: {
+ __typename: 'Epic',
+ id: 'gid://gitlab/Epic/1',
+ color: '#217645',
+ },
+ },
+ },
+};
+
+export const updateColorMutationResponse = {
+ data: {
+ updateIssuableColor: {
+ issuable: {
+ __typename: 'Epic',
+ id: 'gid://gitlab/Epic/1',
+ color: '#217645',
+ },
+ errors: [],
+ },
+ },
+};
diff --git a/spec/frontend/vue_shared/components/confidentiality_badge_spec.js b/spec/frontend/vue_shared/components/confidentiality_badge_spec.js
index 9d11fbbaf55..e1860d3399b 100644
--- a/spec/frontend/vue_shared/components/confidentiality_badge_spec.js
+++ b/spec/frontend/vue_shared/components/confidentiality_badge_spec.js
@@ -29,8 +29,8 @@ describe('ConfidentialityBadge', () => {
it.each`
workspaceType | issuableType | expectedTooltip
- ${WorkspaceType.project} | ${IssuableType.Issue} | ${'Only project members with at least Reporter role can view or be notified about this issue.'}
- ${WorkspaceType.group} | ${IssuableType.Epic} | ${'Only group members with at least Reporter role can view or be notified about this epic.'}
+ ${WorkspaceType.project} | ${IssuableType.Issue} | ${'Only project members with at least the Reporter role, the author, and assignees can view or be notified about this issue.'}
+ ${WorkspaceType.group} | ${IssuableType.Epic} | ${'Only group members with at least the Reporter role can view or be notified about this epic.'}
`(
'should render gl-badge with correct tooltip when workspaceType is $workspaceType and issuableType is $issuableType',
({ workspaceType, issuableType, expectedTooltip }) => {
diff --git a/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js b/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
index 1397fb0405e..01ef52c6af9 100644
--- a/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
@@ -1,3 +1,4 @@
+import { GlSkeletonLoader } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
@@ -39,10 +40,10 @@ describe('MarkdownViewer', () => {
});
});
- it('renders an animation container while the markdown is loading', () => {
+ it('renders a skeleton loader while the markdown is loading', () => {
createComponent();
- expect(wrapper.find('.animation-container').exists()).toBe(true);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
it('renders markdown preview and loads rendered markdown from server', () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index f03a2e7934f..51161a1a0ef 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -77,7 +77,7 @@ describe('LabelToken', () => {
describe('getActiveLabel', () => {
it('returns label object from labels array based on provided `currentValue` param', () => {
- expect(wrapper.vm.getActiveLabel(mockLabels, 'foo label')).toEqual(mockRegularLabel);
+ expect(wrapper.vm.getActiveLabel(mockLabels, 'Foo Label')).toEqual(mockRegularLabel);
});
});
diff --git a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
index e636f58d868..e1da8b690af 100644
--- a/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
+++ b/spec/frontend/vue_shared/components/form/input_copy_toggle_visibility_spec.js
@@ -66,7 +66,7 @@ describe('InputCopyToggleVisibility', () => {
});
it('displays value as hidden', () => {
- expect(findFormInputGroup().props('value')).toBe('********************');
+ expect(findFormInput().element.value).toBe('********************');
});
it('saves actual value to clipboard when manually copied', () => {
@@ -77,6 +77,16 @@ describe('InputCopyToggleVisibility', () => {
expect(event.preventDefault).toHaveBeenCalled();
});
+ it('emits `copy` event when manually copied the token', () => {
+ expect(wrapper.emitted('copy')).toBeUndefined();
+
+ findFormInput().element.dispatchEvent(createCopyEvent());
+
+ expect(wrapper.emitted()).toHaveProperty('copy');
+ expect(wrapper.emitted('copy')).toHaveLength(1);
+ expect(wrapper.emitted('copy')[0]).toEqual([]);
+ });
+
describe('visibility toggle button', () => {
it('renders a reveal button', () => {
const revealButton = findRevealButton();
@@ -97,7 +107,7 @@ describe('InputCopyToggleVisibility', () => {
});
it('displays value', () => {
- expect(findFormInputGroup().props('value')).toBe(valueProp);
+ expect(findFormInput().element.value).toBe(valueProp);
});
it('renders a hide button', () => {
@@ -135,6 +145,8 @@ describe('InputCopyToggleVisibility', () => {
});
it('emits `copy` event', () => {
+ expect(wrapper.emitted()).toHaveProperty('copy');
+ expect(wrapper.emitted('copy')).toHaveLength(1);
expect(wrapper.emitted('copy')[0]).toEqual([]);
});
});
@@ -147,25 +159,52 @@ describe('InputCopyToggleVisibility', () => {
});
it('displays value as hidden with 20 asterisks', () => {
- expect(findFormInputGroup().props('value')).toBe('********************');
+ expect(findFormInput().element.value).toBe('********************');
});
});
describe('when `initialVisibility` prop is `true`', () => {
+ const label = 'My label';
+
beforeEach(() => {
createComponent({
propsData: {
value: valueProp,
initialVisibility: true,
+ label,
+ 'label-for': 'my-input',
+ formInputGroupProps: {
+ id: 'my-input',
+ },
},
});
});
it('displays value', () => {
- expect(findFormInputGroup().props('value')).toBe(valueProp);
+ expect(findFormInput().element.value).toBe(valueProp);
});
itDoesNotModifyCopyEvent();
+
+ describe('when input is clicked', () => {
+ it('selects input value', async () => {
+ const mockSelect = jest.fn();
+ wrapper.vm.$refs.input.$el.select = mockSelect;
+ await wrapper.findByLabelText(label).trigger('click');
+
+ expect(mockSelect).toHaveBeenCalled();
+ });
+ });
+
+ describe('when label is clicked', () => {
+ it('selects input value', async () => {
+ const mockSelect = jest.fn();
+ wrapper.vm.$refs.input.$el.select = mockSelect;
+ await wrapper.find('label').trigger('click');
+
+ expect(mockSelect).toHaveBeenCalled();
+ });
+ });
});
describe('when `showToggleVisibilityButton` is `false`', () => {
@@ -184,7 +223,7 @@ describe('InputCopyToggleVisibility', () => {
});
it('displays value', () => {
- expect(findFormInputGroup().props('value')).toBe(valueProp);
+ expect(findFormInput().element.value).toBe(valueProp);
});
itDoesNotModifyCopyEvent();
@@ -204,16 +243,30 @@ describe('InputCopyToggleVisibility', () => {
});
});
- it('passes `formInputGroupProps` prop to `GlFormInputGroup`', () => {
+ it('passes `formInputGroupProps` prop only to the input', () => {
createComponent({
propsData: {
formInputGroupProps: {
- label: 'Foo bar',
+ name: 'Foo bar',
+ 'data-qa-selector': 'Foo bar',
+ class: 'Foo bar',
+ id: 'Foo bar',
},
},
});
- expect(findFormInputGroup().props('label')).toBe('Foo bar');
+ expect(findFormInput().attributes()).toMatchObject({
+ name: 'Foo bar',
+ 'data-qa-selector': 'Foo bar',
+ class: expect.stringContaining('Foo bar'),
+ id: 'Foo bar',
+ });
+
+ const attributesInputGroup = findFormInputGroup().attributes();
+ expect(attributesInputGroup.name).toBeUndefined();
+ expect(attributesInputGroup['data-qa-selector']).toBeUndefined();
+ expect(attributesInputGroup.class).not.toContain('Foo bar');
+ expect(attributesInputGroup.id).toBeUndefined();
});
it('passes `copyButtonTitle` prop to `ClipboardButton`', () => {
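Several of the new assertions above inspect `wrapper.emitted()`, which records every event a component emits together with its payload arguments. A self-contained sketch of those three assertions, using a stand-in component in place of `InputCopyToggleVisibility`:

```js
import { shallowMount } from '@vue/test-utils';

// Stand-in emitter; the real spec listens to InputCopyToggleVisibility.
const CopyButton = {
  template: `<button @click="$emit('copy')">copy</button>`,
};

it('records the emitted `copy` event and its (empty) payload', async () => {
  const wrapper = shallowMount(CopyButton);

  await wrapper.find('button').trigger('click');

  expect(wrapper.emitted()).toHaveProperty('copy');
  expect(wrapper.emitted('copy')).toHaveLength(1);
  expect(wrapper.emitted('copy')[0]).toEqual([]);
});
```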
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
index f878d685b6d..8a187f3cb1f 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/noteable_warning_spec.js.snap
@@ -10,7 +10,7 @@ exports[`Issue Warning Component when issue is locked but not confidential rende
href="locked-path"
target="_blank"
>
- Learn more
+ Learn more.
</gl-link-stub>
</span>
`;
@@ -25,7 +25,7 @@ exports[`Issue Warning Component when noteable is confidential but not locked re
href="confidential-path"
target="_blank"
>
- Learn more
+ Learn more.
</gl-link-stub>
</span>
`;
diff --git a/spec/frontend/vue_shared/components/notes/system_note_spec.js b/spec/frontend/vue_shared/components/notes/system_note_spec.js
index 65f79bab005..98b04ede943 100644
--- a/spec/frontend/vue_shared/components/notes/system_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/system_note_spec.js
@@ -1,13 +1,11 @@
import MockAdapter from 'axios-mock-adapter';
import { mount } from '@vue/test-utils';
+import $ from 'jquery';
import waitForPromises from 'helpers/wait_for_promises';
-import initMRPopovers from '~/mr_popover/index';
import createStore from '~/notes/stores';
import IssueSystemNote from '~/vue_shared/components/notes/system_note.vue';
import axios from '~/lib/utils/axios_utils';
-jest.mock('~/mr_popover/index', () => jest.fn());
-
describe('system note component', () => {
let vm;
let props;
@@ -76,10 +74,12 @@ describe('system note component', () => {
expect(vm.find('.system-note-message').html()).toContain('<span>closed</span>');
});
- it('should initMRPopovers onMount', () => {
+ it('should renderGFM onMount', () => {
+ const renderGFMSpy = jest.spyOn($.fn, 'renderGFM');
+
createComponent(props);
- expect(initMRPopovers).toHaveBeenCalled();
+ expect(renderGFMSpy).toHaveBeenCalled();
});
it('renders outdated code lines', async () => {
diff --git a/spec/frontend/vue_shared/components/papa_parse_alert_spec.js b/spec/frontend/vue_shared/components/papa_parse_alert_spec.js
index 9be2de17d01..ff4febd647e 100644
--- a/spec/frontend/vue_shared/components/papa_parse_alert_spec.js
+++ b/spec/frontend/vue_shared/components/papa_parse_alert_spec.js
@@ -22,7 +22,7 @@ describe('app/assets/javascripts/vue_shared/components/papa_parse_alert.vue', ()
it('should render alert with correct props', async () => {
createComponent({ errorMessages: [{ code: 'MissingQuotes' }] });
- await nextTick;
+ await nextTick();
expect(findAlert().props()).toMatchObject({
variant: 'danger',
@@ -37,7 +37,7 @@ describe('app/assets/javascripts/vue_shared/components/papa_parse_alert.vue', ()
createComponent({
errorMessages: [{ code: 'NotDefined', message: 'Error code is undefined' }],
});
- await nextTick;
+ await nextTick();
expect(findAlert().text()).toContain('Error code is undefined');
});
diff --git a/spec/frontend/vue_shared/components/registry/registry_search_spec.js b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
index f5ef5b3d443..20716e79a04 100644
--- a/spec/frontend/vue_shared/components/registry/registry_search_spec.js
+++ b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
@@ -11,7 +11,7 @@ describe('Registry Search', () => {
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
const defaultProps = {
- filter: [],
+ filters: [],
sorting: { sort: 'asc', orderBy: 'name' },
tokens: [{ type: 'foo' }],
sortableFields: [
@@ -123,7 +123,7 @@ describe('Registry Search', () => {
});
describe('query string calculation', () => {
- const filter = [
+ const filters = [
{ type: FILTERED_SEARCH_TERM, value: { data: 'one' } },
{ type: FILTERED_SEARCH_TERM, value: { data: 'two' } },
{ type: 'typeOne', value: { data: 'value_one' } },
@@ -131,7 +131,7 @@ describe('Registry Search', () => {
];
it('aggregates the filter in the correct object', () => {
- mountComponent({ ...defaultProps, filter });
+ mountComponent({ ...defaultProps, filters });
findFilteredSearch().vm.$emit('submit');
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
index 001b6ee4a6f..7173abe1316 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
@@ -48,12 +48,12 @@ describe('RunnerInstructionsModal component', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findPlatformButtonGroup = () => wrapper.findByTestId('platform-buttons');
const findPlatformButtons = () => findPlatformButtonGroup().findAllComponents(GlButton);
- const findOsxPlatformButton = () => wrapper.find({ ref: 'osx' });
const findArchitectureDropdownItems = () => wrapper.findAllByTestId('architecture-dropdown-item');
+ const findBinaryDownloadButton = () => wrapper.findByTestId('binary-download-button');
const findBinaryInstructions = () => wrapper.findByTestId('binary-instructions');
const findRegisterCommand = () => wrapper.findByTestId('register-command');
- const createComponent = ({ props, ...options } = {}) => {
+ const createComponent = ({ props, shown = true, ...options } = {}) => {
const requestHandlers = [
[getRunnerPlatformsQuery, runnerPlatformsHandler],
[getRunnerSetupInstructionsQuery, runnerSetupInstructionsHandler],
@@ -72,169 +72,202 @@ describe('RunnerInstructionsModal component', () => {
...options,
}),
);
+
+ // trigger open modal
+ if (shown) {
+ findModal().vm.$emit('shown');
+ }
};
beforeEach(async () => {
runnerPlatformsHandler = jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms);
runnerSetupInstructionsHandler = jest.fn().mockResolvedValue(mockGraphqlInstructions);
-
- createComponent();
- await waitForPromises();
});
afterEach(() => {
wrapper.destroy();
});
- it('should not show alert', () => {
- expect(findAlert().exists()).toBe(false);
- });
-
- it('should contain a number of platforms buttons', () => {
- expect(runnerPlatformsHandler).toHaveBeenCalledWith({});
+ describe('when the modal is shown', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
- const buttons = findPlatformButtons();
+ it('should not show alert', async () => {
+ expect(findAlert().exists()).toBe(false);
+ });
- expect(buttons).toHaveLength(mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes.length);
- });
+ it('should contain a number of platforms buttons', () => {
+ expect(runnerPlatformsHandler).toHaveBeenCalledWith({});
- it('should contain a number of dropdown items for the architecture options', () => {
- expect(findArchitectureDropdownItems()).toHaveLength(
- mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes.length,
- );
- });
+ const buttons = findPlatformButtons();
- describe('should display default instructions', () => {
- const { installInstructions, registerInstructions } = mockGraphqlInstructions.data.runnerSetup;
+ expect(buttons).toHaveLength(mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes.length);
+ });
- it('runner instructions are requested', () => {
- expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
- platform: 'linux',
- architecture: 'amd64',
- });
+ it('should contain a number of dropdown items for the architecture options', () => {
+ expect(findArchitectureDropdownItems()).toHaveLength(
+ mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes.length,
+ );
});
- it('binary instructions are shown', async () => {
- await waitForPromises();
- const instructions = findBinaryInstructions().text();
+ describe('should display default instructions', () => {
+ const {
+ installInstructions,
+ registerInstructions,
+ } = mockGraphqlInstructions.data.runnerSetup;
- expect(instructions).toBe(installInstructions);
- });
+ it('runner instructions are requested', () => {
+ expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
+ platform: 'linux',
+ architecture: 'amd64',
+ });
+ });
- it('register command is shown with a replaced token', async () => {
- await waitForPromises();
- const instructions = findRegisterCommand().text();
+ it('binary instructions are shown', async () => {
+ const instructions = findBinaryInstructions().text();
- expect(instructions).toBe(
- 'sudo gitlab-runner register --url http://gdk.test:3000/ --registration-token MY_TOKEN',
- );
- });
+ expect(instructions).toBe(installInstructions);
+ });
- describe('when a register token is not shown', () => {
- beforeEach(async () => {
- createComponent({ props: { registrationToken: undefined } });
- await waitForPromises();
+ it('register command is shown with a replaced token', async () => {
+ const command = findRegisterCommand().text();
+
+ expect(command).toBe(
+ 'sudo gitlab-runner register --url http://gdk.test:3000/ --registration-token MY_TOKEN',
+ );
});
- it('register command is shown without a defined registration token', () => {
- const instructions = findRegisterCommand().text();
+ describe('when a register token is not shown', () => {
+ beforeEach(async () => {
+ createComponent({ props: { registrationToken: undefined } });
+ await waitForPromises();
+ });
+
+ it('register command is shown without a defined registration token', () => {
+ const instructions = findRegisterCommand().text();
- expect(instructions).toBe(registerInstructions);
+ expect(instructions).toBe(registerInstructions);
+ });
});
- });
- describe('when the modal is shown', () => {
- it('sets the focus on the selected platform', () => {
- findPlatformButtons().at(0).element.focus = jest.fn();
+ describe('when providing a defaultPlatformName', () => {
+ beforeEach(async () => {
+ createComponent({ props: { defaultPlatformName: 'osx' } });
+ await waitForPromises();
+ });
+
+ it('runner instructions for the default selected platform are requested', () => {
+ expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
+ platform: 'osx',
+ architecture: 'amd64',
+ });
+ });
+
+ it('sets the focus on the default selected platform', () => {
+ const findOsxPlatformButton = () => wrapper.find({ ref: 'osx' });
+
+ findOsxPlatformButton().element.focus = jest.fn();
- findModal().vm.$emit('shown');
+ findModal().vm.$emit('shown');
- expect(findPlatformButtons().at(0).element.focus).toHaveBeenCalled();
+ expect(findOsxPlatformButton().element.focus).toHaveBeenCalled();
+ });
});
});
- describe('when providing a defaultPlatformName', () => {
+ describe('after a platform and architecture are selected', () => {
+ const windowsIndex = 2;
+ const { installInstructions } = mockGraphqlInstructionsWindows.data.runnerSetup;
+
beforeEach(async () => {
- createComponent({ props: { defaultPlatformName: 'osx' } });
+ runnerSetupInstructionsHandler.mockResolvedValue(mockGraphqlInstructionsWindows);
+
+ findPlatformButtons().at(windowsIndex).vm.$emit('click');
await waitForPromises();
});
- it('runner instructions for the default selected platform are requested', () => {
- expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
- platform: 'osx',
+ it('runner instructions are requested', () => {
+ expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
+ platform: 'windows',
architecture: 'amd64',
});
});
- it('sets the focus on the default selected platform', () => {
- findOsxPlatformButton().element.focus = jest.fn();
-
- findModal().vm.$emit('shown');
+ it('architecture download link is updated', () => {
+ const architectures =
+ mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[windowsIndex].architectures.nodes;
- expect(findOsxPlatformButton().element.focus).toHaveBeenCalled();
+ expect(findBinaryDownloadButton().attributes('href')).toBe(
+ architectures[0].downloadLocation,
+ );
});
- });
- });
- describe('after a platform and architecture are selected', () => {
- const { installInstructions } = mockGraphqlInstructionsWindows.data.runnerSetup;
+ it('other binary instructions are shown', () => {
+ const instructions = findBinaryInstructions().text();
- beforeEach(async () => {
- runnerSetupInstructionsHandler.mockResolvedValue(mockGraphqlInstructionsWindows);
+ expect(instructions).toBe(installInstructions);
+ });
- findPlatformButtons().at(2).vm.$emit('click'); // another option, happens to be windows
- await nextTick();
+ it('register command is shown', () => {
+ const command = findRegisterCommand().text();
- findArchitectureDropdownItems().at(1).vm.$emit('click'); // another option
- await nextTick();
- });
+ expect(command).toBe(
+ './gitlab-runner.exe register --url http://gdk.test:3000/ --registration-token MY_TOKEN',
+ );
+ });
+
+ it('runner instructions are requested with another architecture', async () => {
+ findArchitectureDropdownItems().at(1).vm.$emit('click');
+ await waitForPromises();
- it('runner instructions are requested', () => {
- expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
- platform: 'windows',
- architecture: '386',
+ expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
+ platform: 'windows',
+ architecture: '386',
+ });
});
});
- it('other binary instructions are shown', () => {
- const instructions = findBinaryInstructions().text();
+ describe('when the modal resizes', () => {
+ it('to an xs viewport', async () => {
+ MockResizeObserver.mockResize('xs');
+ await nextTick();
- expect(instructions).toBe(installInstructions);
- });
+ expect(findPlatformButtonGroup().attributes('vertical')).toBeTruthy();
+ });
- it('register command is shown', () => {
- const command = findRegisterCommand().text();
+ it('to a non-xs viewport', async () => {
+ MockResizeObserver.mockResize('sm');
+ await nextTick();
- expect(command).toBe(
- './gitlab-runner.exe register --url http://gdk.test:3000/ --registration-token MY_TOKEN',
- );
+ expect(findPlatformButtonGroup().props('vertical')).toBeFalsy();
+ });
});
});
- describe('when the modal resizes', () => {
- it('to an xs viewport', async () => {
- MockResizeObserver.mockResize('xs');
- await nextTick();
-
- expect(findPlatformButtonGroup().attributes('vertical')).toBeTruthy();
+ describe('when the modal is not shown', () => {
+ beforeEach(async () => {
+ createComponent({ shown: false });
+ await waitForPromises();
});
- it('to a non-xs viewport', async () => {
- MockResizeObserver.mockResize('sm');
- await nextTick();
-
- expect(findPlatformButtonGroup().props('vertical')).toBeFalsy();
+ it('does not fetch instructions', () => {
+ expect(runnerPlatformsHandler).not.toHaveBeenCalled();
+ expect(runnerSetupInstructionsHandler).not.toHaveBeenCalled();
});
});
describe('when apollo is loading', () => {
- it('should show a skeleton loader', async () => {
+ beforeEach(() => {
createComponent();
+ });
+
+ it('should show a skeleton loader', async () => {
expect(findSkeletonLoader().exists()).toBe(true);
expect(findGlLoadingIcon().exists()).toBe(false);
- await nextTick();
- jest.runOnlyPendingTimers();
+ // wait for the fetch of both `platforms` and `instructions`
await nextTick();
await nextTick();
@@ -242,7 +275,6 @@ describe('RunnerInstructionsModal component', () => {
});
it('once loaded, should not show a loading state', async () => {
- createComponent();
await waitForPromises();
expect(findSkeletonLoader().exists()).toBe(false);
@@ -255,7 +287,6 @@ describe('RunnerInstructionsModal component', () => {
runnerSetupInstructionsHandler.mockRejectedValue();
createComponent();
-
await waitForPromises();
});
@@ -287,6 +318,7 @@ describe('RunnerInstructionsModal component', () => {
mockShow = jest.fn();
createComponent({
+ shown: false,
stubs: {
GlModal: getGlModalStub({ show: mockShow }),
},
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
index 9a95a838291..986d76d2b95 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
@@ -1,6 +1,5 @@
-import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RunnerInstructions from '~/vue_shared/components/runner_instructions/runner_instructions.vue';
import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
@@ -11,7 +10,11 @@ describe('RunnerInstructions component', () => {
const findModal = () => wrapper.findComponent(RunnerInstructionsModal);
const createComponent = () => {
- wrapper = extendedWrapper(shallowMount(RunnerInstructions));
+ wrapper = shallowMountExtended(RunnerInstructions, {
+ directives: {
+ GlModal: createMockDirective(),
+ },
+ });
};
beforeEach(() => {
@@ -23,19 +26,12 @@ describe('RunnerInstructions component', () => {
});
it('should show the "Show runner installation instructions" button', () => {
- expect(findModalButton().exists()).toBe(true);
expect(findModalButton().text()).toBe('Show runner installation instructions');
});
- it('should not render the modal once mounted', () => {
- expect(findModal().exists()).toBe(false);
- });
-
- it('should render the modal once clicked', async () => {
- findModalButton().vm.$emit('click');
-
- await nextTick();
+ it('should render the modal', () => {
+ const modalId = getBinding(findModal().element, 'gl-modal');
- expect(findModal().exists()).toBe(true);
+ expect(findModalButton().attributes('modal-id')).toBe(modalId);
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 42202db4935..00c8e3a814a 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -226,12 +226,7 @@ describe('DropdownContentsLabelsView', () => {
preventDefault: fakePreventDefault,
});
- expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([
- {
- ...mockLabels[2],
- set: true,
- },
- ]);
+ expect(wrapper.vm.updateSelectedLabels).toHaveBeenCalledWith([mockLabels[2]]);
});
it('calls action `toggleDropdownContents` when Esc key is pressed', () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js
index bd1705e7693..bedb6204088 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlLink } from '@gitlab/ui';
+import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import LabelItem from '~/vue_shared/components/sidebar/labels_select_vue/label_item.vue';
@@ -45,18 +45,26 @@ describe('LabelItem', () => {
wrapperTemp.destroy();
});
- it('renders visible gl-icon component when `isLabelSet` prop is true', () => {
- const wrapperTemp = createComponent({
- isLabelSet: true,
- });
-
- const iconEl = wrapperTemp.find(GlIcon);
-
- expect(iconEl.isVisible()).toBe(true);
- expect(iconEl.props('name')).toBe('mobile-issue-close');
-
- wrapperTemp.destroy();
- });
+ it.each`
+ isLabelSet | isLabelIndeterminate | testId | iconName
+ ${true} | ${false} | ${'checked-icon'} | ${'mobile-issue-close'}
+ ${false} | ${true} | ${'indeterminate-icon'} | ${'dash'}
+ `(
+ 'renders visible gl-icon component when `isLabelSet` prop is $isLabelSet and `isLabelIndeterminate` is $isLabelIndeterminate',
+ ({ isLabelSet, isLabelIndeterminate, testId, iconName }) => {
+ const wrapperTemp = createComponent({
+ isLabelSet,
+ isLabelIndeterminate,
+ });
+
+ const iconEl = wrapperTemp.find(`[data-testid="${testId}"]`);
+
+ expect(iconEl.isVisible()).toBe(true);
+ expect(iconEl.props('name')).toBe(iconName);
+
+ wrapperTemp.destroy();
+ },
+ );
it('renders visible span element as placeholder instead of gl-icon when `isLabelSet` prop is false', () => {
const wrapperTemp = createComponent({
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
index 31819d0e2f7..c150410ff8e 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
@@ -46,9 +46,15 @@ describe('LabelsSelectRoot', () => {
describe('methods', () => {
describe('handleVuexActionDispatch', () => {
+ const touchedLabels = [
+ {
+ id: 2,
+ touched: true,
+ },
+ ];
+
it('calls `handleDropdownClose` when params `action.type` is `toggleDropdownContents` and state has `showDropdownButton` & `showDropdownContents` props `false`', () => {
createComponent();
- jest.spyOn(wrapper.vm, 'handleDropdownClose').mockImplementation();
wrapper.vm.handleVuexActionDispatch(
{ type: 'toggleDropdownContents' },
@@ -59,14 +65,12 @@ describe('LabelsSelectRoot', () => {
},
);
- expect(wrapper.vm.handleDropdownClose).toHaveBeenCalledWith(
- expect.arrayContaining([
- {
- id: 2,
- touched: true,
- },
- ]),
- );
+ // The `onDropdownClose` event emitted from the component always includes `touchedLabels`,
+ // while the first param of the method is the list of labels that were added/removed.
+ expect(wrapper.emitted('updateSelectedLabels')).toBeTruthy();
+ expect(wrapper.emitted('updateSelectedLabels')[0]).toEqual([touchedLabels]);
+ expect(wrapper.emitted('onDropdownClose')).toBeTruthy();
+ expect(wrapper.emitted('onDropdownClose')[0]).toEqual([touchedLabels]);
});
it('calls `handleDropdownClose` with state.labels filtered using `set` prop when dropdown variant is `embedded`', () => {
@@ -75,8 +79,6 @@ describe('LabelsSelectRoot', () => {
variant: 'embedded',
});
- jest.spyOn(wrapper.vm, 'handleDropdownClose').mockImplementation();
-
wrapper.vm.handleVuexActionDispatch(
{ type: 'toggleDropdownContents' },
{
@@ -86,34 +88,17 @@ describe('LabelsSelectRoot', () => {
},
);
- expect(wrapper.vm.handleDropdownClose).toHaveBeenCalledWith(
- expect.arrayContaining([
+ expect(wrapper.emitted('updateSelectedLabels')).toBeTruthy();
+ expect(wrapper.emitted('updateSelectedLabels')[0]).toEqual([
+ [
{
id: 2,
set: true,
},
- ]),
- );
- });
- });
-
- describe('handleDropdownClose', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('emits `updateSelectedLabels` & `onDropdownClose` events on component when provided `labels` param is not empty', () => {
- wrapper.vm.handleDropdownClose([{ id: 1 }, { id: 2 }]);
-
- expect(wrapper.emitted().updateSelectedLabels).toBeTruthy();
- expect(wrapper.emitted().onDropdownClose).toBeTruthy();
- });
-
- it('emits only `onDropdownClose` event on component when provided `labels` param is empty', () => {
- wrapper.vm.handleDropdownClose([]);
-
- expect(wrapper.emitted().updateSelectedLabels).toBeFalsy();
- expect(wrapper.emitted().onDropdownClose).toBeTruthy();
+ ],
+ ]);
+ expect(wrapper.emitted('onDropdownClose')).toBeTruthy();
+ expect(wrapper.emitted('onDropdownClose')[0]).toEqual([[]]);
});
});
@@ -152,13 +137,13 @@ describe('LabelsSelectRoot', () => {
it('renders `dropdown-value-collapsed` component when `allowLabelCreate` prop is `true`', async () => {
createComponent();
- await nextTick;
+ await nextTick();
expect(wrapper.find(DropdownValueCollapsed).exists()).toBe(true);
});
it('renders `dropdown-title` component', async () => {
createComponent();
- await nextTick;
+ await nextTick();
expect(wrapper.find(DropdownTitle).exists()).toBe(true);
});
@@ -166,7 +151,7 @@ describe('LabelsSelectRoot', () => {
createComponent(mockConfig, {
default: 'None',
});
- await nextTick;
+ await nextTick();
const valueComp = wrapper.find(DropdownValue);
@@ -177,14 +162,14 @@ describe('LabelsSelectRoot', () => {
it('renders `dropdown-button` component when `showDropdownButton` prop is `true`', async () => {
createComponent();
wrapper.vm.$store.dispatch('toggleDropdownButton');
- await nextTick;
+ await nextTick();
expect(wrapper.find(DropdownButton).exists()).toBe(true);
});
it('renders `dropdown-contents` component when `showDropdownButton` & `showDropdownContents` prop is `true`', async () => {
createComponent();
wrapper.vm.$store.dispatch('toggleDropdownContents');
- await nextTick;
+ await nextTick();
expect(wrapper.find(DropdownContents).exists()).toBe(true);
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
index 1f899e84897..6ad46dbe898 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/getters_spec.js
@@ -17,24 +17,39 @@ describe('LabelsSelect Getters', () => {
},
);
- it('returns label title when state.labels has only 1 label', () => {
- const labels = [{ id: 1, title: 'Foobar', set: true }];
+ describe.each`
+ dropdownVariant | isDropdownVariantSidebar | isDropdownVariantEmbedded
+ ${'sidebar'} | ${true} | ${false}
+ ${'embedded'} | ${false} | ${true}
+ `(
+ 'when dropdown variant is $dropdownVariant',
+ ({ isDropdownVariantSidebar, isDropdownVariantEmbedded }) => {
+ it('returns label title when state.labels has only 1 label', () => {
+ const labels = [{ id: 1, title: 'Foobar', set: true }];
- expect(getters.dropdownButtonText({ labels }, { isDropdownVariantSidebar: true })).toBe(
- 'Foobar',
- );
- });
+ expect(
+ getters.dropdownButtonText(
+ { labels },
+ { isDropdownVariantSidebar, isDropdownVariantEmbedded },
+ ),
+ ).toBe('Foobar');
+ });
- it('returns first label title and remaining labels count when state.labels has more than 1 label', () => {
- const labels = [
- { id: 1, title: 'Foo', set: true },
- { id: 2, title: 'Bar', set: true },
- ];
+ it('returns first label title and remaining labels count when state.labels has more than 1 label', () => {
+ const labels = [
+ { id: 1, title: 'Foo', set: true },
+ { id: 2, title: 'Bar', set: true },
+ ];
- expect(getters.dropdownButtonText({ labels }, { isDropdownVariantSidebar: true })).toBe(
- 'Foo +1 more',
- );
- });
+ expect(
+ getters.dropdownButtonText(
+ { labels },
+ { isDropdownVariantSidebar, isDropdownVariantEmbedded },
+ ),
+ ).toBe('Foo +1 more');
+ });
+ },
+ );
});
describe('selectedLabelsList', () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
index a60e6f52862..1819e750324 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -80,7 +80,10 @@ describe('LabelsSelect Mutations', () => {
});
describe(`${types.RECEIVE_SET_LABELS_SUCCESS}`, () => {
- const selectedLabels = [{ id: 2 }, { id: 4 }];
+ const selectedLabels = [
+ { id: 2, set: true },
+ { id: 4, set: true },
+ ];
const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
it('sets value of `state.labelsFetchInProgress` to false', () => {
@@ -196,20 +199,23 @@ describe('LabelsSelect Mutations', () => {
it('updates labels `set` state to match selected labels', () => {
const state = {
labels: [
- { id: 1, title: 'scoped::test', set: false },
- { id: 2, set: true, title: 'scoped::one', touched: true },
- { id: 3, title: '' },
- { id: 4, title: '' },
+ { id: 1, title: 'scoped::test', set: false, indeterminate: false },
+ { id: 2, title: 'scoped::one', set: true, indeterminate: false, touched: true },
+ { id: 3, title: '', set: false, indeterminate: false },
+ { id: 4, title: '', set: false, indeterminate: false },
+ ],
+ selectedLabels: [
+ { id: 1, set: true },
+ { id: 3, set: true },
],
- selectedLabels: [{ id: 1 }, { id: 3 }],
};
mutations[types.UPDATE_LABELS_SET_STATE](state);
expect(state.labels).toEqual([
- { id: 1, title: 'scoped::test', set: true },
- { id: 2, set: false, title: 'scoped::one', touched: true },
- { id: 3, title: '', set: true },
- { id: 4, title: '', set: false },
+ { id: 1, title: 'scoped::test', set: true, indeterminate: false },
+ { id: 2, title: 'scoped::one', set: false, indeterminate: false, touched: true },
+ { id: 3, title: '', set: true, indeterminate: false },
+ { id: 4, title: '', set: false, indeterminate: false },
]);
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js
new file mode 100644
index 00000000000..83fdc5d669d
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js
@@ -0,0 +1,14 @@
+import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
+import { HLJS_ON_AFTER_HIGHLIGHT } from '~/vue_shared/components/source_viewer/constants';
+import wrapComments from '~/vue_shared/components/source_viewer/plugins/wrap_comments';
+
+jest.mock('~/vue_shared/components/source_viewer/plugins/wrap_comments');
+const hljsMock = { addPlugin: jest.fn() };
+
+describe('Highlight.js plugin registration', () => {
+ beforeEach(() => registerPlugins(hljsMock));
+
+ it('registers our plugins', () => {
+ expect(hljsMock.addPlugin).toHaveBeenCalledWith({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapComments });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js
new file mode 100644
index 00000000000..5fd4182da29
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js
@@ -0,0 +1,29 @@
+import { HLJS_COMMENT_SELECTOR } from '~/vue_shared/components/source_viewer/constants';
+import wrapComments from '~/vue_shared/components/source_viewer/plugins/wrap_comments';
+
+describe('Highlight.js plugin for wrapping comments', () => {
+ it('mutates the input value by wrapping each line in a span tag', () => {
+ const inputValue = `<span class="${HLJS_COMMENT_SELECTOR}">/* Line 1 \n* Line 2 \n*/</span>`;
+ const outputValue = `<span class="${HLJS_COMMENT_SELECTOR}">/* Line 1 \n<span class="${HLJS_COMMENT_SELECTOR}">* Line 2 </span>\n<span class="${HLJS_COMMENT_SELECTOR}">*/</span>`;
+ const hljsResultMock = { value: inputValue };
+
+ wrapComments(hljsResultMock);
+ expect(hljsResultMock.value).toBe(outputValue);
+ });
+
+ it('does not mutate the input value if the hljs comment selector is not present', () => {
+ const inputValue = '<span class="hljs-keyword">const</span>';
+ const hljsResultMock = { value: inputValue };
+
+ wrapComments(hljsResultMock);
+ expect(hljsResultMock.value).toBe(inputValue);
+ });
+
+ it('does not mutate the input value if the hljs comment line includes a closing tag', () => {
+ const inputValue = `<span class="${HLJS_COMMENT_SELECTOR}">/* Line 1 </span> \n* Line 2 \n*/`;
+ const hljsResultMock = { value: inputValue };
+
+ wrapComments(hljsResultMock);
+ expect(hljsResultMock.value).toBe(inputValue);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index 6a9ea75127d..bb0945a1f3e 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -3,6 +3,7 @@ import Vue from 'vue';
import VueRouter from 'vue-router';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
+import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
import { ROUGE_TO_HLJS_LANGUAGE_MAP } from '~/vue_shared/components/source_viewer/constants';
import waitForPromises from 'helpers/wait_for_promises';
@@ -11,6 +12,7 @@ import eventHub from '~/notes/event_hub';
jest.mock('~/blob/line_highlighter');
jest.mock('highlight.js/lib/core');
+jest.mock('~/vue_shared/components/source_viewer/plugins/index');
Vue.use(VueRouter);
const router = new VueRouter();
@@ -59,6 +61,10 @@ describe('Source Viewer component', () => {
describe('highlight.js', () => {
beforeEach(() => createComponent({ language: mappedLanguage }));
+ it('registers our plugins for Highlight.js', () => {
+ expect(registerPlugins).toHaveBeenCalledWith(hljs);
+ });
+
it('registers the language definition', async () => {
const languageDefinition = await import(`highlight.js/lib/languages/${mappedLanguage}`);
diff --git a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
index a613b325462..1798ca5ccde 100644
--- a/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
+++ b/spec/frontend/vue_shared/components/upload_dropzone/__snapshots__/upload_dropzone_spec.js.snap
@@ -5,7 +5,7 @@ exports[`Upload dropzone component correctly overrides description and drop mess
class="gl-w-full gl-relative"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4"
type="button"
>
<div
@@ -41,7 +41,7 @@ exports[`Upload dropzone component correctly overrides description and drop mess
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style="display: none;"
>
<div
@@ -86,7 +86,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4"
type="button"
>
<div
@@ -126,7 +126,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style=""
>
<div
@@ -171,7 +171,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4"
type="button"
>
<div
@@ -211,7 +211,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style=""
>
<div
@@ -256,7 +256,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4"
type="button"
>
<div
@@ -296,7 +296,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style=""
>
<div
@@ -342,7 +342,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4"
type="button"
>
<div
@@ -382,7 +382,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style=""
>
<div
@@ -428,7 +428,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
class="gl-w-full gl-relative"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4"
type="button"
>
<div
@@ -468,7 +468,7 @@ exports[`Upload dropzone component when dragging renders correct template when d
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style="display: none;"
>
<div
@@ -514,7 +514,7 @@ exports[`Upload dropzone component when no slot provided renders default dropzon
class="gl-w-full gl-relative"
>
<button
- class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card upload-dropzone-card upload-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-4"
type="button"
>
<div
@@ -554,7 +554,7 @@ exports[`Upload dropzone component when no slot provided renders default dropzon
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style="display: none;"
>
<div
@@ -606,7 +606,7 @@ exports[`Upload dropzone component when slot provided renders dropzone with slot
name="upload-dropzone-fade"
>
<div
- class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
+ class="card upload-dropzone-border upload-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-4"
style="display: none;"
>
<div
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 65eb42ef053..70017903079 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -5,9 +5,10 @@ import { shallowMountExtended as shallowMount } from 'helpers/vue_test_utils_hel
import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue';
import IssuableAssignees from '~/issuable/components/issue_assignees.vue';
-import { mockIssuable, mockRegularLabel, mockScopedLabel } from '../mock_data';
+import { mockIssuable, mockRegularLabel } from '../mock_data';
const createComponent = ({
+ hasScopedLabelsFeature = false,
issuableSymbol = '#',
issuable = mockIssuable,
showCheckbox = true,
@@ -15,6 +16,7 @@ const createComponent = ({
} = {}) =>
shallowMount(IssuableItem, {
propsData: {
+ hasScopedLabelsFeature,
issuableSymbol,
issuable,
showDiscussions: true,
@@ -182,21 +184,6 @@ describe('IssuableItem', () => {
});
describe('methods', () => {
- describe('scopedLabel', () => {
- it.each`
- label | labelType | returnValue
- ${mockRegularLabel} | ${'regular'} | ${false}
- ${mockScopedLabel} | ${'scoped'} | ${true}
- `(
- 'return $returnValue when provided label param is a $labelType label',
- ({ label, returnValue }) => {
- wrapper = createComponent();
-
- expect(wrapper.vm.scopedLabel(label)).toBe(returnValue);
- },
- );
- });
-
describe('labelTitle', () => {
it.each`
label | propWithTitle | returnValue
@@ -500,5 +487,21 @@ describe('IssuableItem', () => {
expect(wrapper.classes()).not.toContain('today');
});
});
+
+ describe('scoped labels', () => {
+ describe.each`
+ description | labelPosition | hasScopedLabelsFeature | scoped
+ ${'when label is not scoped and there is no scoped_labels feature'} | ${0} | ${false} | ${false}
+ ${'when label is scoped and there is no scoped_labels feature'} | ${1} | ${false} | ${false}
+ ${'when label is not scoped and there is scoped_labels feature'} | ${0} | ${true} | ${false}
+ ${'when label is scoped and there is scoped_labels feature'} | ${1} | ${true} | ${true}
+ `('$description', ({ hasScopedLabelsFeature, labelPosition, scoped }) => {
+ it(`${scoped ? 'renders' : 'does not render'} as scoped label`, () => {
+ wrapper = createComponent({ hasScopedLabelsFeature });
+
+ expect(wrapper.findAllComponents(GlLabel).at(labelPosition).props('scoped')).toBe(scoped);
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
index 058cb30c1d5..66f71c0b028 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
@@ -1,9 +1,4 @@
-import {
- GlAlert,
- GlKeysetPagination,
- GlDeprecatedSkeletonLoading as GlSkeletonLoading,
- GlPagination,
-} from '@gitlab/ui';
+import { GlAlert, GlKeysetPagination, GlSkeletonLoader, GlPagination } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import VueDraggable from 'vuedraggable';
@@ -263,7 +258,7 @@ describe('IssuableListRoot', () => {
it('renders gl-loading-icon when `issuablesLoading` prop is true', () => {
wrapper = createComponent({ props: { issuablesLoading: true } });
- expect(wrapper.findAllComponents(GlSkeletonLoading)).toHaveLength(
+ expect(wrapper.findAllComponents(GlSkeletonLoader)).toHaveLength(
wrapper.vm.skeletonItemCount,
);
});
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
index 1a93838b03f..7c582360637 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
@@ -159,7 +159,6 @@ describe('IssuableBody', () => {
expect(titleEl.exists()).toBe(true);
expect(titleEl.props()).toMatchObject({
issuable: issuableBodyProps.issuable,
- statusBadgeClass: issuableBodyProps.statusBadgeClass,
statusIcon: issuableBodyProps.statusIcon,
enableEdit: issuableBodyProps.enableEdit,
});
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
index 544db891a13..e00bb184535 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlAvatarLabeled } from '@gitlab/ui';
+import { GlBadge, GlIcon, GlAvatarLabeled } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import IssuableHeader from '~/vue_shared/issuable/show/components/issuable_header.vue';
@@ -69,7 +69,7 @@ describe('IssuableHeader', () => {
describe('template', () => {
it('renders issuable status icon and text', () => {
createComponent();
- const statusBoxEl = wrapper.findByTestId('status');
+ const statusBoxEl = wrapper.findComponent(GlBadge);
const statusIconEl = statusBoxEl.findComponent(GlIcon);
expect(statusBoxEl.exists()).toBe(true);
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
index 8b027f990a2..f56064ed8e1 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
@@ -47,7 +47,6 @@ describe('IssuableShowRoot', () => {
describe('template', () => {
const {
- statusBadgeClass,
statusIcon,
statusIconClass,
enableEdit,
@@ -69,7 +68,6 @@ describe('IssuableShowRoot', () => {
expect(issuableHeader.exists()).toBe(true);
expect(issuableHeader.props()).toMatchObject({
issuableState: state,
- statusBadgeClass,
statusIcon,
statusIconClass,
blocked,
@@ -91,7 +89,6 @@ describe('IssuableShowRoot', () => {
expect(issuableBody.exists()).toBe(true);
expect(issuableBody.props()).toMatchObject({
issuable: mockIssuable,
- statusBadgeClass,
statusIcon,
enableEdit,
enableAutocomplete,
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
index 11e3302d409..5aa67667033 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_title_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlButton, GlIntersectionObserver } from '@gitlab/ui';
+import { GlIcon, GlBadge, GlButton, GlIntersectionObserver } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
@@ -40,7 +40,7 @@ describe('IssuableTitle', () => {
describe('methods', () => {
describe('handleTitleAppear', () => {
it('sets value of `stickyTitleVisible` prop to false', () => {
- wrapper.find(GlIntersectionObserver).vm.$emit('appear');
+ wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear');
expect(wrapper.vm.stickyTitleVisible).toBe(false);
});
@@ -48,7 +48,7 @@ describe('IssuableTitle', () => {
describe('handleTitleDisappear', () => {
it('sets value of `stickyTitleVisible` prop to true', () => {
- wrapper.find(GlIntersectionObserver).vm.$emit('disappear');
+ wrapper.findComponent(GlIntersectionObserver).vm.$emit('disappear');
expect(wrapper.vm.stickyTitleVisible).toBe(true);
});
@@ -70,14 +70,14 @@ describe('IssuableTitle', () => {
expect(titleEl.exists()).toBe(true);
expect(titleEl.html()).toBe(
- '<h1 dir="auto" data-testid="title" class="title qa-title"><b>Sample</b> title</h1>',
+ '<h1 dir="auto" data-testid="title" class="title qa-title gl-font-size-h-display"><b>Sample</b> title</h1>',
);
wrapperWithTitle.destroy();
});
it('renders edit button', () => {
- const editButtonEl = wrapper.find(GlButton);
+ const editButtonEl = wrapper.findComponent(GlButton);
const tooltip = getBinding(editButtonEl.element, 'gl-tooltip');
expect(editButtonEl.exists()).toBe(true);
@@ -97,7 +97,10 @@ describe('IssuableTitle', () => {
const stickyHeaderEl = wrapper.find('[data-testid="header"]');
expect(stickyHeaderEl.exists()).toBe(true);
- expect(stickyHeaderEl.find(GlIcon).props('name')).toBe(issuableTitleProps.statusIcon);
+ expect(stickyHeaderEl.findComponent(GlBadge).props('variant')).toBe('success');
+ expect(stickyHeaderEl.findComponent(GlIcon).props('name')).toBe(
+ issuableTitleProps.statusIcon,
+ );
expect(stickyHeaderEl.text()).toContain('Open');
expect(stickyHeaderEl.text()).toContain(issuableTitleProps.issuable.title);
});
diff --git a/spec/frontend/vue_shared/issuable/show/mock_data.js b/spec/frontend/vue_shared/issuable/show/mock_data.js
index 32bb9edfe08..5ec205a2d5c 100644
--- a/spec/frontend/vue_shared/issuable/show/mock_data.js
+++ b/spec/frontend/vue_shared/issuable/show/mock_data.js
@@ -36,7 +36,6 @@ export const mockIssuableShowProps = {
enableTaskList: true,
enableEdit: true,
showFieldTitle: false,
- statusBadgeClass: 'issuable-status-badge-open',
statusIcon: 'issues',
statusIconClass: 'gl-sm-display-none',
taskCompletionStatus: {
diff --git a/spec/frontend/work_items/components/item_title_spec.js b/spec/frontend/work_items/components/item_title_spec.js
index 0d85df25b4f..2c3f6ef8634 100644
--- a/spec/frontend/work_items/components/item_title_spec.js
+++ b/spec/frontend/work_items/components/item_title_spec.js
@@ -15,7 +15,7 @@ const createComponent = ({ title = 'Sample title', disabled = false } = {}) =>
describe('ItemTitle', () => {
let wrapper;
const mockUpdatedTitle = 'Updated title';
- const findInputEl = () => wrapper.find('span#item-title');
+ const findInputEl = () => wrapper.find('[aria-label="Title"]');
beforeEach(() => {
wrapper = createComponent();
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
new file mode 100644
index 00000000000..0552fe5050e
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -0,0 +1,93 @@
+import { GlLink, GlTokenSelector } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
+import localUpdateWorkItemMutation from '~/work_items/graphql/local_update_work_item.mutation.graphql';
+
+const mockAssignees = [
+ {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/1',
+ avatarUrl: '',
+ webUrl: '',
+ name: 'John Doe',
+ username: 'doe_I',
+ },
+ {
+ __typename: 'UserCore',
+ id: 'gid://gitlab/User/2',
+ avatarUrl: '',
+ webUrl: '',
+ name: 'Marcus Rutherford',
+ username: 'ruthfull',
+ },
+];
+
+const workItemId = 'gid://gitlab/WorkItem/1';
+
+const mutate = jest.fn();
+
+describe('WorkItemAssignees component', () => {
+ let wrapper;
+
+ const findAssigneeLinks = () => wrapper.findAllComponents(GlLink);
+ const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
+
+ const findEmptyState = () => wrapper.findByTestId('empty-state');
+
+ const createComponent = ({ assignees = mockAssignees } = {}) => {
+ wrapper = mountExtended(WorkItemAssignees, {
+ propsData: {
+ assignees,
+ workItemId,
+ },
+ mocks: {
+ $apollo: {
+ mutate,
+ },
+ },
+ attachTo: document.body,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should pass the correct data-user-id attribute', () => {
+ createComponent();
+
+ expect(findAssigneeLinks().at(0).attributes('data-user-id')).toBe('1');
+ });
+
+ describe('when there are assignees', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should focus token selector on token removal', async () => {
+ findTokenSelector().vm.$emit('token-remove', mockAssignees[0].id);
+ await nextTick();
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTokenSelector().element.contains(document.activeElement)).toBe(true);
+ });
+
+ it('should call a mutation on clicking outside the token selector', async () => {
+ findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
+ findTokenSelector().vm.$emit('token-remove');
+ await nextTick();
+ expect(mutate).not.toHaveBeenCalled();
+
+ findTokenSelector().vm.$emit('blur', new FocusEvent('blur', { relatedTarget: null }));
+ await nextTick();
+
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: localUpdateWorkItemMutation,
+ variables: {
+ input: { id: workItemId, assigneeIds: [mockAssignees[0].id] },
+ },
+ });
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_description_spec.js b/spec/frontend/work_items/components/work_item_description_spec.js
new file mode 100644
index 00000000000..8017c46dea8
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_description_spec.js
@@ -0,0 +1,222 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { updateDraft } from '~/lib/utils/autosave';
+import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import WorkItemDescription from '~/work_items/components/work_item_description.vue';
+import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+import updateWorkItemWidgetsMutation from '~/work_items/graphql/update_work_item_widgets.mutation.graphql';
+import {
+ updateWorkItemWidgetsResponse,
+ workItemResponseFactory,
+ workItemQueryResponse,
+} from '../mock_data';
+
+jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal', () => {
+ return {
+ confirmAction: jest.fn(),
+ };
+});
+jest.mock('~/lib/utils/autosave');
+
+const workItemId = workItemQueryResponse.data.workItem.id;
+
+describe('WorkItemDescription', () => {
+ let wrapper;
+
+ Vue.use(VueApollo);
+
+ const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemWidgetsResponse);
+
+ const findEditButton = () => wrapper.find('[data-testid="edit-description"]');
+ const findMarkdownField = () => wrapper.findComponent(MarkdownField);
+
+ const editDescription = (newText) => wrapper.find('textarea').setValue(newText);
+
+ const clickCancel = () => wrapper.find('[data-testid="cancel"]').vm.$emit('click');
+ const clickSave = () => wrapper.find('[data-testid="save-description"]').vm.$emit('click', {});
+
+ const createComponent = async ({
+ mutationHandler = mutationSuccessHandler,
+ canUpdate = true,
+ isEditing = false,
+ } = {}) => {
+ const workItemResponse = workItemResponseFactory({ canUpdate });
+ const workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
+
+ const { id } = workItemQueryResponse.data.workItem;
+ wrapper = shallowMount(WorkItemDescription, {
+ apolloProvider: createMockApollo([
+ [workItemQuery, workItemResponseHandler],
+ [updateWorkItemWidgetsMutation, mutationHandler],
+ ]),
+ propsData: {
+ workItemId: id,
+ },
+ provide: {
+ fullPath: '/group/project',
+ },
+ stubs: {
+ MarkdownField,
+ },
+ });
+
+ await waitForPromises();
+
+ if (isEditing) {
+ findEditButton().vm.$emit('click');
+
+ await nextTick();
+ }
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Edit button', () => {
+ it('is not visible when canUpdate = false', async () => {
+ await createComponent({
+ canUpdate: false,
+ });
+
+ expect(findEditButton().exists()).toBe(false);
+ });
+
+ it('toggles edit mode', async () => {
+ await createComponent({
+ canUpdate: true,
+ });
+
+ findEditButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(findMarkdownField().exists()).toBe(true);
+ });
+ });
+
+ describe('editing description', () => {
+ it('cancels when clicking cancel', async () => {
+ await createComponent({
+ isEditing: true,
+ });
+
+ clickCancel();
+
+ await nextTick();
+
+ expect(confirmAction).not.toHaveBeenCalled();
+ expect(findMarkdownField().exists()).toBe(false);
+ });
+
+ it('prompts for confirmation when clicking cancel after changes', async () => {
+ await createComponent({
+ isEditing: true,
+ });
+
+ editDescription('updated desc');
+
+ clickCancel();
+
+ await nextTick();
+
+ expect(confirmAction).toHaveBeenCalled();
+ });
+
+ it('calls update widgets mutation', async () => {
+ await createComponent({
+ isEditing: true,
+ });
+
+ editDescription('updated desc');
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ id: workItemId,
+ descriptionWidget: {
+ description: 'updated desc',
+ },
+ },
+ });
+ });
+
+ it('tracks editing description', async () => {
+ await createComponent({
+ isEditing: true,
+ markdownPreviewPath: '/preview',
+ });
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_description', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_description',
+ property: 'type_Task',
+ });
+ });
+
+ it('emits error when mutation returns error', async () => {
+ const error = 'error';
+
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ workItemUpdateWidgets: {
+ workItem: {},
+ errors: [error],
+ },
+ },
+ }),
+ });
+
+ editDescription('updated desc');
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
+
+ it('emits error when mutation fails', async () => {
+ const error = 'error';
+
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockRejectedValue(new Error(error)),
+ });
+
+ editDescription('updated desc');
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
+
+ it('autosaves description', async () => {
+ await createComponent({
+ isEditing: true,
+ });
+
+ editDescription('updated desc');
+
+ expect(updateDraft).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_detail_modal_spec.js b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
index aaabdbc82d9..d55ba318e46 100644
--- a/spec/frontend/work_items/components/work_item_detail_modal_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
@@ -29,7 +29,7 @@ describe('WorkItemDetailModal component', () => {
const findAlert = () => wrapper.findComponent(GlAlert);
const findWorkItemDetail = () => wrapper.findComponent(WorkItemDetail);
- const createComponent = ({ workItemId = '1', error = false } = {}) => {
+ const createComponent = ({ workItemId = '1', issueGid = '2', error = false } = {}) => {
const apolloProvider = createMockApollo([
[
deleteWorkItemFromTaskMutation,
@@ -46,7 +46,7 @@ describe('WorkItemDetailModal component', () => {
wrapper = shallowMount(WorkItemDetailModal, {
apolloProvider,
- propsData: { workItemId },
+ propsData: { workItemId, issueGid },
data() {
return {
error,
@@ -67,6 +67,7 @@ describe('WorkItemDetailModal component', () => {
expect(findWorkItemDetail().props()).toEqual({
workItemId: '1',
+ workItemParentId: '2',
});
});
@@ -97,13 +98,6 @@ describe('WorkItemDetailModal component', () => {
expect(wrapper.emitted('close')).toBeTruthy();
});
- it('emits `workItemUpdated` event on updating work item', () => {
- createComponent();
- findWorkItemDetail().vm.$emit('workItemUpdated');
-
- expect(wrapper.emitted('workItemUpdated')).toBeTruthy();
- });
-
describe('delete work item', () => {
it('emits workItemDeleted and closes modal', async () => {
createComponent();
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
new file mode 100644
index 00000000000..774e9198992
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
@@ -0,0 +1,88 @@
+import Vue, { nextTick } from 'vue';
+import { GlBadge } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import WorkItemLinks from '~/work_items/components/work_item_links/work_item_links.vue';
+import getWorkItemLinksQuery from '~/work_items/graphql/work_item_links.query.graphql';
+import { workItemHierarchyResponse, workItemHierarchyEmptyResponse } from '../../mock_data';
+
+Vue.use(VueApollo);
+
+describe('WorkItemLinks', () => {
+ let wrapper;
+
+ const createComponent = async ({ response = workItemHierarchyResponse } = {}) => {
+ wrapper = shallowMountExtended(WorkItemLinks, {
+ apolloProvider: createMockApollo([
+ [getWorkItemLinksQuery, jest.fn().mockResolvedValue(response)],
+ ]),
+ propsData: { issuableId: 1 },
+ });
+
+ await waitForPromises();
+ };
+
+ const findToggleButton = () => wrapper.findByTestId('toggle-links');
+ const findLinksBody = () => wrapper.findByTestId('links-body');
+ const findEmptyState = () => wrapper.findByTestId('links-empty');
+ const findToggleAddFormButton = () => wrapper.findByTestId('toggle-add-form');
+ const findAddLinksForm = () => wrapper.findByTestId('add-links-form');
+
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('is expanded by default', () => {
+ expect(findToggleButton().props('icon')).toBe('chevron-lg-up');
+ expect(findLinksBody().exists()).toBe(true);
+ });
+
+ it('collapses when the toggle button is clicked', async () => {
+ findToggleButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findToggleButton().props('icon')).toBe('chevron-lg-down');
+ expect(findLinksBody().exists()).toBe(false);
+ });
+
+ describe('when no child links', () => {
+ beforeEach(async () => {
+ await createComponent({ response: workItemHierarchyEmptyResponse });
+ });
+
+ it('displays empty state if there are no children', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ describe('add link form', () => {
+ it('displays the form when the add button is clicked and hides it on cancel', async () => {
+ expect(findEmptyState().exists()).toBe(true);
+
+ findToggleAddFormButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findAddLinksForm().exists()).toBe(true);
+
+ findAddLinksForm().vm.$emit('cancel');
+ await nextTick();
+
+ expect(findAddLinksForm().exists()).toBe(false);
+ });
+ });
+ });
+
+ it('renders all hierarchy widget children', () => {
+ expect(findLinksBody().exists()).toBe(true);
+
+ const children = wrapper.findAll('[data-testid="links-child"]');
+
+ expect(children).toHaveLength(4);
+ expect(children.at(0).findComponent(GlBadge).text()).toBe('Open');
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_state_spec.js b/spec/frontend/work_items/components/work_item_state_spec.js
index 9e48f56d9e9..b379d1fc846 100644
--- a/spec/frontend/work_items/components/work_item_state_spec.js
+++ b/spec/frontend/work_items/components/work_item_state_spec.js
@@ -12,6 +12,7 @@ import {
STATE_CLOSED,
STATE_EVENT_CLOSE,
STATE_EVENT_REOPEN,
+ TRACKING_CATEGORY_SHOW,
} from '~/work_items/constants';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import { updateWorkItemMutationResponse, workItemQueryResponse } from '../mock_data';
@@ -81,15 +82,6 @@ describe('WorkItemState component', () => {
});
});
- it('emits updated event', async () => {
- createComponent();
-
- findItemState().vm.$emit('changed', STATE_CLOSED);
- await waitForPromises();
-
- expect(wrapper.emitted('updated')).toEqual([[]]);
- });
-
it('emits an error message when the mutation was unsuccessful', async () => {
createComponent({ mutationHandler: jest.fn().mockRejectedValue('Error!') });
@@ -107,8 +99,8 @@ describe('WorkItemState component', () => {
findItemState().vm.$emit('changed', STATE_CLOSED);
await waitForPromises();
- expect(trackingSpy).toHaveBeenCalledWith('workItems:show', 'updated_state', {
- category: 'workItems:show',
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_state', {
+ category: TRACKING_CATEGORY_SHOW,
label: 'item_state',
property: 'type_Task',
});
diff --git a/spec/frontend/work_items/components/work_item_title_spec.js b/spec/frontend/work_items/components/work_item_title_spec.js
index 19b56362ac0..a48449bb636 100644
--- a/spec/frontend/work_items/components/work_item_title_spec.js
+++ b/spec/frontend/work_items/components/work_item_title_spec.js
@@ -6,8 +6,9 @@ import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ItemTitle from '~/work_items/components/item_title.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
-import { i18n } from '~/work_items/constants';
+import { i18n, TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql';
import { updateWorkItemMutationResponse, workItemQueryResponse } from '../mock_data';
describe('WorkItemTitle component', () => {
@@ -19,14 +20,18 @@ describe('WorkItemTitle component', () => {
const findItemTitle = () => wrapper.findComponent(ItemTitle);
- const createComponent = ({ mutationHandler = mutationSuccessHandler } = {}) => {
+ const createComponent = ({ workItemParentId, mutationHandler = mutationSuccessHandler } = {}) => {
const { id, title, workItemType } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemTitle, {
- apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
+ apolloProvider: createMockApollo([
+ [updateWorkItemMutation, mutationHandler],
+ [updateWorkItemTaskMutation, mutationHandler],
+ ]),
propsData: {
workItemId: id,
workItemTitle: title,
workItemType: workItemType.name,
+ workItemParentId,
},
});
};
@@ -57,13 +62,25 @@ describe('WorkItemTitle component', () => {
});
});
- it('emits updated event', async () => {
- createComponent();
+ it('calls WorkItemTaskUpdate if passed workItemParentId prop', () => {
+ const title = 'new title!';
+ const workItemParentId = '1234';
- findItemTitle().vm.$emit('title-changed', 'new title');
- await waitForPromises();
+ createComponent({
+ workItemParentId,
+ });
- expect(wrapper.emitted('updated')).toEqual([[]]);
+ findItemTitle().vm.$emit('title-changed', title);
+
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ id: workItemParentId,
+ taskData: {
+ id: workItemQueryResponse.data.workItem.id,
+ title,
+ },
+ },
+ });
});
it('does not call a mutation when the title has not changed', () => {
@@ -91,8 +108,8 @@ describe('WorkItemTitle component', () => {
findItemTitle().vm.$emit('title-changed', 'new title');
await waitForPromises();
- expect(trackingSpy).toHaveBeenCalledWith('workItems:show', 'updated_title', {
- category: 'workItems:show',
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_title', {
+ category: TRACKING_CATEGORY_SHOW,
label: 'item_title',
property: 'type_Task',
});
diff --git a/spec/frontend/work_items/components/work_item_weight_spec.js b/spec/frontend/work_items/components/work_item_weight_spec.js
new file mode 100644
index 00000000000..80a1d032ad7
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_weight_spec.js
@@ -0,0 +1,47 @@
+import { shallowMount } from '@vue/test-utils';
+import WorkItemWeight from '~/work_items/components/work_item_weight.vue';
+
+describe('WorkItemWeight component', () => {
+ let wrapper;
+
+ const createComponent = ({ weight, hasIssueWeightsFeature = true } = {}) => {
+ wrapper = shallowMount(WorkItemWeight, {
+ propsData: {
+ weight,
+ },
+ provide: {
+ hasIssueWeightsFeature,
+ },
+ });
+ };
+
+ describe('weight licensed feature', () => {
+ describe.each`
+ description | hasIssueWeightsFeature | exists
+ ${'when available'} | ${true} | ${true}
+ ${'when not available'} | ${false} | ${false}
+ `('$description', ({ hasIssueWeightsFeature, exists }) => {
+ it(hasIssueWeightsFeature ? 'renders component' : 'does not render component', () => {
+ createComponent({ hasIssueWeightsFeature });
+
+ expect(wrapper.find('div').exists()).toBe(exists);
+ });
+ });
+ });
+
+ describe('weight text', () => {
+ describe.each`
+ description | weight | text
+ ${'renders 1'} | ${1} | ${'1'}
+ ${'renders 0'} | ${0} | ${'0'}
+ ${'renders None'} | ${null} | ${'None'}
+ ${'renders None'} | ${undefined} | ${'None'}
+ `('when weight is $weight', ({ description, weight, text }) => {
+ it(description, () => {
+ createComponent({ weight });
+
+ expect(wrapper.text()).toContain(text);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index f3483550013..bf3f4e1364d 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -15,6 +15,15 @@ export const workItemQueryResponse = {
deleteWorkItem: false,
updateWorkItem: false,
},
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetDescription',
+ type: 'DESCRIPTION',
+ description: 'some **great** text',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
+ },
+ ],
},
},
};
@@ -38,11 +47,53 @@ export const updateWorkItemMutationResponse = {
deleteWorkItem: false,
updateWorkItem: false,
},
+ widgets: [],
},
},
},
};
+export const workItemResponseFactory = ({ canUpdate } = {}) => ({
+ data: {
+ workItem: {
+ __typename: 'WorkItem',
+ id: 'gid://gitlab/WorkItem/1',
+ title: 'Updated title',
+ state: 'OPEN',
+ description: 'description',
+ workItemType: {
+ __typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/5',
+ name: 'Task',
+ },
+ userPermissions: {
+ deleteWorkItem: false,
+ updateWorkItem: canUpdate,
+ },
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetDescription',
+ type: 'DESCRIPTION',
+ description: 'some **great** text',
+ descriptionHtml:
+ '<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
+ },
+ ],
+ },
+ },
+});
+
+export const updateWorkItemWidgetsResponse = {
+ data: {
+ workItemUpdateWidgets: {
+ workItem: {
+ id: 1234,
+ },
+ errors: [],
+ },
+ },
+};
+
export const projectWorkItemTypesQueryResponse = {
data: {
workspace: {
@@ -77,6 +128,7 @@ export const createWorkItemMutationResponse = {
deleteWorkItem: false,
updateWorkItem: false,
},
+ widgets: [],
},
},
},
@@ -124,3 +176,102 @@ export const workItemTitleSubscriptionResponse = {
},
},
};
+
+export const workItemHierarchyEmptyResponse = {
+ data: {
+ workItem: {
+ id: 'gid://gitlab/WorkItem/1',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ __typename: 'WorkItemType',
+ },
+ title: 'New title',
+ widgets: [
+ {
+ type: 'DESCRIPTION',
+ __typename: 'WorkItemWidgetDescription',
+ },
+ {
+ type: 'HIERARCHY',
+ parent: null,
+ children: {
+ nodes: [],
+ __typename: 'WorkItemConnection',
+ },
+ __typename: 'WorkItemWidgetHierarchy',
+ },
+ ],
+ __typename: 'WorkItem',
+ },
+ },
+};
+
+export const workItemHierarchyResponse = {
+ data: {
+ workItem: {
+ id: 'gid://gitlab/WorkItem/1',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ __typename: 'WorkItemType',
+ },
+ title: 'New title',
+ widgets: [
+ {
+ type: 'DESCRIPTION',
+ __typename: 'WorkItemWidgetDescription',
+ },
+ {
+ type: 'HIERARCHY',
+ parent: null,
+ children: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/2',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ __typename: 'WorkItemType',
+ },
+ title: 'xyz',
+ state: 'OPEN',
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/3',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ __typename: 'WorkItemType',
+ },
+ title: 'abc',
+ state: 'CLOSED',
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/4',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ __typename: 'WorkItemType',
+ },
+ title: 'bar',
+ state: 'OPEN',
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/5',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ __typename: 'WorkItemType',
+ },
+ title: 'foobar',
+ state: 'OPEN',
+ __typename: 'WorkItem',
+ },
+ ],
+ __typename: 'WorkItemConnection',
+ },
+ __typename: 'WorkItemWidgetHierarchy',
+ },
+ ],
+ __typename: 'WorkItem',
+ },
+ },
+};
diff --git a/spec/frontend/work_items/pages/work_item_detail_spec.js b/spec/frontend/work_items/pages/work_item_detail_spec.js
index 9f87655175c..b9724034cb4 100644
--- a/spec/frontend/work_items/pages/work_item_detail_spec.js
+++ b/spec/frontend/work_items/pages/work_item_detail_spec.js
@@ -5,11 +5,15 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
+import WorkItemDescription from '~/work_items/components/work_item_description.vue';
import WorkItemState from '~/work_items/components/work_item_state.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
+import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
+import WorkItemWeight from '~/work_items/components/work_item_weight.vue';
import { i18n } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemTitleSubscription from '~/work_items/graphql/work_item_title.subscription.graphql';
+import { temporaryConfig } from '~/work_items/graphql/provider';
import { workItemTitleSubscriptionResponse, workItemQueryResponse } from '../mock_data';
describe('WorkItemDetail component', () => {
@@ -24,18 +28,34 @@ describe('WorkItemDetail component', () => {
const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
const findWorkItemTitle = () => wrapper.findComponent(WorkItemTitle);
const findWorkItemState = () => wrapper.findComponent(WorkItemState);
+ const findWorkItemDescription = () => wrapper.findComponent(WorkItemDescription);
+ const findWorkItemAssignees = () => wrapper.findComponent(WorkItemAssignees);
+ const findWorkItemWeight = () => wrapper.findComponent(WorkItemWeight);
const createComponent = ({
workItemId = workItemQueryResponse.data.workItem.id,
handler = successHandler,
subscriptionHandler = initialSubscriptionHandler,
+ workItemsMvc2Enabled = false,
+ includeWidgets = false,
} = {}) => {
wrapper = shallowMount(WorkItemDetail, {
- apolloProvider: createMockApollo([
- [workItemQuery, handler],
- [workItemTitleSubscription, subscriptionHandler],
- ]),
+ apolloProvider: createMockApollo(
+ [
+ [workItemQuery, handler],
+ [workItemTitleSubscription, subscriptionHandler],
+ ],
+ {},
+ {
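+ // `includeWidgets` switches the mock Apollo cache onto the temporary widget type policies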
+ typePolicies: includeWidgets ? temporaryConfig.cacheConfig.typePolicies : {},
+ },
+ ),
propsData: { workItemId },
+ provide: {
+ glFeatures: {
+ workItemsMvc2: workItemsMvc2Enabled,
+ },
+ },
});
};
@@ -78,6 +98,22 @@ describe('WorkItemDetail component', () => {
});
});
+ describe('description', () => {
+ it('does not show description widget while the work item is loading', () => {
+ createComponent();
+
+ expect(findWorkItemDescription().exists()).toBe(false);
+ });
+
+ it('shows description widget if description loads', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findWorkItemDescription().exists()).toBe(true);
+ });
+ });
+
it('shows an error message when the work item query was unsuccessful', async () => {
const errorHandler = jest.fn().mockRejectedValue('Oops');
createComponent({ handler: errorHandler });
@@ -105,17 +141,64 @@ describe('WorkItemDetail component', () => {
});
});
- it('emits workItemUpdated event when fields updated', async () => {
- createComponent();
+ describe('when work_items_mvc_2 feature flag is enabled', () => {
+ it('renders assignees component when assignees widget is returned from the API', async () => {
+ createComponent({
+ workItemsMvc2Enabled: true,
+ includeWidgets: true,
+ });
+ await waitForPromises();
- await waitForPromises();
+ expect(findWorkItemAssignees().exists()).toBe(true);
+ });
- findWorkItemState().vm.$emit('updated');
+ it('does not render assignees component when assignees widget is not returned from the API', async () => {
+ createComponent({
+ workItemsMvc2Enabled: true,
+ includeWidgets: false,
+ });
+ await waitForPromises();
- expect(wrapper.emitted('workItemUpdated')).toEqual([[]]);
+ expect(findWorkItemAssignees().exists()).toBe(false);
+ });
+ });
- findWorkItemTitle().vm.$emit('updated');
+ it('does not render assignees component when assignees feature flag is disabled', async () => {
+ createComponent();
+ await waitForPromises();
- expect(wrapper.emitted('workItemUpdated')).toEqual([[], []]);
+ expect(findWorkItemAssignees().exists()).toBe(false);
+ });
+
+ describe('weight widget', () => {
+ describe('when work_items_mvc_2 feature flag is enabled', () => {
+ describe.each`
+ description | includeWidgets | exists
+ ${'when widget is returned from API'} | ${true} | ${true}
+ ${'when widget is not returned from API'} | ${false} | ${false}
+ `('$description', ({ includeWidgets, exists }) => {
+ it(`${includeWidgets ? 'renders' : 'does not render'} weight component`, async () => {
+ createComponent({ includeWidgets, workItemsMvc2Enabled: true });
+ await waitForPromises();
+
+ expect(findWorkItemWeight().exists()).toBe(exists);
+ });
+ });
+ });
+
+ describe('when work_items_mvc_2 feature flag is disabled', () => {
+ describe.each`
+ description | includeWidgets | exists
+ ${'when widget is returned from API'} | ${true} | ${false}
+ ${'when widget is not returned from API'} | ${false} | ${false}
+ `('$description', ({ includeWidgets, exists }) => {
+ it(`${includeWidgets ? 'renders' : 'does not render'} weight component`, async () => {
+ createComponent({ includeWidgets, workItemsMvc2Enabled: false });
+ await waitForPromises();
+
+ expect(findWorkItemWeight().exists()).toBe(exists);
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/work_items/pages/work_item_root_spec.js b/spec/frontend/work_items/pages/work_item_root_spec.js
index 85096392e84..3c5da94114e 100644
--- a/spec/frontend/work_items/pages/work_item_root_spec.js
+++ b/spec/frontend/work_items/pages/work_item_root_spec.js
@@ -11,6 +11,7 @@ import deleteWorkItem from '~/work_items/graphql/delete_work_item.mutation.graph
import { deleteWorkItemResponse, deleteWorkItemFailureResponse } from '../mock_data';
jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
visitUrl: jest.fn(),
}));
@@ -52,6 +53,7 @@ describe('Work items root component', () => {
expect(findWorkItemDetail().props()).toEqual({
workItemId: 'gid://gitlab/WorkItem/1',
+ workItemParentId: null,
});
});
diff --git a/spec/frontend_integration/content_editor/content_editor_integration_spec.js b/spec/frontend_integration/content_editor/content_editor_integration_spec.js
index 1b45c0d43a3..89b8d8d6d94 100644
--- a/spec/frontend_integration/content_editor/content_editor_integration_spec.js
+++ b/spec/frontend_integration/content_editor/content_editor_integration_spec.js
@@ -60,4 +60,30 @@ describe('content_editor', () => {
});
});
});
+
+ it('renders footnote ids alongside the footnote definition', async () => {
+ buildWrapper();
+
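+ // renderMarkdown is stubbed to return pre-rendered footnote markup for the serialized content below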
+ renderMarkdown.mockResolvedValue(`
+ <p data-sourcepos="3:1-3:56" dir="auto">
+ This reference tag is a mix of letters and numbers. <sup class="footnote-ref"><a href="#fn-footnote-2717" id="fnref-footnote-2717" data-footnote-ref="">2</a></sup>
+ </p>
+ <section class="footnotes" data-footnotes>
+ <ol>
+ <li id="fn-footnote-2717">
+ <p data-sourcepos="6:7-6:31">This is another footnote. <a href="#fnref-footnote-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
+ </li>
+ </ol>
+ </section>
+ `);
+
+ await contentEditorService.setSerializedContent(`
+ This reference tag is a mix of letters and numbers [^footnote].
+
+ [^footnote]: This is another footnote.
+ `);
+ await nextTick();
+
+ expect(wrapper.text()).toContain('footnote: This is another footnote');
+ });
});
diff --git a/spec/frontend_integration/ide/helpers/ide_helper.js b/spec/frontend_integration/ide/helpers/ide_helper.js
index 8c5ff816c74..20a1e5aceb2 100644
--- a/spec/frontend_integration/ide/helpers/ide_helper.js
+++ b/spec/frontend_integration/ide/helpers/ide_helper.js
@@ -46,14 +46,14 @@ export const findMonacoDiffEditor = () =>
export const findAndSetEditorValue = async (value) => {
const editor = await findMonacoEditor();
- const uri = editor.getAttribute('data-uri');
+ const { uri } = editor.dataset;
monacoEditor.getModel(uri).setValue(value);
};
export const getEditorValue = async () => {
const editor = await findMonacoEditor();
- const uri = editor.getAttribute('data-uri');
+ const { uri } = editor.dataset;
return monacoEditor.getModel(uri).getValue();
};
@@ -207,10 +207,10 @@ export const commit = async ({ newBranch = false, newMR = false, newBranchName =
if (!newBranch) {
const option = await screen.findByLabelText(/Commit to .+ branch/);
- option.click();
+ await option.click();
} else {
const option = await screen.findByLabelText('Create a new branch');
- option.click();
+ await option.click();
const branchNameInput = await screen.findByTestId('ide-new-branch-name');
fireEvent.input(branchNameInput, { target: { value: newBranchName } });
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 514f63a6f5a..4f8ae92ff99 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -10,17 +10,14 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
let(:permission_single) { :foo }
let(:permission_collection) { [:foo, :bar] }
- let(:test_object) { double(name: 'My name') }
+ let(:test_object) { double(name: 'My name', address: 'Worldwide') }
let(:authorizing_object) { test_object }
# to override when combining permissions
let(:permission_object_one) { authorizing_object }
let(:permission_object_two) { authorizing_object }
let(:query_string) { '{ item { name } }' }
- let(:result) do
- schema = empty_schema
- execute_query(query_type, schema: schema)
- end
+ let(:result) { execute_query(query_type) }
subject { result.dig('data', 'item') }
@@ -103,47 +100,47 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
end
describe 'with a single permission' do
+ let(:query_string) { '{ item { name address } }' }
let(:type) do
type_factory do |type|
- type.field :name, GraphQL::Types::String, null: true, authorize: permission_single
+ type.field :address, GraphQL::Types::String, null: true, authorize: permission_single
end
end
it 'returns the protected field when user has permission' do
permit(permission_single)
- expect(subject).to eq('name' => test_object.name)
+ expect(subject).to include('address' => test_object.address)
end
it 'returns nil when user is not authorized' do
- expect(subject).to eq('name' => nil)
+ expect(subject).to include('address' => nil)
end
end
describe 'with a collection of permissions' do
+ let(:query_string) { '{ item { name address } }' }
let(:type) do
permissions = permission_collection
type_factory do |type|
- type.field :name, GraphQL::Types::String,
- null: true,
- authorize: permissions
+ type.field :address, GraphQL::Types::String, null: true, authorize: permissions
end
end
it 'returns the protected field when user has all permissions' do
permit(*permission_collection)
- expect(subject).to eq('name' => test_object.name)
+ expect(subject).to include('address' => test_object.address)
end
it 'returns nil when user only has one of the permissions' do
permit(permission_collection.first)
- expect(subject).to eq('name' => nil)
+ expect(subject).to include('address' => nil)
end
it 'returns nil when user has none of the permissions' do
- expect(subject).to eq('name' => nil)
+ expect(subject).to include('address' => nil)
end
end
end
@@ -179,7 +176,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
describe 'type and field authorizations together' do
let(:authorizing_object) { anything }
let(:permission_1) { permission_collection.first }
- let(:permission_2) { permission_collection.last }
+ let(:permission_2) { permission_collection.second }
let(:type) do
type_factory do |type|
@@ -224,6 +221,55 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
include_examples 'authorization with a collection of permissions'
end
+ context 'when the resolver is a subclass of one that authorizes the object' do
+ let(:permission_object_one) { be_nil }
+ let(:permission_object_two) { be_nil }
+ let(:parent) do
+ parent = Class.new(Resolvers::BaseResolver)
+ parent.include(::Gitlab::Graphql::Authorize::AuthorizeResource)
+ parent.authorizes_object!
+ parent.authorize permission_1
+ parent
+ end
+
+ let(:resolver) do
+ simple_resolver(test_object, base_class: parent)
+ end
+
+ include_examples 'authorization with a collection of permissions'
+ end
+
+ context 'when the resolver is a subclass of one that authorizes the object, extra permission' do
+ let(:permission_object_one) { be_nil }
+ let(:permission_object_two) { be_nil }
+ let(:parent) do
+ parent = Class.new(Resolvers::BaseResolver)
+ parent.include(::Gitlab::Graphql::Authorize::AuthorizeResource)
+ parent.authorizes_object!
+ parent.authorize permission_1
+ parent
+ end
+
+ let(:resolver) do
+ resolver = simple_resolver(test_object, base_class: parent)
+ resolver.include(::Gitlab::Graphql::Authorize::AuthorizeResource)
+ resolver.authorize permission_2
+ resolver
+ end
+
+ context 'when the field does not define any permissions' do
+ let(:query_type) do
+ query_factory do |query|
+ query.field :item, type,
+ null: true,
+ resolver: resolver
+ end
+ end
+
+ include_examples 'authorization with a collection of permissions'
+ end
+ end
+
context 'when the resolver does not authorize the object, but instead calls authorized_find!' do
let(:permission_object_one) { test_object }
let(:permission_object_two) { be_nil }
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index 02c686af688..60b3edfc279 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -4,15 +4,16 @@ require 'spec_helper'
RSpec.describe GitlabSchema do
let_it_be(:connections) { GitlabSchema.connections.all_wrappers }
+ let_it_be(:tracers) { described_class.tracers }
let(:user) { build :user }
it 'uses batch loading' do
- expect(field_instrumenters).to include(BatchLoader::GraphQL)
+ expect(tracers).to include(BatchLoader::GraphQL)
end
it 'enables the generic instrumenter' do
- expect(field_instrumenters).to include(instance_of(::Gitlab::Graphql::GenericTracing))
+ expect(tracers).to include(instance_of(::Gitlab::Graphql::GenericTracing))
end
it 'has the base mutation' do
@@ -219,6 +220,8 @@ RSpec.describe GitlabSchema do
badField
veryBadField
alsoNotAGoodField
+ yetAnotherBadField
+ andYetAnother
}
}
GQL
@@ -308,8 +311,4 @@ RSpec.describe GitlabSchema do
end
end
end
-
- def field_instrumenters
- described_class.instrumenters[:field] + described_class.instrumenters[:field_after_built_ins]
- end
end
diff --git a/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
index 11c0fa44110..10aed8a1f00 100644
--- a/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
+++ b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Mutations::Boards::Issues::IssueMoveList do
let_it_be(:existing_issue2) { create(:labeled_issue, project: project, labels: [testing], relative_position: 50) }
let(:current_ctx) { { current_user: user } }
- let(:params) { { board_id: global_id_of(board), project_path: project.full_path, iid: issue1.iid } }
+ let(:params) { { board_id: global_id_of(board), project_path: project.full_path, iid: issue1.iid.to_s } }
let(:move_params) do
{
from_list_id: list1.id,
@@ -67,7 +67,7 @@ RSpec.describe Mutations::Boards::Issues::IssueMoveList do
end
it 'raises an error' do
- expect { subject }.to raise_error(::GraphQL::LoadApplicationObjectFailedError)
+ expect { subject }.to raise_error(::GraphQL::CoercionError, "\"#{params[:board_id]}\" does not represent an instance of Board")
end
end
diff --git a/spec/graphql/mutations/branches/create_spec.rb b/spec/graphql/mutations/branches/create_spec.rb
index e378a8e3d41..5e9b914d87c 100644
--- a/spec/graphql/mutations/branches/create_spec.rb
+++ b/spec/graphql/mutations/branches/create_spec.rb
@@ -3,13 +3,16 @@
require 'spec_helper'
RSpec.describe Mutations::Branches::Create do
+ include GraphqlHelpers
+
subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user) }
- let_it_be(:context) do
+
+ let(:context) do
GraphQL::Query::Context.new(
- query: OpenStruct.new(schema: nil),
+ query: query_double(schema: nil),
values: { current_user: user },
object: nil
)
diff --git a/spec/graphql/mutations/ci/runner/update_spec.rb b/spec/graphql/mutations/ci/runner/update_spec.rb
index 75e9b57e60a..ffaa6e93d1b 100644
--- a/spec/graphql/mutations/ci/runner/update_spec.rb
+++ b/spec/graphql/mutations/ci/runner/update_spec.rb
@@ -2,11 +2,12 @@
require 'spec_helper'
-RSpec.describe Mutations::Ci::Runner::Update do
+RSpec.describe 'Mutations::Ci::Runner::Update' do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
let_it_be(:runner) { create(:ci_runner, active: true, locked: false, run_untagged: true) }
+ let_it_be(:described_class) { Mutations::Ci::Runner::Update }
let(:current_ctx) { { current_user: user } }
let(:mutated_runner) { subject[:runner] }
@@ -49,6 +50,7 @@ RSpec.describe Mutations::Ci::Runner::Update do
{
id: runner.to_global_id,
description: 'updated description',
+ maintenance_note: 'updated maintenance note',
maximum_timeout: 900,
access_level: 'ref_protected',
active: false,
@@ -84,6 +86,16 @@ RSpec.describe Mutations::Ci::Runner::Update do
)
end
end
+
+ context 'with too long maintenance note' do
+ it 'returns a descriptive error' do
+ mutation_params[:maintenance_note] = '1' * 1025
+
+ expect(subject[:errors]).to contain_exactly(
+ 'Maintenance note is too long (maximum is 1024 characters)'
+ )
+ end
+ end
end
end
end
diff --git a/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb b/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
index 45d421509d0..7998be19c20 100644
--- a/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
+++ b/spec/graphql/mutations/clusters/agent_tokens/create_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Mutations::Clusters::AgentTokens::Create do
+ include GraphqlHelpers
+
subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
let_it_be(:cluster_agent) { create(:cluster_agent) }
@@ -10,7 +12,7 @@ RSpec.describe Mutations::Clusters::AgentTokens::Create do
let(:context) do
GraphQL::Query::Context.new(
- query: OpenStruct.new(schema: nil),
+ query: query_double(schema: nil), # rubocop:disable RSpec/VerifiedDoubles
values: { current_user: user },
object: nil
)
diff --git a/spec/graphql/mutations/clusters/agents/create_spec.rb b/spec/graphql/mutations/clusters/agents/create_spec.rb
index c80b6f6cdad..e2c04254ed8 100644
--- a/spec/graphql/mutations/clusters/agents/create_spec.rb
+++ b/spec/graphql/mutations/clusters/agents/create_spec.rb
@@ -3,13 +3,15 @@
require 'spec_helper'
RSpec.describe Mutations::Clusters::Agents::Create do
+ include GraphqlHelpers
+
subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
let(:context) do
GraphQL::Query::Context.new(
- query: OpenStruct.new(schema: nil),
+ query: query_double(schema: nil), # rubocop:disable RSpec/VerifiedDoubles
values: { current_user: user },
object: nil
)
diff --git a/spec/graphql/mutations/clusters/agents/delete_spec.rb b/spec/graphql/mutations/clusters/agents/delete_spec.rb
index e0ecff5fe44..c3a2c0dcbb4 100644
--- a/spec/graphql/mutations/clusters/agents/delete_spec.rb
+++ b/spec/graphql/mutations/clusters/agents/delete_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Mutations::Clusters::Agents::Delete do
+ include GraphqlHelpers
+
subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
let(:cluster_agent) { create(:cluster_agent) }
@@ -10,7 +12,7 @@ RSpec.describe Mutations::Clusters::Agents::Delete do
let(:user) { create(:user) }
let(:context) do
GraphQL::Query::Context.new(
- query: OpenStruct.new(schema: nil),
+ query: query_double(schema: nil), # rubocop:disable RSpec/VerifiedDoubles
values: { current_user: user },
object: nil
)
diff --git a/spec/graphql/mutations/commits/create_spec.rb b/spec/graphql/mutations/commits/create_spec.rb
index 097e70bada6..9fc9c731b96 100644
--- a/spec/graphql/mutations/commits/create_spec.rb
+++ b/spec/graphql/mutations/commits/create_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Mutations::Commits::Create do
+ include GraphqlHelpers
+
subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
let_it_be(:user) { create(:user) }
@@ -10,7 +12,7 @@ RSpec.describe Mutations::Commits::Create do
let(:context) do
GraphQL::Query::Context.new(
- query: OpenStruct.new(schema: nil),
+ query: query_double(schema: nil), # rubocop:disable RSpec/VerifiedDoubles
values: { current_user: user },
object: nil
)
diff --git a/spec/graphql/mutations/customer_relations/contacts/create_spec.rb b/spec/graphql/mutations/customer_relations/contacts/create_spec.rb
index dafc7b4c367..f2bbf0949fb 100644
--- a/spec/graphql/mutations/customer_relations/contacts/create_spec.rb
+++ b/spec/graphql/mutations/customer_relations/contacts/create_spec.rb
@@ -40,17 +40,6 @@ RSpec.describe Mutations::CustomerRelations::Contacts::Create do
group.add_developer(user)
end
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'raises an error' do
- expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- .with_message("The resource that you are attempting to access does not exist or you don't have permission to perform this action")
- end
- end
-
context 'when crm_enabled is false' do
let(:group) { create(:group) }
diff --git a/spec/graphql/mutations/customer_relations/contacts/update_spec.rb b/spec/graphql/mutations/customer_relations/contacts/update_spec.rb
index c8206eca442..421bb4f1b06 100644
--- a/spec/graphql/mutations/customer_relations/contacts/update_spec.rb
+++ b/spec/graphql/mutations/customer_relations/contacts/update_spec.rb
@@ -57,17 +57,6 @@ RSpec.describe Mutations::CustomerRelations::Contacts::Update do
it 'updates the organization with correct values' do
expect(resolve_mutation[:contact]).to have_attributes(attributes)
end
-
- context 'when the feature is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'raises an error' do
- expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- .with_message("The resource that you are attempting to access does not exist or you don't have permission to perform this action")
- end
- end
end
end
diff --git a/spec/graphql/mutations/customer_relations/organizations/create_spec.rb b/spec/graphql/mutations/customer_relations/organizations/create_spec.rb
index ee78d2b16f6..ffc9632350a 100644
--- a/spec/graphql/mutations/customer_relations/organizations/create_spec.rb
+++ b/spec/graphql/mutations/customer_relations/organizations/create_spec.rb
@@ -39,17 +39,6 @@ RSpec.describe Mutations::CustomerRelations::Organizations::Create do
group.add_developer(user)
end
- context 'when the feature is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'raises an error' do
- expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- .with_message("The resource that you are attempting to access does not exist or you don't have permission to perform this action")
- end
- end
-
context 'when the params are invalid' do
before do
valid_params[:name] = nil
diff --git a/spec/graphql/mutations/customer_relations/organizations/update_spec.rb b/spec/graphql/mutations/customer_relations/organizations/update_spec.rb
index 90fd7a0a9f1..f0f37ee9c47 100644
--- a/spec/graphql/mutations/customer_relations/organizations/update_spec.rb
+++ b/spec/graphql/mutations/customer_relations/organizations/update_spec.rb
@@ -56,17 +56,6 @@ RSpec.describe Mutations::CustomerRelations::Organizations::Update do
expect(resolve_mutation[:organization]).to have_attributes(attributes)
end
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'raises an error' do
- expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- .with_message("The resource that you are attempting to access does not exist or you don't have permission to perform this action")
- end
- end
-
context 'when the feature is disabled' do
let_it_be(:group) { create(:group) }
diff --git a/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb b/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb
index 63faecad5d5..ea74e427dd6 100644
--- a/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb
+++ b/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb
@@ -22,7 +22,8 @@ RSpec.describe Mutations::IncidentManagement::TimelineEvent::Create do
occurred_at: args[:occurred_at].to_s,
incident: incident,
author: current_user,
- promoted_from_note: nil
+ promoted_from_note: nil,
+ editable: true
)
end
diff --git a/spec/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb b/spec/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb
index 598ee496cf1..4541f8af7d3 100644
--- a/spec/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb
+++ b/spec/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb
@@ -27,7 +27,8 @@ RSpec.describe Mutations::IncidentManagement::TimelineEvent::PromoteFromNote do
occurred_at: comment.created_at.to_s,
incident: incident,
author: current_user,
- promoted_from_note: comment
+ promoted_from_note: comment,
+ editable: true
)
end
diff --git a/spec/graphql/mutations/issues/set_escalation_status_spec.rb b/spec/graphql/mutations/issues/set_escalation_status_spec.rb
index d41118b1812..f04d396efb8 100644
--- a/spec/graphql/mutations/issues/set_escalation_status_spec.rb
+++ b/spec/graphql/mutations/issues/set_escalation_status_spec.rb
@@ -50,16 +50,6 @@ RSpec.describe Mutations::Issues::SetEscalationStatus do
expect { result }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature unavailable for provided issue')
end
end
-
- context 'with feature disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it 'raises an error' do
- expect { result }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature unavailable for provided issue')
- end
- end
end
end
end
diff --git a/spec/graphql/mutations/release_asset_links/create_spec.rb b/spec/graphql/mutations/release_asset_links/create_spec.rb
index 86a6c77fa3f..a5291a00799 100644
--- a/spec/graphql/mutations/release_asset_links/create_spec.rb
+++ b/spec/graphql/mutations/release_asset_links/create_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Mutations::ReleaseAssetLinks::Create do
context 'when the user has access and no validation errors occur' do
it 'creates a new release asset link', :aggregate_failures do
- expect(subject).to eq({
+ expect(subject).to include({
link: release.reload.links.first,
errors: []
})
diff --git a/spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb b/spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb
index 818a7d303bd..668768189df 100644
--- a/spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb
+++ b/spec/graphql/mutations/security/ci_configuration/base_security_analyzer_spec.rb
@@ -7,8 +7,10 @@ RSpec.describe Mutations::Security::CiConfiguration::BaseSecurityAnalyzer do
it 'raises a NotImplementedError error if the resolve method is called on the base class' do
user = create(:user)
+ mutation = described_class.new(context: { current_user: user }, object: nil, field: nil)
project = create(:project, :public, :repository)
project.add_developer(user)
- expect { resolve(described_class, args: { project_path: project.full_path }, ctx: { current_user: user }) }.to raise_error(NotImplementedError)
+
+ expect { mutation.resolve(project_path: project.full_path) }.to raise_error(NotImplementedError)
end
end
diff --git a/spec/graphql/mutations/terraform/state/delete_spec.rb b/spec/graphql/mutations/terraform/state/delete_spec.rb
index 313a85a4bac..66d4b50741f 100644
--- a/spec/graphql/mutations/terraform/state/delete_spec.rb
+++ b/spec/graphql/mutations/terraform/state/delete_spec.rb
@@ -34,12 +34,12 @@ RSpec.describe Mutations::Terraform::State::Delete do
state.project.add_maintainer(user)
end
- it 'deletes the state', :aggregate_failures do
- expect do
- expect(subject).to eq(errors: [])
- end.to change { ::Terraform::State.count }.by(-1)
+ it 'schedules the state for deletion', :aggregate_failures do
+ expect_next_instance_of(Terraform::States::TriggerDestroyService, state, current_user: user) do |service|
+ expect(service).to receive(:execute).once.and_return(ServiceResponse.success)
+ end
- expect { state.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ subject
end
end
diff --git a/spec/graphql/mutations/work_items/update_task_spec.rb b/spec/graphql/mutations/work_items/update_task_spec.rb
new file mode 100644
index 00000000000..cb93e97504a
--- /dev/null
+++ b/spec/graphql/mutations/work_items/update_task_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::WorkItems::UpdateTask do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
+ let_it_be(:referenced_work_item, refind: true) { create(:work_item, project: project, title: 'REFERENCED') }
+ let_it_be(:parent_work_item) do
+ create(:work_item, project: project, description: "- [ ] #{referenced_work_item.to_reference}+")
+ end
+
+ let(:task_params) { { title: 'UPDATED' } }
+ let(:task_input) { { id: referenced_work_item.to_global_id }.merge(task_params) }
+ let(:input) { { id: parent_work_item.to_global_id, task_data: task_input } }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+
+ describe '#resolve' do
+ subject(:resolve) do
+ mutation.resolve(**input)
+ end
+
+ before do
+ stub_spam_services
+ end
+
+ context 'when user has sufficient permissions' do
+ let(:current_user) { developer }
+
+ it 'expires etag cache for parent work item' do
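+ # stub WorkItem.find so the mutation resolves to this exact parent instance, letting us set the expectation below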
+ allow(WorkItem).to receive(:find).and_call_original
+ allow(WorkItem).to receive(:find).with(parent_work_item.id.to_s).and_return(parent_work_item)
+
+ expect(parent_work_item).to receive(:expire_etag_cache)
+
+ resolve
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/work_items/update_widgets_spec.rb b/spec/graphql/mutations/work_items/update_widgets_spec.rb
new file mode 100644
index 00000000000..2e54b81b5c7
--- /dev/null
+++ b/spec/graphql/mutations/work_items/update_widgets_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::WorkItems::UpdateWidgets do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+
+ describe '#resolve' do
+ before do
+ stub_spam_services
+ end
+
+ context 'when no work item matches the given id' do
+ let(:current_user) { developer }
+ let(:gid) { global_id_of(id: non_existing_record_id, model_name: WorkItem.name) }
+
+ it 'raises an error' do
+ expect { mutation.resolve(id: gid, resolve: true) }.to raise_error(
+ Gitlab::Graphql::Errors::ResourceNotAvailable,
+ Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR
+ )
+ end
+ end
+
+ context 'when user can access the requested work item', :aggregate_failures do
+ let(:current_user) { developer }
+ let(:args) { {} }
+
+ let_it_be(:work_item) { create(:work_item, project: project) }
+
+ subject { mutation.resolve(id: work_item.to_global_id, **args) }
+
+ context 'when `:work_items` is disabled for a project' do
+ let_it_be(:project2) { create(:project) }
+
+ it 'returns an error' do
+ stub_feature_flags(work_items: project2) # only enable `work_items` for project2
+
+ expect(subject[:errors]).to contain_exactly('`work_items` feature flag disabled for this project')
+ end
+ end
+
+ context 'when resolved with an input for description widget' do
+ let(:args) { { description_widget: { description: "updated description" } } }
+
+ it 'returns the updated work item' do
+ expect(subject[:work_item].description).to eq("updated description")
+ expect(subject[:errors]).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb b/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb
index 14ebe85d80e..618735837e8 100644
--- a/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb
+++ b/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb
@@ -68,6 +68,6 @@ RSpec.describe Resolvers::AlertManagement::AlertResolver do
private
def resolve_alerts(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: project, args: args, ctx: context)
+ resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
end
end
diff --git a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
index 392385d2a30..ab1f19abaad 100644
--- a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
@@ -128,6 +128,6 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
end
def resolve_board_list_issues(args: {}, current_user: user)
- resolve(described_class, obj: list, args: args, ctx: { current_user: current_user })
+ resolve(described_class, obj: list, args: args, ctx: { current_user: current_user }, arg_style: :internal)
end
end
diff --git a/spec/graphql/resolvers/board_lists_resolver_spec.rb b/spec/graphql/resolvers/board_lists_resolver_spec.rb
index 7a1d8590546..c882ad7c818 100644
--- a/spec/graphql/resolvers/board_lists_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_lists_resolver_spec.rb
@@ -100,6 +100,8 @@ RSpec.describe Resolvers::BoardListsResolver do
end
def resolve_board_lists(args: {}, current_user: user)
- resolve(described_class, obj: board, args: args, ctx: { current_user: current_user })
+ resolve(described_class, obj: board, args: args, ctx: { current_user: current_user },
+ arg_style: :internal
+ )
end
end
diff --git a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
index 89a2437a189..f99f48f5b07 100644
--- a/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/group_runners_resolver_spec.rb
@@ -6,7 +6,10 @@ RSpec.describe Resolvers::Ci::GroupRunnersResolver do
include GraphqlHelpers
describe '#resolve' do
- subject { resolve(described_class, obj: obj, ctx: { current_user: user }, args: args) }
+ subject do
+ resolve(described_class, obj: obj, ctx: { current_user: user }, args: args,
+ arg_style: :internal)
+ end
include_context 'runners resolver setup'
diff --git a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
index 1b69bf7f63a..6c228861ddf 100644
--- a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Resolvers::Ci::JobsResolver do
describe '#resolve' do
context 'when security_report_types is empty' do
it "returns all of the pipeline's jobs" do
- jobs = resolve(described_class, obj: pipeline)
+ jobs = resolve(described_class, obj: pipeline, arg_style: :internal)
expect(jobs).to contain_exactly(
have_attributes(name: 'Normal job'),
@@ -37,7 +37,8 @@ RSpec.describe Resolvers::Ci::JobsResolver do
::Types::Security::ReportTypeEnum.values['SAST'].value,
::Types::Security::ReportTypeEnum.values['DAST'].value
]
- jobs = resolve(described_class, obj: pipeline, args: { security_report_types: report_types })
+ jobs = resolve(described_class, obj: pipeline, args: { security_report_types: report_types },
+ arg_style: :internal)
expect(jobs).to contain_exactly(
have_attributes(name: 'DAST job'),
@@ -48,7 +49,7 @@ RSpec.describe Resolvers::Ci::JobsResolver do
context 'when a job has tags' do
it "returns jobs with tags when applicable" do
- jobs = resolve(described_class, obj: pipeline)
+ jobs = resolve(described_class, obj: pipeline, arg_style: :internal)
expect(jobs).to contain_exactly(
have_attributes(tag_list: []),
have_attributes(tag_list: []),
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
index 9251fbf24d9..b1f5f7b3e43 100644
--- a/spec/graphql/resolvers/ci/runners_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -9,7 +9,10 @@ RSpec.describe Resolvers::Ci::RunnersResolver do
let(:obj) { nil }
let(:args) { {} }
- subject { resolve(described_class, obj: obj, ctx: { current_user: user }, args: args) }
+ subject do
+ resolve(described_class, obj: obj, ctx: { current_user: user }, args: args,
+ arg_style: :internal)
+ end
include_context 'runners resolver setup'
diff --git a/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb b/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
index 852aaf66201..892ab53a53e 100644
--- a/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
+++ b/spec/graphql/resolvers/concerns/caching_array_resolver_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe ::CachingArrayResolver do
expect(User).to receive(:from_union).twice.and_call_original
results = users.in_groups_of(2, false).map do |users|
- resolve(resolver, args: { username: users.map(&:username) }, schema: schema)
+ resolve(resolver, args: { username: users.map(&:username) }, schema: schema, arg_style: :internal)
end
expect(results.flat_map(&method(:force))).to match_array(users)
@@ -208,6 +208,6 @@ RSpec.describe ::CachingArrayResolver do
args = { is_admin: admin }
opts = resolver.field_options
allow(resolver).to receive(:field_options).and_return(opts.merge(max_page_size: max_page_size))
- resolve(resolver, args: args, ctx: query_context, schema: schema)
+ resolve(resolver, args: args, ctx: query_context, schema: schema, arg_style: :internal)
end
end
diff --git a/spec/graphql/resolvers/concerns/resolves_groups_spec.rb b/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
index bfbbae29e92..d15c8f2ee42 100644
--- a/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
+++ b/spec/graphql/resolvers/concerns/resolves_groups_spec.rb
@@ -27,11 +27,9 @@ RSpec.describe ResolvesGroups do
let_it_be(:lookahead_fields) do
<<~FIELDS
- contacts { nodes { id } }
containerRepositoriesCount
customEmoji { nodes { id } }
fullPath
- organizations { nodes { id } }
path
dependencyProxyBlobCount
dependencyProxyBlobs { nodes { fileName } }
diff --git a/spec/graphql/resolvers/container_repositories_resolver_spec.rb b/spec/graphql/resolvers/container_repositories_resolver_spec.rb
index a17d2a7b0d5..d7aa761320f 100644
--- a/spec/graphql/resolvers/container_repositories_resolver_spec.rb
+++ b/spec/graphql/resolvers/container_repositories_resolver_spec.rb
@@ -15,7 +15,10 @@ RSpec.describe Resolvers::ContainerRepositoriesResolver do
describe '#resolve' do
let(:object) { project }
- subject { resolve(described_class, ctx: { current_user: user }, args: args, obj: object) }
+ subject do
+ resolve(described_class, ctx: { current_user: user }, args: args, obj: object,
+ arg_style: :internal)
+ end
shared_examples 'returning container repositories' do
it { is_expected.to contain_exactly(container_repositories) }
diff --git a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
index 4e7ea253c87..9747f663759 100644
--- a/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
+++ b/spec/graphql/resolvers/container_repository_tags_resolver_spec.rb
@@ -12,7 +12,10 @@ RSpec.describe Resolvers::ContainerRepositoryTagsResolver do
let(:args) { { sort: nil } }
describe '#resolve' do
- let(:resolver) { resolve(described_class, ctx: { current_user: user }, obj: repository, args: args) }
+ let(:resolver) do
+ resolve(described_class, ctx: { current_user: user }, obj: repository, args: args,
+ arg_style: :internal)
+ end
before do
stub_container_registry_config(enabled: true)
diff --git a/spec/graphql/resolvers/crm/contacts_resolver_spec.rb b/spec/graphql/resolvers/crm/contacts_resolver_spec.rb
new file mode 100644
index 00000000000..98da4aeac28
--- /dev/null
+++ b/spec/graphql/resolvers/crm/contacts_resolver_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Crm::ContactsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
+
+ let_it_be(:contact_a) do
+ create(
+ :contact,
+ group: group,
+ first_name: "ABC",
+ last_name: "DEF",
+ email: "ghi@test.com",
+ description: "LMNO",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:contact_b) do
+ create(
+ :contact,
+ group: group,
+ first_name: "PQR",
+ last_name: "STU",
+ email: "vwx@test.com",
+ description: "YZ",
+ state: "active"
+ )
+ end
+
+ describe '#resolve' do
+ context 'with unauthorized user' do
+ it 'does not raise an error and returns no contacts' do
+ expect { resolve_contacts(group) }.not_to raise_error
+ expect(resolve_contacts(group)).to be_empty
+ end
+ end
+
+ context 'with authorized user' do
+ it 'does not raise an error and returns all contacts in the correct order' do
+ group.add_reporter(user)
+
+ expect { resolve_contacts(group) }.not_to raise_error
+ expect(resolve_contacts(group)).to eq([contact_a, contact_b])
+ end
+ end
+
+ context 'without parent' do
+ it 'returns no contacts' do
+ expect(resolve_contacts(nil)).to be_empty
+ end
+ end
+
+ context 'with a group parent' do
+ before do
+ group.add_developer(user)
+ end
+
+ context 'when no filter is provided' do
+ it 'returns all the contacts in the correct order' do
+ expect(resolve_contacts(group)).to eq([contact_a, contact_b])
+ end
+ end
+
+ context 'when search term is provided' do
+ it 'returns the correct contacts' do
+ expect(resolve_contacts(group, { search: "x@test.com" })).to contain_exactly(contact_b)
+ end
+ end
+
+ context 'when state is provided' do
+ it 'returns the correct contacts' do
+ expect(resolve_contacts(group, { state: :inactive })).to contain_exactly(contact_a)
+ end
+ end
+
+ context 'when ids are provided' do
+ it 'returns the correct contacts' do
+ expect(resolve_contacts(group, { ids: [contact_a.to_global_id] })).to contain_exactly(contact_a)
+ end
+ end
+ end
+ end
+
+ def resolve_contacts(parent, args = {}, context = { current_user: user })
+ resolve(described_class, obj: parent, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/crm/organizations_resolver_spec.rb b/spec/graphql/resolvers/crm/organizations_resolver_spec.rb
new file mode 100644
index 00000000000..323f134ffc3
--- /dev/null
+++ b/spec/graphql/resolvers/crm/organizations_resolver_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Crm::OrganizationsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
+
+ let_it_be(:organization_a) do
+ create(
+ :organization,
+ group: group,
+ name: "ABC",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:organization_b) do
+ create(
+ :organization,
+ group: group,
+ name: "DEF",
+ state: "active"
+ )
+ end
+
+ describe '#resolve' do
+ context 'with unauthorized user' do
+ it 'does not raise an error and returns no organizations' do
+ expect { resolve_organizations(group) }.not_to raise_error
+ expect(resolve_organizations(group)).to be_empty
+ end
+ end
+
+ context 'with authorized user' do
+ it 'does not raise an error and returns all organizations in the correct order' do
+ group.add_reporter(user)
+
+ expect { resolve_organizations(group) }.not_to raise_error
+ expect(resolve_organizations(group)).to eq([organization_a, organization_b])
+ end
+ end
+
+ context 'without parent' do
+ it 'returns no organizations' do
+ expect(resolve_organizations(nil)).to be_empty
+ end
+ end
+
+ context 'with a group parent' do
+ before do
+ group.add_developer(user)
+ end
+
+ context 'when no filter is provided' do
+ it 'returns all the organizations in the correct order' do
+ expect(resolve_organizations(group)).to eq([organization_a, organization_b])
+ end
+ end
+
+ context 'when search term is provided' do
+ it 'returns the correct organizations' do
+ expect(resolve_organizations(group, { search: "def" })).to contain_exactly(organization_b)
+ end
+ end
+
+ context 'when state is provided' do
+ it 'returns the correct organizations' do
+ expect(resolve_organizations(group, { state: :inactive })).to contain_exactly(organization_a)
+ end
+ end
+
+ context 'when ids are provided' do
+ it 'returns the correct organizations' do
+ expect(resolve_organizations(group, {
+ ids: [organization_b.to_global_id]
+ })).to contain_exactly(organization_b)
+ end
+ end
+ end
+ end
+
+ def resolve_organizations(parent, args = {}, context = { current_user: user })
+ resolve(described_class, obj: parent, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
index 8eab0222cf6..3a2ed445484 100644
--- a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
@@ -90,6 +90,8 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
end
context 'and they do not match' do
+ subject(:result) { resolve_versions(object) }
+
let(:params) do
{
earlier_or_equal_to_sha: first_version.sha,
@@ -104,14 +106,6 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
end
end
end
-
- context 'by at_version in parent' do
- before do
- query_context[:at_version_argument] = first_version.to_global_id
- end
-
- it_behaves_like 'a query for all_versions up to the first_version'
- end
end
end
diff --git a/spec/graphql/resolvers/group_labels_resolver_spec.rb b/spec/graphql/resolvers/group_labels_resolver_spec.rb
index 2031e534703..71290885e6b 100644
--- a/spec/graphql/resolvers/group_labels_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_labels_resolver_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Resolvers::GroupLabelsResolver do
it 'does not raise error' do
group.add_guest(current_user)
- expect { resolve_labels(subgroup) }.not_to raise_error
+ expect(resolve_labels(subgroup)).to be_instance_of(Gitlab::Graphql::Pagination::Keyset::Connection)
end
end
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
index 7abc779a63c..a32a031a88f 100644
--- a/spec/graphql/resolvers/group_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
let_it_be(:current_user) { create(:user) }
def resolve_group_milestones(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: group, args: args, ctx: context)
+ resolve(described_class, obj: group, args: args, ctx: context, arg_style: :internal)
end
let_it_be(:now) { Time.now }
@@ -126,16 +126,6 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
end
end
- context 'when user cannot read milestones' do
- it 'generates an error' do
- unauthorized_user = create(:user)
-
- expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
- resolve_group_milestones({}, { current_user: unauthorized_user })
- end
- end
- end
-
context 'when including descendant milestones in a public group' do
let_it_be(:group) { create(:group, :public) }
diff --git a/spec/graphql/resolvers/group_packages_resolver_spec.rb b/spec/graphql/resolvers/group_packages_resolver_spec.rb
index eba3a5f2de8..c600f9c9f9a 100644
--- a/spec/graphql/resolvers/group_packages_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_packages_resolver_spec.rb
@@ -2,15 +2,16 @@
require 'spec_helper'
-RSpec.describe Resolvers::GroupPackagesResolver do
+RSpec.describe 'Resolvers::GroupPackagesResolver' do
include GraphqlHelpers
+ let_it_be(:described_class) { Resolvers::GroupPackagesResolver }
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, group: group, path: 'a') }
let(:args) do
- { sort: :created_desc }
+ { sort: 'CREATED_DESC' }
end
describe '#resolve' do
@@ -26,13 +27,13 @@ RSpec.describe Resolvers::GroupPackagesResolver do
let_it_be(:package4) { create(:package, project: project2 ) }
context 'filter by package_name' do
- let(:args) { { sort: :project_path_desc } }
+ let(:args) { { sort: 'PROJECT_PATH_DESC' } }
it { is_expected.to eq([package4, package2, package3, package]) }
end
context 'filter by package_type' do
- let(:args) { { sort: :project_path_asc } }
+ let(:args) { { sort: 'PROJECT_PATH_ASC' } }
it { is_expected.to eq([package, package3, package2, package4]) }
end
diff --git a/spec/graphql/resolvers/incident_management/timeline_events_resolver_spec.rb b/spec/graphql/resolvers/incident_management/timeline_events_resolver_spec.rb
index 046cf242d56..6604141abfe 100644
--- a/spec/graphql/resolvers/incident_management/timeline_events_resolver_spec.rb
+++ b/spec/graphql/resolvers/incident_management/timeline_events_resolver_spec.rb
@@ -2,9 +2,10 @@
require 'spec_helper'
-RSpec.describe Resolvers::IncidentManagement::TimelineEventsResolver do
+RSpec.describe 'Resolvers::IncidentManagement::TimelineEventsResolver' do
include GraphqlHelpers
+ let_it_be(:described_class) { Resolvers::IncidentManagement::TimelineEventsResolver }
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:incident) { create(:incident, project: project) }
diff --git a/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
index 77f4ce4cac5..86a4154f23b 100644
--- a/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
+++ b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe Resolvers::IssueStatusCountsResolver do
end
context 'when both assignee_username and assignee_usernames are provided' do
- it 'generates a mutually exclusive filter error' do
+ it 'returns a mutually exclusive filter error' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.') do
resolve_issue_status_counts(assignee_usernames: [current_user.username], assignee_username: current_user.username)
end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index e6ec9d8c895..a5b5a8e4f72 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -428,6 +428,22 @@ RSpec.describe Resolvers::IssuesResolver do
end
end
+ context 'when sorting by closed at' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:closed_issue1) { create(:issue, project: project, closed_at: 3.days.from_now) }
+ let_it_be(:closed_issue2) { create(:issue, project: project, closed_at: nil) }
+ let_it_be(:closed_issue3) { create(:issue, project: project, closed_at: 2.days.ago) }
+ let_it_be(:closed_issue4) { create(:issue, project: project, closed_at: nil) }
+
+ it 'sorts issues ascending' do
+ expect(resolve_issues(sort: :closed_at_asc).to_a).to eq [closed_issue3, closed_issue1, closed_issue4, closed_issue2]
+ end
+
+ it 'sorts issues descending' do
+ expect(resolve_issues(sort: :closed_at_desc).to_a).to eq [closed_issue1, closed_issue3, closed_issue4, closed_issue2]
+ end
+ end
+
context 'when sorting by due date' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:due_issue1) { create(:issue, project: project, due_date: 3.days.from_now) }
@@ -573,22 +589,6 @@ RSpec.describe Resolvers::IssuesResolver do
issues = resolve_issues(sort: :created_desc).to_a
expect(issues).to eq([resolved_incident, issue_no_status, triggered_incident])
end
-
- context 'when incident_escalations feature flag is disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it 'defaults ascending status sort to created_desc' do
- issues = resolve_issues(sort: :escalation_status_asc).to_a
- expect(issues).to eq([resolved_incident, issue_no_status, triggered_incident])
- end
-
- it 'defaults descending status sort to created_desc' do
- issues = resolve_issues(sort: :escalation_status_desc).to_a
- expect(issues).to eq([resolved_incident, issue_no_status, triggered_incident])
- end
- end
end
context 'when sorting with non-stable cursors' do
@@ -701,6 +701,6 @@ RSpec.describe Resolvers::IssuesResolver do
end
def resolve_issues(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: project, args: args, ctx: context)
+ resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
end
end
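
The same arg_style: :internal option recurs throughout the resolver specs below; a hedged sketch of the call pattern (the reading of the option is an assumption about GraphqlHelpers#resolve, not something this diff states):

    def resolve_issues(args = {}, context = { current_user: current_user })
      # arg_style: :internal marks the args hash as already being in Ruby
      # form (underscored keys, prepared values), so the helper skips the
      # external GraphQL input coercion step.
      resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
    end

    # e.g. resolve_issues(sort: :closed_at_desc)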
diff --git a/spec/graphql/resolvers/merge_requests_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
index e4eaeb9bc3c..ab3d7edc6bd 100644
--- a/spec/graphql/resolvers/merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
@@ -174,7 +174,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
context 'with draft argument' do
before do
- merge_request_4.update!(title: MergeRequest.wip_title(merge_request_4.title))
+ merge_request_4.update!(title: MergeRequest.draft_title(merge_request_4.title))
end
context 'with draft: true argument' do
@@ -411,6 +411,6 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
def resolve_mr(project, resolver: described_class, user: current_user, **args)
- resolve(resolver, obj: project, args: args, ctx: { current_user: user })
+ resolve(resolver, obj: project, args: args, ctx: { current_user: user }, arg_style: :internal)
end
end
diff --git a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
index eb4d0ab6f37..78dd5173449 100644
--- a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
@@ -152,6 +152,6 @@ RSpec.describe Resolvers::NamespaceProjectsResolver do
end
def resolve_projects(args = { include_subgroups: false, sort: nil, search: nil, ids: nil }, context = { current_user: current_user })
- resolve(described_class, obj: namespace, args: args, ctx: context)
+ resolve(described_class, obj: namespace, args: args, ctx: context, arg_style: :internal)
end
end
diff --git a/spec/graphql/resolvers/packages_base_resolver_spec.rb b/spec/graphql/resolvers/packages_base_resolver_spec.rb
index 8f9865c3785..7766fdd4994 100644
--- a/spec/graphql/resolvers/packages_base_resolver_spec.rb
+++ b/spec/graphql/resolvers/packages_base_resolver_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Resolvers::PackagesBaseResolver do
include GraphqlHelpers
describe '#resolve' do
- subject { resolve(described_class) }
+ subject { resolve(described_class, arg_style: :internal) }
it 'throws an error' do
expect { subject }.to raise_error(NotImplementedError)
diff --git a/spec/graphql/resolvers/project_milestones_resolver_spec.rb b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
index 2cf490c2b6a..ad1190e3df7 100644
--- a/spec/graphql/resolvers/project_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
@@ -2,12 +2,15 @@
require 'spec_helper'
-RSpec.describe Resolvers::ProjectMilestonesResolver do
+RSpec.describe 'Resolvers::ProjectMilestonesResolver' do
include GraphqlHelpers
describe '#resolve' do
+ let_it_be(:described_class) { Resolvers::ProjectMilestonesResolver }
let_it_be(:project) { create(:project, :private) }
let_it_be(:current_user) { create(:user) }
+ let_it_be(:now) { Time.now }
+ let_it_be(:now_date) { now.to_date }
before_all do
project.add_developer(current_user)
@@ -25,7 +28,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
it 'calls MilestonesFinder to retrieve all milestones' do
expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, state: 'all'))
+ .with(args(project_ids: project.id, state: 'all', sort: :due_date_asc))
.and_call_original
resolve_project_milestones
@@ -42,7 +45,8 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
it 'calls MilestonesFinder with correct parameters' do
expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, group_ids: contain_exactly(group, parent_group), state: 'all'))
+ .with(args(project_ids: project.id, group_ids: contain_exactly(group, parent_group),
+ state: 'all', sort: :due_date_asc))
.and_call_original
resolve_project_milestones(include_ancestors: true)
@@ -54,7 +58,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
milestone = create(:milestone, project: project)
expect(MilestonesFinder).to receive(:new)
- .with(args(ids: [milestone.id.to_s], project_ids: project.id, state: 'all'))
+ .with(args(ids: [milestone.id.to_s], project_ids: project.id, state: 'all', sort: :due_date_asc))
.and_call_original
resolve_project_milestones(ids: [milestone.to_global_id])
@@ -64,7 +68,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'by state' do
it 'calls MilestonesFinder with correct parameters' do
expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, state: 'closed'))
+ .with(args(project_ids: project.id, state: 'closed', sort: :due_date_asc))
.and_call_original
resolve_project_milestones(state: 'closed')
@@ -74,13 +78,13 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'by sort' do
it 'calls MilestonesFinder with correct parameters' do
expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, state: 'all', sort: :due_date_desc))
+ .with(args(project_ids: project.id, state: 'all', sort: :due_date_asc))
.and_call_original
- resolve_project_milestones(sort: :due_date_desc)
+ resolve_project_milestones(sort: 'DUE_DATE_ASC')
end
- %i[expired_last_due_date_asc expired_last_due_date_desc].each do |sort_by|
+ %w[EXPIRED_LAST_DUE_DATE_ASC EXPIRED_LAST_DUE_DATE_DESC].each do |sort_by|
it "uses offset-pagination when sorting by #{sort_by}" do
resolved = resolve_project_milestones(sort: sort_by)
@@ -92,11 +96,12 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'by timeframe' do
context 'when start_date and end_date are present' do
it 'calls MilestonesFinder with correct parameters' do
- start_date = Time.now
- end_date = Time.now + 5.days
+ start_date = now
+ end_date = now + 5.days
expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, state: 'all', start_date: start_date, end_date: end_date))
+ .with(args(project_ids: project.id, state: 'all',
+ start_date: start_date, end_date: end_date, sort: :due_date_asc))
.and_call_original
resolve_project_milestones(start_date: start_date, end_date: end_date)
@@ -105,7 +110,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'when start date is after end_date' do
it 'generates an error' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, 'startDate is after endDate') do
- resolve_project_milestones(start_date: Time.now, end_date: Time.now - 2.days)
+ resolve_project_milestones(start_date: now, end_date: now - 2.days)
end
end
end
@@ -114,7 +119,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'when only start_date is present' do
it 'generates an error' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/) do
- resolve_project_milestones(start_date: Time.now)
+ resolve_project_milestones(start_date: now)
end
end
end
@@ -122,18 +127,19 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'when only end_date is present' do
it 'generates an error' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/) do
- resolve_project_milestones(end_date: Time.now)
+ resolve_project_milestones(end_date: now)
end
end
end
context 'when passing a timeframe' do
it 'calls MilestonesFinder with correct parameters' do
- start_date = Time.now
- end_date = Time.now + 5.days
+ start_date = now_date
+ end_date = now_date + 5.days
expect(MilestonesFinder).to receive(:new)
- .with(args(project_ids: project.id, state: 'all', start_date: start_date, end_date: end_date))
+ .with(args(project_ids: project.id, state: 'all',
+ sort: :due_date_asc, start_date: start_date, end_date: end_date))
.and_call_original
resolve_project_milestones(timeframe: { start: start_date, end: end_date })
@@ -144,7 +150,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'when title is present' do
it 'calls MilestonesFinder with correct parameters' do
expect(MilestonesFinder).to receive(:new)
- .with(args(title: '13.5', state: 'all', project_ids: project.id))
+ .with(args(title: '13.5', state: 'all', sort: :due_date_asc, project_ids: project.id))
.and_call_original
resolve_project_milestones(title: '13.5')
@@ -154,7 +160,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'when search_title is present' do
it 'calls MilestonesFinder with correct parameters' do
expect(MilestonesFinder).to receive(:new)
- .with(args(search_title: '13', state: 'all', project_ids: project.id))
+ .with(args(search_title: '13', state: 'all', sort: :due_date_asc, project_ids: project.id))
.and_call_original
resolve_project_milestones(search_title: '13')
@@ -163,24 +169,14 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'when containing date is present' do
it 'calls MilestonesFinder with correct parameters' do
- t = Time.now
+ t = now
expect(MilestonesFinder).to receive(:new)
- .with(args(containing_date: t, state: 'all', project_ids: project.id))
+ .with(args(containing_date: t, state: 'all', sort: :due_date_asc, project_ids: project.id))
.and_call_original
resolve_project_milestones(containing_date: t)
end
end
-
- context 'when user cannot read milestones' do
- it 'generates an error' do
- unauthorized_user = create(:user)
-
- expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
- resolve_project_milestones({}, { current_user: unauthorized_user })
- end
- end
- end
end
end
diff --git a/spec/graphql/resolvers/project_packages_resolver_spec.rb b/spec/graphql/resolvers/project_packages_resolver_spec.rb
index 66a94bd42dd..c3ce30f6ef9 100644
--- a/spec/graphql/resolvers/project_packages_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_packages_resolver_spec.rb
@@ -2,18 +2,22 @@
require 'spec_helper'
-RSpec.describe Resolvers::ProjectPackagesResolver do
+RSpec.describe 'Resolvers::ProjectPackagesResolver' do
include GraphqlHelpers
+ let_it_be(:described_class) { Resolvers::ProjectPackagesResolver }
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:project) { create(:project, :public) }
let(:args) do
- { sort: :created_desc }
+ { sort: 'CREATED_DESC' }
end
describe '#resolve' do
- subject { resolve(described_class, ctx: { current_user: user }, obj: project, args: args).to_a }
+ subject do
+ resolve(described_class, ctx: { current_user: user }, obj: project, args: args)
+ .to_a
+ end
it_behaves_like 'group and projects packages resolver'
end
diff --git a/spec/graphql/resolvers/releases_resolver_spec.rb b/spec/graphql/resolvers/releases_resolver_spec.rb
index 89623be891f..6ba9a6c33a1 100644
--- a/spec/graphql/resolvers/releases_resolver_spec.rb
+++ b/spec/graphql/resolvers/releases_resolver_spec.rb
@@ -87,6 +87,6 @@ RSpec.describe Resolvers::ReleasesResolver do
def resolve_releases
context = { current_user: current_user }
- resolve(described_class, obj: project, args: args, ctx: context)
+ resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
end
end
diff --git a/spec/graphql/resolvers/user_resolver_spec.rb b/spec/graphql/resolvers/user_resolver_spec.rb
index 446d765d3ee..d57b015b705 100644
--- a/spec/graphql/resolvers/user_resolver_spec.rb
+++ b/spec/graphql/resolvers/user_resolver_spec.rb
@@ -6,8 +6,29 @@ RSpec.describe Resolvers::UserResolver do
include GraphqlHelpers
describe '#resolve' do
+ let_it_be(:current_user) { nil }
let_it_be(:user) { create(:user) }
+ shared_examples 'queries user' do
+ context 'authenticated access' do
+ let_it_be(:current_user) { create(:user) }
+
+ it 'returns the correct user' do
+ expect(
+ resolve_user(args)
+ ).to eq(user)
+ end
+ end
+
+ context 'unauthenticated access' do
+ it 'forbids search' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_user(args)
+ end
+ end
+ end
+ end
+
context 'when neither an ID or a username is provided' do
it 'generates an ArgumentError' do
expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError) do
@@ -23,25 +44,21 @@ RSpec.describe Resolvers::UserResolver do
end
context 'by username' do
- it 'returns the correct user' do
- expect(
- resolve_user(username: user.username)
- ).to eq(user)
+ include_examples "queries user" do
+ let(:args) { { username: user.username } }
end
end
context 'by ID' do
- it 'returns the correct user' do
- expect(
- resolve_user(id: user.to_global_id)
- ).to eq(user)
+ include_examples "queries user" do
+ let(:args) { { id: user.to_global_id } }
end
end
end
private
- def resolve_user(args = {})
- sync(resolve(described_class, args: args))
+ def resolve_user(args = {}, context = { current_user: current_user })
+ sync(resolve(described_class, args: args, ctx: context))
end
end
diff --git a/spec/graphql/resolvers/users/groups_resolver_spec.rb b/spec/graphql/resolvers/users/groups_resolver_spec.rb
index 5ac7aac4898..bbe9b6371cf 100644
--- a/spec/graphql/resolvers/users/groups_resolver_spec.rb
+++ b/spec/graphql/resolvers/users/groups_resolver_spec.rb
@@ -93,6 +93,6 @@ RSpec.describe Resolvers::Users::GroupsResolver do
end
def resolve_groups(args:, current_user:, obj:)
- resolve(described_class, args: args, ctx: { current_user: current_user }, obj: obj)&.items
+ resolve(described_class, args: args, ctx: { current_user: current_user }, obj: obj, arg_style: :internal)&.items
end
end
diff --git a/spec/graphql/resolvers/users_resolver_spec.rb b/spec/graphql/resolvers/users_resolver_spec.rb
index 1ba296912a3..dda15303676 100644
--- a/spec/graphql/resolvers/users_resolver_spec.rb
+++ b/spec/graphql/resolvers/users_resolver_spec.rb
@@ -14,14 +14,6 @@ RSpec.describe Resolvers::UsersResolver do
end
describe '#resolve' do
- it 'generates an error when read_users_list is not authorized' do
- expect(Ability).to receive(:allowed?).with(current_user, :read_users_list).and_return(false)
-
- expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
- resolve_users
- end
- end
-
context 'when no arguments are passed' do
it 'returns all users' do
expect(resolve_users).to contain_exactly(user1, user2, current_user)
@@ -79,8 +71,10 @@ RSpec.describe Resolvers::UsersResolver do
end
end
- it 'allows to search by username' do
- expect(resolve_users(args: { usernames: [user1.username] })).to contain_exactly(user1)
+ it 'prohibits search by username' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ resolve_users(args: { usernames: [user1.username] })
+ end
end
end
end
diff --git a/spec/graphql/resolvers/work_items_resolver_spec.rb b/spec/graphql/resolvers/work_items_resolver_spec.rb
new file mode 100644
index 00000000000..29eac0ab46e
--- /dev/null
+++ b/spec/graphql/resolvers/work_items_resolver_spec.rb
@@ -0,0 +1,190 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::WorkItemsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:other_project) { create(:project, group: group) }
+
+ let_it_be(:item1) do
+ create(:work_item, project: project, state: :opened, created_at:
+ 3.hours.ago, updated_at: 3.hours.ago)
+ end
+
+ let_it_be(:item2) do
+ create(:work_item, project: project, state: :closed, title: 'foo',
+ created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at:
+ 1.hour.ago)
+ end
+
+ let_it_be(:item3) do
+ create(:work_item, project: other_project, state: :closed, title: 'foo',
+ created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at:
+ 1.hour.ago)
+ end
+
+ let_it_be(:item4) { create(:work_item) }
+
+ specify do
+ expect(described_class).to have_nullable_graphql_type(Types::WorkItemType.connection_type)
+ end
+
+ context "with a project" do
+ before_all do
+ project.add_developer(current_user)
+ project.add_reporter(reporter)
+ end
+
+ describe '#resolve' do
+ it 'finds all items' do
+ expect(resolve_items).to contain_exactly(item1, item2)
+ end
+
+ it 'filters by state' do
+ expect(resolve_items(state: 'opened')).to contain_exactly(item1)
+ expect(resolve_items(state: 'closed')).to contain_exactly(item2)
+ end
+
+ context 'when searching items' do
+ it 'returns correct items' do
+ expect(resolve_items(search: 'foo')).to contain_exactly(item2)
+ end
+
+ it 'uses project search optimization' do
+ expected_arguments = a_hash_including(
+ search: 'foo',
+ attempt_project_search_optimizations: true
+ )
+ expect(::WorkItems::WorkItemsFinder).to receive(:new).with(anything, expected_arguments).and_call_original
+
+ resolve_items(search: 'foo')
+ end
+
+ context 'with anonymous user' do
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:public_item) { create(:work_item, project: public_project, title: 'Test item') }
+
+ context 'with disable_anonymous_search enabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: true)
+ end
+
+ it 'generates an error' do
+ error_message = "User must be authenticated to include the `search` argument."
+
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ArgumentError, error_message) do
+ resolve(described_class, obj: public_project, args: { search: 'test' }, ctx: { current_user: nil })
+ end
+ end
+ end
+
+ context 'with disable_anonymous_search disabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: false)
+ end
+
+ it 'returns correct items' do
+ expect(
+ resolve(described_class, obj: public_project, args: { search: 'test' }, ctx: { current_user: nil })
+ ).to contain_exactly(public_item)
+ end
+ end
+ end
+ end
+
+ describe 'sorting' do
+ context 'when sorting by created' do
+ it 'sorts items ascending' do
+ expect(resolve_items(sort: 'created_asc').to_a).to eq [item1, item2]
+ end
+
+ it 'sorts items descending' do
+ expect(resolve_items(sort: 'created_desc').to_a).to eq [item2, item1]
+ end
+ end
+
+ context 'when sorting by title' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:item1) { create(:work_item, project: project, title: 'foo') }
+ let_it_be(:item2) { create(:work_item, project: project, title: 'bar') }
+ let_it_be(:item3) { create(:work_item, project: project, title: 'baz') }
+ let_it_be(:item4) { create(:work_item, project: project, title: 'Baz 2') }
+
+ it 'sorts items ascending' do
+ expect(resolve_items(sort: :title_asc).to_a).to eq [item2, item3, item4, item1]
+ end
+
+ it 'sorts items descending' do
+ expect(resolve_items(sort: :title_desc).to_a).to eq [item1, item4, item3, item2]
+ end
+ end
+ end
+
+ it 'returns items user can see' do
+ project.add_guest(current_user)
+
+ create(:work_item, confidential: true)
+
+ expect(resolve_items).to contain_exactly(item1, item2)
+ end
+
+ it 'batches queries that only include IIDs', :request_store do
+ result = batch_sync(max_queries: 7) do
+ [item1, item2]
+ .map { |item| resolve_items(iid: item.iid.to_s) }
+ .flat_map(&:to_a)
+ end
+
+ expect(result).to contain_exactly(item1, item2)
+ end
+
+ it 'finds a specific item with iids', :request_store do
+ result = batch_sync(max_queries: 7) do
+ resolve_items(iids: [item1.iid]).to_a
+ end
+
+ expect(result).to contain_exactly(item1)
+ end
+
+ it 'finds multiple items with iids' do
+ create(:work_item, project: project, author: current_user)
+
+ expect(batch_sync { resolve_items(iids: [item1.iid, item2.iid]).to_a })
+ .to contain_exactly(item1, item2)
+ end
+
+ it 'finds only the items within the project we are looking at' do
+ another_project = create(:project)
+ iids = [item1, item2].map(&:iid)
+
+ iids.each do |iid|
+ create(:work_item, project: another_project, iid: iid)
+ end
+
+ expect(batch_sync { resolve_items(iids: iids).to_a }).to contain_exactly(item1, item2)
+ end
+ end
+ end
+
+ context "when passing a non existent, batch loaded project" do
+ let!(:project) do
+ BatchLoader::GraphQL.for("non-existent-path").batch do |_fake_paths, loader, _|
+ loader.call("non-existent-path", nil)
+ end
+ end
+
+ it "returns nil without breaking" do
+ expect(resolve_items(iids: ["don't", "break"])).to be_empty
+ end
+ end
+
+ def resolve_items(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: project, args: args, ctx: context, arg_style: :internal)
+ end
+end
diff --git a/spec/graphql/subscriptions/issuable_updated_spec.rb b/spec/graphql/subscriptions/issuable_updated_spec.rb
index 0b8fcf67513..bc640e9e3c4 100644
--- a/spec/graphql/subscriptions/issuable_updated_spec.rb
+++ b/spec/graphql/subscriptions/issuable_updated_spec.rb
@@ -52,7 +52,8 @@ RSpec.describe Subscriptions::IssuableUpdated do
let(:current_user) { unauthorized_user }
it 'unsubscribes the user' do
- expect { subject }.to throw_symbol(:graphql_subscription_unsubscribed)
+ # GraphQL::Execution::Execute::Skip is returned when unsubscribed
+ expect(subject).to be_an(GraphQL::Execution::Execute::Skip)
end
end
end
diff --git a/spec/graphql/types/base_edge_spec.rb b/spec/graphql/types/base_edge_spec.rb
index 3afb4202173..b02ccbaffef 100644
--- a/spec/graphql/types/base_edge_spec.rb
+++ b/spec/graphql/types/base_edge_spec.rb
@@ -25,7 +25,6 @@ RSpec.describe Types::BaseEdge do
Class.new(GraphQL::Schema) do
lazy_resolve ::Gitlab::Graphql::Lazy, :force
- use ::GraphQL::Pagination::Connections
use ::Gitlab::Graphql::Pagination::Connections
query(Class.new(::Types::BaseObject) do
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 9d02f061435..439678e7e16 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -3,6 +3,92 @@
require 'spec_helper'
RSpec.describe Types::BaseField do
+ describe 'authorized?' do
+ let(:object) { double }
+ let(:current_user) { nil }
+ let(:ctx) { { current_user: current_user } }
+
+ it 'defaults to true' do
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true)
+
+ expect(field).to be_authorized(object, nil, ctx)
+ end
+
+ it 'tests the field authorization, if provided' do
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, authorize: :foo)
+
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(false)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+
+ it 'tests the field authorization, if provided, when it succeeds' do
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true, authorize: :foo)
+
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(true)
+
+ expect(field).to be_authorized(object, nil, ctx)
+ end
+
+ it 'only tests the resolver authorization if it authorizes_object?' do
+ resolver = Class.new
+
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
+ resolver_class: resolver)
+
+ expect(field).to be_authorized(object, nil, ctx)
+ end
+
+ it 'tests the resolver authorization, if provided' do
+ resolver = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
+ resolver_class: resolver)
+
+ expect(resolver).to receive(:authorized?).with(object, ctx).and_return(false)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+
+ it 'tests field authorization before resolver authorization, when field auth fails' do
+ resolver = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
+ authorize: :foo,
+ resolver_class: resolver)
+
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(false)
+ expect(resolver).not_to receive(:authorized?)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+
+ it 'tests field authorization before resolver authorization, when field auth succeeds' do
+ resolver = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+
+ field = described_class.new(name: 'test', type: GraphQL::Types::String, null: true,
+ authorize: :foo,
+ resolver_class: resolver)
+
+ expect(Ability).to receive(:allowed?).with(current_user, :foo, object).and_return(true)
+ expect(resolver).to receive(:authorized?).with(object, ctx).and_return(false)
+
+ expect(field).not_to be_authorized(object, nil, ctx)
+ end
+ end
+
context 'when considering complexity' do
let(:resolver) do
Class.new(described_class) do
diff --git a/spec/graphql/types/base_object_spec.rb b/spec/graphql/types/base_object_spec.rb
index 45dc885ecba..3c42c708187 100644
--- a/spec/graphql/types/base_object_spec.rb
+++ b/spec/graphql/types/base_object_spec.rb
@@ -137,7 +137,6 @@ RSpec.describe Types::BaseObject do
Class.new(GraphQL::Schema) do
lazy_resolve ::Gitlab::Graphql::Lazy, :force
- use ::GraphQL::Pagination::Connections
use ::Gitlab::Graphql::Pagination::Connections
query(Class.new(::Types::BaseObject) do
diff --git a/spec/graphql/types/ci/detailed_status_type_spec.rb b/spec/graphql/types/ci/detailed_status_type_spec.rb
index 5ed79b73a47..0c05227aec2 100644
--- a/spec/graphql/types/ci/detailed_status_type_spec.rb
+++ b/spec/graphql/types/ci/detailed_status_type_spec.rb
@@ -17,12 +17,10 @@ RSpec.describe Types::Ci::DetailedStatusType do
describe 'id field' do
it 'correctly renders the field' do
- parent_object = double(:parent_object, object: stage)
- parent = double(:parent, object: parent_object)
status = stage.detailed_status(stage.pipeline.user)
expected_id = "#{status.id}-#{stage.id}"
- expect(resolve_field('id', status, extras: { parent: parent })).to eq(expected_id)
+ expect(resolve_field('id', status, extras: { parent: stage }, arg_style: :internal)).to eq(expected_id)
end
end
@@ -38,7 +36,7 @@ RSpec.describe Types::Ci::DetailedStatusType do
title: status.action_title
}
- expect(resolve_field('action', status)).to eq(expected_status)
+ expect(resolve_field('action', status, arg_style: :internal)).to eq(expected_status)
end
end
end
diff --git a/spec/graphql/types/ci/pipeline_merge_request_event_type_enum_spec.rb b/spec/graphql/types/ci/pipeline_merge_request_event_type_enum_spec.rb
new file mode 100644
index 00000000000..3a90e4f1fd9
--- /dev/null
+++ b/spec/graphql/types/ci/pipeline_merge_request_event_type_enum_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PipelineMergeRequestEventType'] do
+ specify { expect(described_class.graphql_name).to eq('PipelineMergeRequestEventType') }
+
+ it 'has specific values' do
+ expect(described_class.values).to match a_hash_including(
+ 'MERGED_RESULT' => have_attributes(value: :merged_result),
+ 'DETACHED' => have_attributes(value: :detached)
+ )
+ end
+end
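
The enum class itself is not part of this diff; inferred from the assertions above, it plausibly has roughly this shape (file location and parent class are assumptions):

    module Types
      module Ci
        class PipelineMergeRequestEventTypeEnum < BaseEnum
          graphql_name 'PipelineMergeRequestEventType'

          value 'MERGED_RESULT', value: :merged_result
          value 'DETACHED', value: :detached
        end
      end
    end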
diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb
index 94d1b42da37..9dee834d05f 100644
--- a/spec/graphql/types/ci/pipeline_type_spec.rb
+++ b/spec/graphql/types/ci/pipeline_type_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Types::Ci::PipelineType do
coverage created_at updated_at started_at finished_at committed_at
stages user retryable cancelable jobs source_job job job_artifacts downstream
upstream path project active user_permissions warnings commit commit_path uses_needs
- test_report_summary test_suite ref ref_path warning_messages
+ test_report_summary test_suite ref ref_path warning_messages merge_request_event_type
]
if Gitlab.ee?
diff --git a/spec/graphql/types/ci/runner_type_spec.rb b/spec/graphql/types/ci/runner_type_spec.rb
index 26ac7a4da8d..4ec35db13fb 100644
--- a/spec/graphql/types/ci/runner_type_spec.rb
+++ b/spec/graphql/types/ci/runner_type_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe GitlabSchema.types['CiRunner'] do
id description created_at contacted_at maximum_timeout access_level active paused status
version short_sha revision locked run_untagged ip_address runner_type tag_list
project_count job_count admin_url edit_admin_url user_permissions executor_name architecture_name platform_name
- maintenance_note groups projects jobs token_expires_at
+ maintenance_note maintenance_note_html groups projects jobs token_expires_at owner_project
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/status_action_type_spec.rb b/spec/graphql/types/ci/status_action_type_spec.rb
index ab7dee3dd11..4c467bf240e 100644
--- a/spec/graphql/types/ci/status_action_type_spec.rb
+++ b/spec/graphql/types/ci/status_action_type_spec.rb
@@ -25,15 +25,9 @@ RSpec.describe Types::Ci::StatusActionType do
stage = build(:ci_stage_entity, status: :skipped)
status = stage.detailed_status(stage.pipeline.user)
- grandparent_object = double(:grandparent_object, object: stage)
- parent_object = double(:parent_object, object: status)
-
- grandparent = double(:parent, object: grandparent_object)
- parent = double(:parent, object: parent_object, parent: grandparent)
-
expected_id = "#{stage.class.name}-#{status.id}"
- expect(resolve_field('id', status, extras: { parent: parent })).to eq(expected_id)
+ expect(resolve_field('id', status, extras: { parent: status }, arg_style: :internal)).to eq(expected_id)
end
end
end
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 1d4590cbb4e..e7454b85357 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
confidential hidden discussion_locked upvotes downvotes merge_requests_count user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert severity current_user_todos moved moved_to
- create_note_email timelogs project_id customer_relations_contacts escalation_status]
+ closed_as_duplicate_of create_note_email timelogs project_id customer_relations_contacts escalation_status]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)
@@ -291,14 +291,6 @@ RSpec.describe GitlabSchema.types['Issue'] do
let!(:escalation_status) { create(:incident_management_issuable_escalation_status, issue: issue) }
it { is_expected.to eq(escalation_status.status_name.to_s.upcase) }
-
- context 'with feature disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it { is_expected.to be_nil }
- end
end
end
end
diff --git a/spec/graphql/types/limited_countable_connection_type_spec.rb b/spec/graphql/types/limited_countable_connection_type_spec.rb
new file mode 100644
index 00000000000..30af26cdb83
--- /dev/null
+++ b/spec/graphql/types/limited_countable_connection_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::LimitedCountableConnectionType do
+ it 'has the expected fields' do
+ expected_fields = %i[count page_info]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/cleanup/keep_duplicated_package_files_enum_spec.rb b/spec/graphql/types/packages/cleanup/keep_duplicated_package_files_enum_spec.rb
new file mode 100644
index 00000000000..d7f24a9edfd
--- /dev/null
+++ b/spec/graphql/types/packages/cleanup/keep_duplicated_package_files_enum_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackagesCleanupKeepDuplicatedPackageFilesEnum'] do
+ it 'exposes all options' do
+ expect(described_class.values.keys)
+ .to contain_exactly(*Types::Packages::Cleanup::KeepDuplicatedPackageFilesEnum::OPTIONS_MAPPING.values)
+ end
+
+ it 'uses all possible options from model' do
+ all_options = Packages::Cleanup::Policy::KEEP_N_DUPLICATED_PACKAGE_FILES_VALUES
+ expect(described_class::OPTIONS_MAPPING.keys).to contain_exactly(*all_options)
+ end
+end
diff --git a/spec/graphql/types/packages/cleanup/policy_type_spec.rb b/spec/graphql/types/packages/cleanup/policy_type_spec.rb
new file mode 100644
index 00000000000..f48651ed832
--- /dev/null
+++ b/spec/graphql/types/packages/cleanup/policy_type_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackagesCleanupPolicy'] do
+ specify { expect(described_class.graphql_name).to eq('PackagesCleanupPolicy') }
+
+ specify do
+ expect(described_class.description)
+ .to eq('A packages cleanup policy designed to keep only packages and packages assets that matter most')
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_package) }
+
+ describe 'keep_n_duplicated_package_files' do
+ subject { described_class.fields['keepNDuplicatedPackageFiles'] }
+
+ it { is_expected.to have_non_null_graphql_type(Types::Packages::Cleanup::KeepDuplicatedPackageFilesEnum) }
+ end
+
+ describe 'next_run_at' do
+ subject { described_class.fields['nextRunAt'] }
+
+ it { is_expected.to have_nullable_graphql_type(Types::TimeType) }
+ end
+end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index a08bd717c72..2e994bf7820 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -36,12 +36,40 @@ RSpec.describe GitlabSchema.types['Project'] do
pipeline_analytics squash_read_only sast_ci_configuration
cluster_agent cluster_agents agent_configurations
ci_template timelogs merge_commit_template squash_commit_template work_item_types
- recent_issue_boards ci_config_path_or_default
+ recent_issue_boards ci_config_path_or_default packages_cleanup_policy
]
expect(described_class).to include_graphql_fields(*expected_fields)
end
+ describe 'count' do
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ %(
+ query {
+ projects {
+ count
+ edges {
+ node {
+ id
+ }
+ }
+ }
+ }
+ )
+ end
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ it 'returns valid projects count' do
+ create(:project, namespace: user.namespace)
+ create(:project, namespace: user.namespace)
+
+ expect(subject.dig('data', 'projects', 'count')).to eq(2)
+ end
+ end
+
describe 'container_registry_enabled' do
let_it_be(:project, reload: true) { create(:project, :public) }
let_it_be(:user) { create(:user) }
@@ -393,6 +421,12 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::ContainerExpirationPolicyType) }
end
+ describe 'packages cleanup policy field' do
+ subject { described_class.fields['packagesCleanupPolicy'] }
+
+ it { is_expected.to have_graphql_type(Types::Packages::Cleanup::PolicyType) }
+ end
+
describe 'terraform state field' do
subject { described_class.fields['terraformState'] }
diff --git a/spec/graphql/types/terraform/state_type_spec.rb b/spec/graphql/types/terraform/state_type_spec.rb
index 9f65bb926d7..5098adbf45c 100644
--- a/spec/graphql/types/terraform/state_type_spec.rb
+++ b/spec/graphql/types/terraform/state_type_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['TerraformState'] do
it { expect(described_class).to require_graphql_authorizations(:read_terraform_state) }
describe 'fields' do
- let(:fields) { %i[id name locked_by_user locked_at latest_version created_at updated_at] }
+ let(:fields) { %i[id name locked_by_user locked_at latest_version created_at updated_at deleted_at] }
it { expect(described_class).to have_graphql_fields(fields) }
@@ -17,6 +17,7 @@ RSpec.describe GitlabSchema.types['TerraformState'] do
it { expect(described_class.fields['lockedAt'].type).not_to be_non_null }
it { expect(described_class.fields['createdAt'].type).to be_non_null }
it { expect(described_class.fields['updatedAt'].type).to be_non_null }
+ it { expect(described_class.fields['deletedAt'].type).not_to be_non_null }
it { expect(described_class.fields['latestVersion'].type).not_to be_non_null }
it { expect(described_class.fields['latestVersion'].complexity).to eq(3) }
diff --git a/spec/graphql/types/time_type_spec.rb b/spec/graphql/types/time_type_spec.rb
index 3b0d257e1d7..367a2371694 100644
--- a/spec/graphql/types/time_type_spec.rb
+++ b/spec/graphql/types/time_type_spec.rb
@@ -21,8 +21,7 @@ RSpec.describe GitlabSchema.types['Time'] do
.to raise_error(GraphQL::CoercionError)
end
- it 'rejects nil' do
- expect { described_class.coerce_isolated_input(nil) }
- .to raise_error(GraphQL::CoercionError)
+ it 'allows nil' do
+ expect(described_class.coerce_isolated_input(nil)).to be_nil
end
end
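
A sketch of the scalar behaviour the updated example asserts, assuming a Time.parse-based coercion (the real Types::TimeType implementation is outside this diff):

    module Types
      class TimeType < BaseScalar # parent class assumed
        # Input coercion now passes nil through instead of raising.
        def self.coerce_input(value, _ctx)
          return if value.nil?

          Time.parse(value)
        rescue ArgumentError, TypeError => e
          raise GraphQL::CoercionError, e.message
        end
      end
    end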
diff --git a/spec/graphql/types/todo_type_spec.rb b/spec/graphql/types/todo_type_spec.rb
index 8de63ebfda5..c7bb7c67959 100644
--- a/spec/graphql/types/todo_type_spec.rb
+++ b/spec/graphql/types/todo_type_spec.rb
@@ -4,7 +4,19 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['Todo'] do
it 'has the correct fields' do
- expected_fields = [:id, :project, :group, :author, :action, :target, :target_type, :body, :state, :created_at]
+ expected_fields = [
+ :id,
+ :project,
+ :group,
+ :author,
+ :action,
+ :target,
+ :target_type,
+ :body,
+ :state,
+ :created_at,
+ :note
+ ]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index c913a4c3662..fec6a771640 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -91,8 +91,8 @@ RSpec.describe GitlabSchema.types['User'] do
context 'when requester is nil' do
let(:current_user) { nil }
- it 'returns `****`' do
- expect(user_name).to eq('****')
+ it 'returns nothing' do
+ expect(user_name).to be_nil
end
end
@@ -134,8 +134,8 @@ RSpec.describe GitlabSchema.types['User'] do
context 'when requester is nil' do
let(:current_user) { nil }
- it 'returns `****`' do
- expect(user_name).to eq('****')
+ it 'returns nothing' do
+ expect(user_name).to be_nil
end
end
diff --git a/spec/graphql/types/work_item_type_spec.rb b/spec/graphql/types/work_item_type_spec.rb
index a0480506156..7ed58786b5b 100644
--- a/spec/graphql/types/work_item_type_spec.rb
+++ b/spec/graphql/types/work_item_type_spec.rb
@@ -10,7 +10,18 @@ RSpec.describe GitlabSchema.types['WorkItem'] do
specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::WorkItem) }
it 'has specific fields' do
- fields = %i[description description_html id iid lock_version state title title_html userPermissions work_item_type]
+ fields = %i[
+ description
+ description_html
+ id
+ iid
+ lock_version
+ state
+ title
+ title_html
+ userPermissions
+ widgets
+ work_item_type
+ ]
fields.each do |field_name|
expect(described_class).to have_graphql_fields(*fields)
diff --git a/spec/graphql/types/work_items/widget_interface_spec.rb b/spec/graphql/types/work_items/widget_interface_spec.rb
new file mode 100644
index 00000000000..ee40bcc10ca
--- /dev/null
+++ b/spec/graphql/types/work_items/widget_interface_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::WidgetInterface do
+ include GraphqlHelpers
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ describe ".resolve_type" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:widget_class, :widget_type_name) do
+ WorkItems::Widgets::Description | Types::WorkItems::Widgets::DescriptionType
+ WorkItems::Widgets::Hierarchy | Types::WorkItems::Widgets::HierarchyType
+ end
+
+ with_them do
+ it 'knows the correct type for objects' do
+ expect(
+ described_class.resolve_type(widget_class.new(build(:work_item)), {})
+ ).to eq(widget_type_name)
+ end
+ end
+
+ it 'raises an error for an unknown type' do
+ project = build(:project)
+
+ expect { described_class.resolve_type(project, {}) }
+ .to raise_error("Unknown GraphQL type for widget #{project}")
+ end
+ end
+end
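
The interface's resolve_type method is not shown here; a sketch consistent with the parameterized table and the error message above (the case statement itself is an assumption):

    def self.resolve_type(object, _context)
      case object
      when ::WorkItems::Widgets::Description
        ::Types::WorkItems::Widgets::DescriptionType
      when ::WorkItems::Widgets::Hierarchy
        ::Types::WorkItems::Widgets::HierarchyType
      else
        raise "Unknown GraphQL type for widget #{object}"
      end
    end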
diff --git a/spec/graphql/types/work_items/widget_type_enum_spec.rb b/spec/graphql/types/work_items/widget_type_enum_spec.rb
new file mode 100644
index 00000000000..e7ac9b9c317
--- /dev/null
+++ b/spec/graphql/types/work_items/widget_type_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['WorkItemWidgetType'] do
+ specify { expect(described_class.graphql_name).to eq('WorkItemWidgetType') }
+
+ it 'exposes all the existing widget type values' do
+ expect(described_class.values.transform_values { |v| v.value }).to include(
+ 'DESCRIPTION' => :description
+ )
+ end
+end
diff --git a/spec/graphql/types/work_items/widgets/description_type_spec.rb b/spec/graphql/types/work_items/widgets/description_type_spec.rb
new file mode 100644
index 00000000000..5ade1fe4aa2
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/description_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::Widgets::DescriptionType do
+ it 'exposes the expected fields' do
+ expected_fields = %i[description description_html type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb b/spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb
new file mode 100644
index 00000000000..1722a07c5f4
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/hierarchy_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::Widgets::HierarchyType do
+ it 'exposes the expected fields' do
+ expected_fields = %i[parent children type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/helpers/access_tokens_helper_spec.rb b/spec/helpers/access_tokens_helper_spec.rb
index c2c918bc6b0..d34251d03db 100644
--- a/spec/helpers/access_tokens_helper_spec.rb
+++ b/spec/helpers/access_tokens_helper_spec.rb
@@ -64,4 +64,12 @@ RSpec.describe AccessTokensHelper do
}.to_json)
end
end
+
+ describe '#expires_at_field_data', :freeze_time do
+ it 'returns expected hash' do
+ expect(helper.expires_at_field_data).to eq({
+ min_date: 1.day.from_now.iso8601
+ })
+ end
+ end
end
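
The helper under test is outside this diff; a minimal implementation consistent with the new :freeze_time example (placement in AccessTokensHelper is assumed):

    def expires_at_field_data
      # Smallest expiry date the form should accept: one day from now,
      # rendered as an ISO 8601 string.
      { min_date: 1.day.from_now.iso8601 }
    end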
diff --git a/spec/helpers/admin/application_settings/settings_helper_spec.rb b/spec/helpers/admin/application_settings/settings_helper_spec.rb
new file mode 100644
index 00000000000..9981e0d12bd
--- /dev/null
+++ b/spec/helpers/admin/application_settings/settings_helper_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Admin::ApplicationSettings::SettingsHelper do
+ describe '#inactive_projects_deletion_data' do
+ let(:delete_inactive_projects) { true }
+ let(:inactive_projects_delete_after_months) { 2 }
+ let(:inactive_projects_min_size_mb) { 250 }
+ let(:inactive_projects_send_warning_email_after_months) { 1 }
+
+ let_it_be(:application_settings) { build(:application_setting) }
+
+ before do
+ stub_application_setting(delete_inactive_projects: delete_inactive_projects)
+ stub_application_setting(inactive_projects_delete_after_months: inactive_projects_delete_after_months)
+ stub_application_setting(inactive_projects_min_size_mb: inactive_projects_min_size_mb)
+ stub_application_setting(
+ inactive_projects_send_warning_email_after_months: inactive_projects_send_warning_email_after_months
+ )
+ end
+
+ subject(:result) { helper.inactive_projects_deletion_data(application_settings) }
+
+ it 'has the expected data' do
+ expect(result).to eq({
+ delete_inactive_projects: delete_inactive_projects.to_s,
+ inactive_projects_delete_after_months: inactive_projects_delete_after_months,
+ inactive_projects_min_size_mb: inactive_projects_min_size_mb,
+ inactive_projects_send_warning_email_after_months: inactive_projects_send_warning_email_after_months
+ })
+ end
+ end
+end
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index 429d4c7941a..8366506aa45 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -31,7 +31,13 @@ RSpec.describe Ci::PipelineEditorHelper do
allow(helper)
.to receive(:image_path)
- .and_return('foo')
+ .with('illustrations/empty-state/empty-dag-md.svg')
+ .and_return('illustrations/empty.svg')
+
+ allow(helper)
+ .to receive(:image_path)
+ .with('illustrations/project-run-CICD-pipelines-sm.svg')
+ .and_return('illustrations/validate.svg')
end
subject(:pipeline_editor_data) { helper.js_pipeline_editor_data(project) }
@@ -43,7 +49,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"ci-examples-help-page-path" => help_page_path('ci/examples/index'),
"ci-help-page-path" => help_page_path('ci/index'),
"default-branch" => project.default_branch_or_main,
- "empty-state-illustration-path" => 'foo',
+ "empty-state-illustration-path" => 'illustrations/empty.svg',
"initial-branch-name" => nil,
"includes-help-page-path" => help_page_path('ci/yaml/includes'),
"lint-help-page-path" => help_page_path('ci/lint', anchor: 'check-cicd-syntax'),
@@ -57,6 +63,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-namespace" => project.namespace.full_path,
"runner-help-page-path" => help_page_path('ci/runners/index'),
"total-branches" => project.repository.branches.length,
+ "validate-tab-illustration-path" => 'illustrations/validate.svg',
"yml-help-page-path" => help_page_path('ci/yaml/index')
})
end
@@ -71,7 +78,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"ci-examples-help-page-path" => help_page_path('ci/examples/index'),
"ci-help-page-path" => help_page_path('ci/index'),
"default-branch" => project.default_branch_or_main,
- "empty-state-illustration-path" => 'foo',
+ "empty-state-illustration-path" => 'illustrations/empty.svg',
"initial-branch-name" => nil,
"includes-help-page-path" => help_page_path('ci/yaml/includes'),
"lint-help-page-path" => help_page_path('ci/lint', anchor: 'check-cicd-syntax'),
@@ -85,6 +92,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-namespace" => project.namespace.full_path,
"runner-help-page-path" => help_page_path('ci/runners/index'),
"total-branches" => 0,
+ "validate-tab-illustration-path" => 'illustrations/validate.svg',
"yml-help-page-path" => help_page_path('ci/yaml/index')
})
end
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index cf62579338f..4d1b1c7682c 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -84,12 +84,14 @@ RSpec.describe Ci::RunnersHelper do
end
it 'returns the data in format' do
- expect(helper.admin_runners_data_attributes).to eq({
+ expect(helper.admin_runners_data_attributes).to include(
runner_install_help_page: 'https://docs.gitlab.com/runner/install/',
registration_token: Gitlab::CurrentSettings.runners_registration_token,
online_contact_timeout_secs: 7200,
- stale_timeout_secs: 7889238
- })
+ stale_timeout_secs: 7889238,
+ empty_state_svg_path: start_with('/assets/illustrations/pipelines_empty'),
+ empty_state_filtered_svg_path: start_with('/assets/illustrations/magnifying-glass')
+ )
end
end
@@ -130,14 +132,16 @@ RSpec.describe Ci::RunnersHelper do
let(:group) { create(:group) }
it 'returns group data to render a runner list' do
- expect(helper.group_runners_data_attributes(group)).to eq({
+ expect(helper.group_runners_data_attributes(group)).to include(
registration_token: group.runners_token,
group_id: group.id,
group_full_path: group.full_path,
runner_install_help_page: 'https://docs.gitlab.com/runner/install/',
online_contact_timeout_secs: 7200,
- stale_timeout_secs: 7889238
- })
+ stale_timeout_secs: 7889238,
+ empty_state_svg_path: start_with('/assets/illustrations/pipelines_empty'),
+ empty_state_filtered_svg_path: start_with('/assets/illustrations/magnifying-glass')
+ )
end
end
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index 84e702cd6a9..cf16807723b 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -468,4 +468,25 @@ RSpec.describe DiffHelper do
it { is_expected.to be_nil }
end
end
+
+ describe '#conflicts' do
+ let(:merge_request) { instance_double(MergeRequest) }
+
+ before do
+ allow(helper).to receive(:merge_request).and_return(merge_request)
+ allow(helper).to receive(:options).and_return(merge_ref_head_diff: true)
+ end
+
+ context 'when Gitlab::Git::Conflict::Resolver::ConflictSideMissing exception is raised' do
+ before do
+ allow_next_instance_of(MergeRequests::Conflicts::ListService, merge_request, allow_tree_conflicts: true) do |svc|
+ allow(svc).to receive_message_chain(:conflicts, :files).and_raise(Gitlab::Git::Conflict::Resolver::ConflictSideMissing)
+ end
+ end
+
+ it 'returns an empty hash' do
+ expect(helper.conflicts(allow_tree_conflicts: true)).to eq({})
+ end
+ end
+ end
end
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 39b919fa925..220e154aad8 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe EmailsHelper do
+ include EmailsHelperTestHelper
+
describe 'closure_reason_text' do
context 'when given a MergeRequest' do
let(:merge_request) { create(:merge_request) }
@@ -225,30 +227,40 @@ RSpec.describe EmailsHelper do
describe '#header_logo' do
context 'there is a brand item with a logo' do
- it 'returns the brand header logo' do
- appearance = create :appearance, header_logo: fixture_file_upload('spec/fixtures/dk.png')
+ let_it_be(:appearance) { create(:appearance) }
+
+ let(:logo_path) { 'spec/fixtures/dk.png' }
+ before do
+ appearance.update!(header_logo: fixture_file_upload(logo_path))
+ end
+
+ it 'returns the brand header logo' do
expect(header_logo).to eq(
%{<img style="height: 50px" src="/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png" />}
)
end
+
+ context 'that is a SVG file' do
+ let(:logo_path) { 'spec/fixtures/logo_sample.svg' }
+
+ it 'returns the default header logo' do
+ expect(header_logo).to match(default_header_logo)
+ end
+ end
end
context 'there is a brand item without a logo' do
it 'returns the default header logo' do
create :appearance, header_logo: nil
- expect(header_logo).to match(
- %r{<img alt="GitLab" src="/images/mailers/gitlab_logo\.(?:gif|png)" width="\d+" height="\d+" />}
- )
+ expect(header_logo).to match(default_header_logo)
end
end
context 'there is no brand item' do
it 'returns the default header logo' do
- expect(header_logo).to match(
- %r{<img alt="GitLab" src="/images/mailers/gitlab_logo\.(?:gif|png)" width="\d+" height="\d+" />}
- )
+ expect(header_logo).to match(default_header_logo)
end
end
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index 52f02fba4ec..e4d4f18ad68 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe EnvironmentsHelper do
it 'returns data' do
expect(metrics_data).to include(
- 'settings_path' => edit_project_integration_path(project, 'prometheus'),
+ 'settings_path' => edit_project_settings_integration_path(project, 'prometheus'),
'clusters_path' => project_clusters_path(project),
'metrics_dashboard_base_path' => project_metrics_dashboard_path(project, environment: environment),
'current_environment_name' => environment.name,
@@ -39,7 +39,6 @@ RSpec.describe EnvironmentsHelper do
'custom_metrics_path' => project_prometheus_metrics_path(project),
'validate_query_path' => validate_query_project_prometheus_metrics_path(project),
'custom_metrics_available' => 'true',
- 'alerts_endpoint' => project_prometheus_alerts_path(project, environment_id: environment.id, format: :json),
'custom_dashboard_base_path' => Gitlab::Metrics::Dashboard::RepoDashboardFinder::DASHBOARD_ROOT,
'operations_settings_path' => project_settings_operations_path(project),
'can_access_operations_settings' => 'true',
diff --git a/spec/helpers/form_helper_spec.rb b/spec/helpers/form_helper_spec.rb
index 79c96e65a0e..c9c8c6b13b6 100644
--- a/spec/helpers/form_helper_spec.rb
+++ b/spec/helpers/form_helper_spec.rb
@@ -10,11 +10,16 @@ RSpec.describe FormHelper do
expect(helper.form_errors(model)).to be_nil
end
- it 'renders an alert div' do
+ it 'renders an appropriately styled alert div' do
model = double(errors: errors_stub('Error 1'))
- expect(helper.form_errors(model))
+ expect(helper.form_errors(model, pajamas_alert: false))
.to include('<div class="alert alert-danger" id="error_explanation">')
+
+ expect(helper.form_errors(model, pajamas_alert: true))
+ .to include(
+ '<div class="gl-alert gl-mb-5 gl-alert-danger gl-alert-not-dismissible" id="error_explanation" role="alert">'
+ )
end
it 'contains a summary message' do
@@ -22,9 +27,9 @@ RSpec.describe FormHelper do
multi_errors = double(errors: errors_stub('A', 'B', 'C'))
expect(helper.form_errors(single_error))
- .to include('<h4>The form contains the following error:')
+ .to include('The form contains the following error:')
expect(helper.form_errors(multi_errors))
- .to include('<h4>The form contains the following errors:')
+ .to include('The form contains the following errors:')
end
it 'renders each message' do
@@ -58,6 +63,43 @@ RSpec.describe FormHelper do
end
end
+ it 'renders help page links' do
+ stubbed_errors = ActiveModel::Errors.new(double).tap do |errors|
+ errors.add(:base, 'No text.', help_page_url: 'http://localhost/doc/user/index.html')
+ errors.add(
+ :base,
+ 'With text.',
+ help_link_text: 'Documentation page title.',
+ help_page_url: 'http://localhost/doc/administration/index.html'
+ )
+ errors.add(
+ :base,
+ 'With HTML text.',
+ help_link_text: '<foo>',
+ help_page_url: 'http://localhost/doc/security/index.html'
+ )
+ end
+
+ model = double(errors: stubbed_errors)
+
+ errors = helper.form_errors(model)
+
+ aggregate_failures do
+ expect(errors).to include(
+ '<li>No text. <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="http://localhost/doc/user/index.html">Learn more.</a></li>'
+ )
+ expect(errors).to include(
+ '<li>With text. <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="http://localhost/doc/administration/index.html">Documentation page title.</a></li>'
+ )
+ expect(errors).to include(
+ '<li>With HTML text. <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="http://localhost/doc/security/index.html">&lt;foo&gt;</a></li>'
+ )
+ end
+ end
+
def errors_stub(*messages)
ActiveModel::Errors.new(double).tap do |errors|
messages.each { |msg| errors.add(:base, msg) }
diff --git a/spec/helpers/groups/crm_settings_helper_spec.rb b/spec/helpers/groups/crm_settings_helper_spec.rb
deleted file mode 100644
index 87690e7debc..00000000000
--- a/spec/helpers/groups/crm_settings_helper_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Groups::CrmSettingsHelper do
- let_it_be(:root_group) { create(:group) }
-
- describe '#crm_feature_available?' do
- subject do
- helper.crm_feature_available?(group)
- end
-
- context 'in root group' do
- let(:group) { root_group }
-
- context 'when feature flag is enabled' do
- it { is_expected.to be_truthy }
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it { is_expected.to be_falsy }
- end
- end
-
- context 'in subgroup' do
- let_it_be(:subgroup) { create(:group, parent: root_group) }
-
- let(:group) { subgroup }
-
- context 'when feature flag is enabled' do
- it { is_expected.to be_truthy }
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it { is_expected.to be_falsy }
- end
- end
- end
-end
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index ab11bc1f5fd..d308df3a017 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -5,15 +5,8 @@ require "spec_helper"
RSpec.describe Groups::GroupMembersHelper do
include MembersPresentation
- let_it_be(:current_user) { create(:user) }
let_it_be(:group) { create(:group) }
- before do
- allow(helper).to receive(:can?).with(current_user, :export_group_memberships, group).and_return(false)
- allow(helper).to receive(:can?).with(current_user, :owner_access, group).and_return(true)
- allow(helper).to receive(:current_user).and_return(current_user)
- end
-
describe '.group_member_select_options' do
before do
helper.instance_variable_set(:@group, group)
@@ -27,12 +20,24 @@ RSpec.describe Groups::GroupMembersHelper do
describe '#group_members_app_data' do
include_context 'group_group_link'
+ let_it_be(:current_user) { create(:user) }
+
let(:members) { create_list(:group_member, 2, group: shared_group, created_by: current_user) }
let(:invited) { create_list(:group_member, 2, :invited, group: shared_group, created_by: current_user) }
let!(:access_requests) { create_list(:group_member, 2, :access_request, group: shared_group, created_by: current_user) }
let(:members_collection) { members }
+ before do
+ allow(helper).to receive(:can?).with(current_user, :export_group_memberships, group).and_return(false)
+ allow(helper).to receive(:can?).with(current_user, :owner_access, group).and_return(true)
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:can?).with(current_user, :export_group_memberships, shared_group).and_return(true)
+ allow(helper).to receive(:group_group_member_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_members/:id')
+ allow(helper).to receive(:group_group_link_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_links/:id')
+ allow(helper).to receive(:can?).with(current_user, :admin_group_member, shared_group).and_return(true)
+ end
+
subject do
helper.group_members_app_data(
shared_group,
@@ -54,13 +59,6 @@ RSpec.describe Groups::GroupMembersHelper do
end
end
- before do
- allow(helper).to receive(:can?).with(current_user, :export_group_memberships, shared_group).and_return(true)
- allow(helper).to receive(:group_group_member_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_members/:id')
- allow(helper).to receive(:group_group_link_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_links/:id')
- allow(helper).to receive(:can?).with(current_user, :admin_group_member, shared_group).and_return(true)
- end
-
it 'returns expected json' do
expected = {
source_id: shared_group.id,
@@ -137,24 +135,6 @@ RSpec.describe Groups::GroupMembersHelper do
expect(subject[:group][:members].map { |link| link[:id] }).to match_array(result)
end
end
-
- context 'when group_member_inherited_group disabled' do
- before do
- stub_feature_flags(group_member_inherited_group: false)
- end
-
- where(:include_relations, :result) do
- [:inherited, :direct] | lazy { [sub_group_group_link.id] }
- [:inherited] | lazy { [sub_group_group_link.id] }
- [:direct] | lazy { [sub_group_group_link.id] }
- end
-
- with_them do
- it 'always returns direct member links' do
- expect(subject[:group][:members].map { |link| link[:id] }).to match_array(result)
- end
- end
- end
end
end
@@ -188,4 +168,10 @@ RSpec.describe Groups::GroupMembersHelper do
end
end
end
+
+ describe '#group_member_header_subtext' do
+ it 'contains expected text with group name' do
+ expect(helper.group_member_header_subtext(group)).to match("You can invite a new member to .*#{group.name}")
+ end
+ end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 8859ed27022..bcbe571db5e 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -419,4 +419,89 @@ RSpec.describe GroupsHelper do
expect(localized_jobs_to_be_done_choices.keys).to match_array(NamespaceSetting.jobs_to_be_dones.keys)
end
end
+
+ describe '#group_name_and_path_app_data' do
+ let_it_be(:group) { build(:group, name: 'My awesome group', path: 'my-awesome-group') }
+ let_it_be(:subgroup) { build(:group, parent: group) }
+ let_it_be(:root_url) { 'https://gitlab.com/' }
+
+ before do
+ allow(Gitlab.config.mattermost).to receive(:enabled).and_return(true)
+ allow(helper).to receive(:root_url) { root_url }
+ end
+
+ context 'when group has a parent' do
+ it 'returns expected hash' do
+ expect(group_name_and_path_app_data(subgroup)).to match(
+ { base_path: 'https://gitlab.com/my-awesome-group', mattermost_enabled: 'true' }
+ )
+ end
+ end
+
+ context 'when group does not have a parent' do
+ it 'returns expected hash' do
+ expect(group_name_and_path_app_data(group)).to match(
+ { base_path: root_url, mattermost_enabled: 'true' }
+ )
+ end
+ end
+ end
+
+ describe '#subgroups_and_projects_list_app_data' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+
+ allow(helper).to receive(:can?).with(user, :create_subgroup, group) { true }
+ allow(helper).to receive(:can?).with(user, :create_projects, group) { true }
+ end
+
+ it 'returns expected hash' do
+ expect(helper.subgroups_and_projects_list_app_data(group)).to match({
+ show_schema_markup: 'true',
+ new_subgroup_path: including("groups/new?parent_id=#{group.id}"),
+ new_project_path: including("/projects/new?namespace_id=#{group.id}"),
+ new_subgroup_illustration: including('illustrations/subgroup-create-new-sm'),
+ new_project_illustration: including('illustrations/project-create-new-sm'),
+ empty_subgroup_illustration: including('illustrations/empty-state/empty-subgroup-md'),
+ render_empty_state: 'true',
+ can_create_subgroups: 'true',
+ can_create_projects: 'true'
+ })
+ end
+ end
+
+ describe "#enabled_git_access_protocol_options_for_group" do
+ subject { helper.enabled_git_access_protocol_options_for_group }
+
+ before do
+ expect(::Gitlab::CurrentSettings).to receive(:enabled_git_access_protocol).and_return(instance_setting)
+ end
+
+ context "instance setting is nil" do
+ let(:instance_setting) { nil }
+
+ it { is_expected.to contain_exactly([_("Both SSH and HTTP(S)"), "all"], [_("Only SSH"), "ssh"], [_("Only HTTP(S)"), "http"]) }
+ end
+
+ context "instance setting is blank" do
+ let(:instance_setting) { "" }
+
+ it { is_expected.to contain_exactly([_("Both SSH and HTTP(S)"), "all"], [_("Only SSH"), "ssh"], [_("Only HTTP(S)"), "http"]) }
+ end
+
+ context "instance setting is ssh" do
+ let(:instance_setting) { "ssh" }
+
+ it { is_expected.to contain_exactly([_("Only SSH"), "ssh"]) }
+ end
+
+ context "instance setting is http" do
+ let(:instance_setting) { "http" }
+
+ it { is_expected.to contain_exactly([_("Only HTTP(S)"), "http"]) }
+ end
+ end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 58f06899cd6..a58fe9a6cd9 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -365,12 +365,14 @@ RSpec.describe IssuesHelper do
expected = {
autocomplete_award_emojis_path: autocomplete_award_emojis_path,
calendar_path: '#',
+ can_create_projects: 'true',
empty_state_svg_path: '#',
full_path: group.full_path,
has_any_issues: false.to_s,
has_any_projects: true.to_s,
is_signed_in: current_user.present?.to_s,
jira_integration_path: help_page_url('integration/jira/issues', anchor: 'view-jira-issues'),
+ new_project_path: new_project_path(namespace_id: group.id),
rss_path: '#',
sign_in_path: new_user_session_path
}
@@ -450,6 +452,43 @@ RSpec.describe IssuesHelper do
end
end
+ describe '#status_box_class' do
+ context 'when object is expired' do
+ it 'returns orange background' do
+ milestone = build(:milestone, due_date: Date.today.prev_month)
+ expect(helper.status_box_class(milestone)).to eq('gl-bg-orange-500')
+ end
+ end
+
+ context 'when object is merged' do
+ it 'returns blue background' do
+ merge_request = build(:merge_request, :merged)
+ expect(helper.status_box_class(merge_request)).to eq('badge-info')
+ end
+ end
+
+ context 'when object is closed' do
+ it 'returns red background' do
+ merge_request = build(:merge_request, :closed)
+ expect(helper.status_box_class(merge_request)).to eq('badge-danger')
+ end
+ end
+
+ context 'when object is upcoming' do
+ it 'returns gray background' do
+ milestone = build(:milestone, start_date: Date.today.next_month)
+ expect(helper.status_box_class(milestone)).to eq('gl-bg-gray-500')
+ end
+ end
+
+ context 'when object is opened' do
+ it 'returns green background' do
+ merge_request = build(:merge_request, :opened)
+ expect(helper.status_box_class(merge_request)).to eq('badge-success')
+ end
+ end
+ end
+
describe '#issue_hidden?' do
context 'when issue is hidden' do
let_it_be(:banned_user) { build(:user, :banned) }
diff --git a/spec/helpers/jira_connect_helper_spec.rb b/spec/helpers/jira_connect_helper_spec.rb
index 169a5c0076a..4d2fc3d9ee6 100644
--- a/spec/helpers/jira_connect_helper_spec.rb
+++ b/spec/helpers/jira_connect_helper_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe JiraConnectHelper do
let(:client_id) { '123' }
before do
- stub_env('JIRA_CONNECT_OAUTH_CLIENT_ID', client_id)
+ stub_application_setting(jira_connect_application_key: client_id)
end
subject { helper.jira_connect_app_data([subscription]) }
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index a7e657f2636..8a7a6d003f4 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -467,6 +467,33 @@ FooBar
end
end
+ context 'when rendering takes too long' do
+ before do
+ stub_const("MarkupHelper::RENDER_TIMEOUT", 0.1)
+ allow(Gitlab::OtherMarkup).to receive(:render) { sleep(0.2) }
+ end
+
+ it 'times out' do
+ expect(Gitlab::RenderTimeout).to receive(:timeout).and_call_original
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(Timeout::Error),
+ project_id: project.id, file_name: file_name
+ )
+
+ subject
+ end
+
+ context 'when markup_rendering_timeout is disabled' do
+ it 'waits until the execution completes' do
+ stub_feature_flags(markup_rendering_timeout: false)
+
+ expect(Gitlab::RenderTimeout).not_to receive(:timeout)
+
+ subject
+ end
+ end
+ end
+
context 'when file is a markdown file' do
let(:file_name) { 'foo.md' }
diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb
index ab206152e3d..4f32ac5b5c6 100644
--- a/spec/helpers/nav/new_dropdown_helper_spec.rb
+++ b/spec/helpers/nav/new_dropdown_helper_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Nav::NewDropdownHelper do
end
it 'has title' do
- expect(subject[:title]).to eq('New...')
+ expect(subject[:title]).to eq('Create new')
end
context 'when current_user is nil (anonymous)' do
diff --git a/spec/helpers/nav/top_nav_helper_spec.rb b/spec/helpers/nav/top_nav_helper_spec.rb
index e4422dde407..9d43e057521 100644
--- a/spec/helpers/nav/top_nav_helper_spec.rb
+++ b/spec/helpers/nav/top_nav_helper_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Nav::TopNavHelper do
let(:current_user) { nil }
before do
+ stub_application_setting(snowplow_enabled: true)
allow(helper).to receive(:current_user) { current_user }
end
@@ -50,49 +51,40 @@ RSpec.describe Nav::TopNavHelper do
context 'when current_user is nil (anonymous)' do
it 'has expected :primary' do
expected_primary = [
- ::Gitlab::Nav::TopNavMenuItem.build(
- href: '/explore',
- icon: 'project',
- id: 'project',
- title: 'Projects'
- ),
- ::Gitlab::Nav::TopNavMenuItem.build(
- href: '/explore/groups',
- icon: 'group',
- id: 'groups',
- title: 'Groups'
- ),
- ::Gitlab::Nav::TopNavMenuItem.build(
- href: '/explore/snippets',
- icon: 'snippet',
- id: 'snippets',
- title: 'Snippets'
- )
- ]
+ { href: '/explore', icon: 'project', id: 'project', title: 'Projects' },
+ { href: '/explore/groups', icon: 'group', id: 'groups', title: 'Groups' },
+ { href: '/explore/snippets', icon: 'snippet', id: 'snippets', title: 'Snippets' }
+ ].map do |item|
+ ::Gitlab::Nav::TopNavMenuItem.build(**item)
+ end
+
expect(subject[:primary]).to eq(expected_primary)
end
it 'has expected :shortcuts' do
expected_shortcuts = [
- ::Gitlab::Nav::TopNavMenuItem.build(
+ {
href: '/explore',
id: 'project-shortcut',
title: 'Projects',
css_class: 'dashboard-shortcuts-projects'
- ),
- ::Gitlab::Nav::TopNavMenuItem.build(
+ },
+ {
href: '/explore/groups',
id: 'groups-shortcut',
title: 'Groups',
css_class: 'dashboard-shortcuts-groups'
- ),
- ::Gitlab::Nav::TopNavMenuItem.build(
+ },
+ {
href: '/explore/snippets',
id: 'snippets-shortcut',
title: 'Snippets',
css_class: 'dashboard-shortcuts-snippets'
- )
- ]
+ }
+ ].map do |item|
+ ::Gitlab::Nav::TopNavMenuItem.build(**item)
+ end
+
expect(subject[:shortcuts]).to eq(expected_shortcuts)
end
@@ -171,21 +163,41 @@ RSpec.describe Nav::TopNavHelper do
it 'has expected :linksPrimary' do
expected_links_primary = [
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Your projects',
+ **menu_data_tracking_attrs('your_projects')
+ },
href: '/dashboard/projects',
id: 'your',
title: 'Your projects'
),
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Starred projects',
+ **menu_data_tracking_attrs('starred_projects')
+ },
href: '/dashboard/projects/starred',
id: 'starred',
title: 'Starred projects'
),
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Explore projects',
+ **menu_data_tracking_attrs('explore_projects')
+ },
href: '/explore',
id: 'explore',
title: 'Explore projects'
),
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Explore topics',
+ **menu_data_tracking_attrs('explore_topics')
+ },
href: '/explore/projects/topics',
id: 'topics',
title: 'Explore topics'
@@ -197,6 +209,11 @@ RSpec.describe Nav::TopNavHelper do
it 'has expected :linksSecondary' do
expected_links_secondary = [
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Create new project',
+ **menu_data_tracking_attrs('create_new_project')
+ },
href: '/projects/new',
id: 'create',
title: 'Create new project'
@@ -282,11 +299,21 @@ RSpec.describe Nav::TopNavHelper do
it 'has expected :linksPrimary' do
expected_links_primary = [
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Your groups',
+ **menu_data_tracking_attrs('your_groups')
+ },
href: '/dashboard/groups',
id: 'your',
title: 'Your groups'
),
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Explore groups',
+ **menu_data_tracking_attrs('explore_groups')
+ },
href: '/explore/groups',
id: 'explore',
title: 'Explore groups'
@@ -298,6 +325,11 @@ RSpec.describe Nav::TopNavHelper do
it 'has expected :linksSecondary' do
expected_links_secondary = [
::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Create group',
+ **menu_data_tracking_attrs('create_group')
+ },
href: '/groups/new',
id: 'create',
title: 'Create group'
@@ -356,7 +388,8 @@ RSpec.describe Nav::TopNavHelper do
it 'has expected :primary' do
expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
data: {
- qa_selector: 'milestones_link'
+ qa_selector: 'milestones_link',
+ **menu_data_tracking_attrs('milestones')
},
href: '/dashboard/milestones',
icon: 'clock',
@@ -383,7 +416,8 @@ RSpec.describe Nav::TopNavHelper do
it 'has expected :primary' do
expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
data: {
- qa_selector: 'snippets_link'
+ qa_selector: 'snippets_link',
+ **menu_data_tracking_attrs('snippets')
},
href: '/dashboard/snippets',
icon: 'snippet',
@@ -410,7 +444,8 @@ RSpec.describe Nav::TopNavHelper do
it 'has expected :primary' do
expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
data: {
- qa_selector: 'activity_link'
+ qa_selector: 'activity_link',
+ **menu_data_tracking_attrs('activity')
},
href: '/dashboard/activity',
icon: 'history',
@@ -439,6 +474,11 @@ RSpec.describe Nav::TopNavHelper do
it 'has admin as first :secondary item' do
expected_admin_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Admin',
+ **menu_data_tracking_attrs('admin')
+ },
id: 'admin',
title: 'Admin',
icon: 'admin',
@@ -458,7 +498,7 @@ RSpec.describe Nav::TopNavHelper do
title: 'Leave Admin Mode',
icon: 'lock-open',
href: '/admin/session/destroy',
- data: { method: 'post' }
+ data: { method: 'post', **menu_data_tracking_attrs('leave_admin_mode') }
)
expect(subject[:secondary].last).to eq(expected_leave_admin_mode_item)
end
@@ -469,6 +509,11 @@ RSpec.describe Nav::TopNavHelper do
it 'has enter_admin_mode as last :secondary item' do
expected_enter_admin_mode_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'menu_item_link',
+ qa_title: 'Enter Admin Mode',
+ **menu_data_tracking_attrs('enter_admin_mode')
+ },
id: 'enter_admin_mode',
title: 'Enter Admin Mode',
icon: 'lock',
@@ -533,4 +578,12 @@ RSpec.describe Nav::TopNavHelper do
end
end
end
+
+ def menu_data_tracking_attrs(label)
+ {
+ track_label: "menu_#{label}",
+ track_action: 'click_dropdown',
+ track_property: 'navigation'
+ }
+ end
end
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index 913a38d353f..68a6b6293c8 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -329,10 +329,6 @@ RSpec.describe NotesHelper do
allow(helper).to receive(:current_user).and_return(guest)
end
- it 'sets last_fetched_at to 0 when start_at_zero is true' do
- expect(helper.notes_data(issue, true)[:lastFetchedAt]).to eq(0)
- end
-
it 'includes the current notes filter for the user' do
guest.set_notes_filter(UserPreference::NOTES_FILTERS[:only_comments], issue)
diff --git a/spec/helpers/operations_helper_spec.rb b/spec/helpers/operations_helper_spec.rb
index 857771ebba6..9e50712a386 100644
--- a/spec/helpers/operations_helper_spec.rb
+++ b/spec/helpers/operations_helper_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe OperationsHelper do
expect(subject).to eq(
'alerts_setup_url' => help_page_path('operations/incident_management/integrations.md', anchor: 'configuration'),
'alerts_usage_url' => project_alert_management_index_path(project),
- 'prometheus_form_path' => project_integration_path(project, prometheus_integration),
+ 'prometheus_form_path' => project_settings_integration_path(project, prometheus_integration),
'prometheus_reset_key_path' => reset_alerting_token_project_settings_operations_path(project),
'prometheus_authorization_key' => nil,
'prometheus_api_url' => nil,
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index 01235c7bb51..99f750bb858 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe PreferencesHelper do
it "returns user's theme's css_class" do
stub_user(theme_id: 3)
- expect(helper.user_application_theme).to eq 'ui-light'
+ expect(helper.user_application_theme).to eq 'ui-light-gray'
end
it 'returns the default when id is invalid' do
diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb
index 90cf3cb03f8..d04aa9a9d04 100644
--- a/spec/helpers/projects/pipeline_helper_spec.rb
+++ b/spec/helpers/projects/pipeline_helper_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::PipelineHelper do
+ include Ci::BuildsHelper
+
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:raw_pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) }
@@ -14,10 +16,14 @@ RSpec.describe Projects::PipelineHelper do
it 'returns pipeline tabs data' do
expect(pipeline_tabs_data).to include({
can_generate_codequality_reports: pipeline.can_generate_codequality_reports?.to_json,
+ failed_jobs_count: pipeline.failed_builds.count,
+ failed_jobs_summary: prepare_failed_jobs_summary_data(pipeline.failed_builds),
+ full_path: project.full_path,
graphql_resource_etag: graphql_etag_pipeline_path(pipeline),
metrics_path: namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: project.namespace, project_id: project, format: :json),
pipeline_iid: pipeline.iid,
- pipeline_project_path: project.full_path
+ pipeline_project_path: project.full_path,
+ total_job_count: pipeline.total_size
})
end
end
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
index 4e3a0147509..2414a1782c5 100644
--- a/spec/helpers/projects/project_members_helper_spec.rb
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -86,4 +86,30 @@ RSpec.describe Projects::ProjectMembersHelper do
end
end
end
+
+ describe '#project_member_header_subtext' do
+ before do
+ allow(helper).to receive(:can?).with(current_user, :admin_project_member, project).and_return(can_admin_member)
+ end
+
+ context 'when user can admin project members' do
+ let(:can_admin_member) { true }
+
+ before do
+ assign(:project, project)
+ end
+
+ it 'contains expected text' do
+ expect(helper.project_member_header_subtext(project)).to match('You can invite a new member to')
+ end
+ end
+
+ context 'when user cannot admin project members' do
+ let(:can_admin_member) { false }
+
+ it 'contains expected text' do
+ expect(helper.project_member_header_subtext(project)).to match('Members can be added by project')
+ end
+ end
+ end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index d13c5dfcc9e..e0c98bbc161 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -1112,4 +1112,200 @@ RSpec.describe ProjectsHelper do
it_behaves_like 'configure import method modal'
end
+
+ describe '#show_inactive_project_deletion_banner?' do
+ shared_examples 'does not show the banner' do |pass_project: true|
+ it { expect(helper.show_inactive_project_deletion_banner?(pass_project ? project : nil)).to be(false) }
+ end
+
+ context 'with no project' do
+ it_behaves_like 'does not show the banner', pass_project: false
+ end
+
+ context 'with unsaved project' do
+ let_it_be(:project) { build(:project) }
+
+ it_behaves_like 'does not show the banner'
+ end
+
+ context 'with the setting disabled' do
+ before do
+ stub_application_setting(delete_inactive_projects: false)
+ end
+
+ it_behaves_like 'does not show the banner'
+ end
+
+ context 'with the setting enabled' do
+ before do
+ stub_application_setting(delete_inactive_projects: true)
+ end
+
+ context 'with the feature flag disabled' do
+ before do
+ stub_feature_flags(inactive_projects_deletion: false)
+ end
+
+ it_behaves_like 'does not show the banner'
+ end
+
+ context 'with the feature flag enabled' do
+ before do
+ stub_feature_flags(inactive_projects_deletion: true)
+ stub_application_setting(inactive_projects_min_size_mb: 0)
+ stub_application_setting(inactive_projects_send_warning_email_after_months: 1)
+ end
+
+ context 'with an active project' do
+ it_behaves_like 'does not show the banner'
+ end
+
+ context 'with an inactive project' do
+ before do
+ project.statistics.storage_size = 1.megabyte
+ project.last_activity_at = 1.year.ago
+ project.save!
+ end
+
+ it 'shows the banner' do
+ expect(helper.show_inactive_project_deletion_banner?(project)).to be(true)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#inactive_project_deletion_date' do
+ let(:tracker) { instance_double(::Gitlab::InactiveProjectsDeletionWarningTracker) }
+
+ before do
+ stub_application_setting(inactive_projects_delete_after_months: 2)
+ stub_application_setting(inactive_projects_send_warning_email_after_months: 1)
+
+ allow(::Gitlab::InactiveProjectsDeletionWarningTracker).to receive(:new).with(project.id).and_return(tracker)
+ allow(tracker).to receive(:scheduled_deletion_date).and_return('2022-03-01')
+ end
+
+ it 'returns the deletion date' do
+ expect(helper.inactive_project_deletion_date(project)).to eq('2022-03-01')
+ end
+ end
+
+ describe '#can_admin_associated_clusters?' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project) }
+
+ subject { helper.send(:can_admin_associated_clusters?, project) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper)
+ .to receive(:can?)
+ .with(current_user, :admin_cluster, namespace)
+ .and_return(user_can_admin_cluster)
+ end
+
+ context 'when project has a cluster' do
+ let_it_be(:namespace) { project }
+
+ before do
+ create(:cluster, projects: [namespace])
+ end
+
+ context 'if user can admin cluster' do
+ let_it_be(:user_can_admin_cluster) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'if user can not admin cluster' do
+ let_it_be(:user_can_admin_cluster) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when project has a group cluster' do
+ let_it_be(:namespace) { create(:group) }
+
+ before do
+ project.update!(namespace: namespace)
+ create(:cluster, :group, groups: [namespace])
+ end
+
+ context 'if user can admin cluster' do
+ let_it_be(:user_can_admin_cluster) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'if user can not admin cluster' do
+ let_it_be(:user_can_admin_cluster) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when project doesn\'t have a cluster' do
+ let_it_be(:namespace) { project }
+
+ context 'if user can admin cluster' do
+ let_it_be(:user_can_admin_cluster) { true }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'if user can not admin cluster' do
+ let_it_be(:user_can_admin_cluster) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ describe '#show_clusters_alert?' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { helper.show_clusters_alert?(project) }
+
+ where(:is_gitlab_com, :user_can_admin_cluster, :expected) do
+ false | false | false
+ false | true | false
+ true | false | false
+ true | true | true
+ end
+
+ with_them do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ allow(helper).to receive(:can_admin_associated_clusters?).and_return(user_can_admin_cluster)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+ end
+
+ describe '#clusters_deprecation_alert_message' do
+ subject { helper.clusters_deprecation_alert_message }
+
+ before do
+ allow(helper).to receive(:has_active_license?).and_return(has_active_license)
+ end
+
+ context 'if user has an active licence' do
+ let_it_be(:has_active_license) { true }
+
+ it 'displays the correct message' do
+ expect(subject).to eq(s_('Clusters|The certificate-based Kubernetes integration has been deprecated and will be turned off at the end of November 2022. Please %{linkStart}migrate to the GitLab agent for Kubernetes%{linkEnd} or reach out to GitLab support.'))
+ end
+ end
+
+ context 'if user doesn\'t have an active licence' do
+ let_it_be(:has_active_license) { false }
+
+ it 'displays the correct message' do
+ expect(subject).to eq(s_('Clusters|The certificate-based Kubernetes integration has been deprecated and will be turned off at the end of November 2022. Please %{linkStart}migrate to the GitLab agent for Kubernetes%{linkEnd}.'))
+ end
+ end
+ end
end
diff --git a/spec/helpers/routing/pseudonymization_helper_spec.rb b/spec/helpers/routing/pseudonymization_helper_spec.rb
index cf716931fe2..dd4cc55ed2b 100644
--- a/spec/helpers/routing/pseudonymization_helper_spec.rb
+++ b/spec/helpers/routing/pseudonymization_helper_spec.rb
@@ -8,8 +8,7 @@ RSpec.describe ::Routing::PseudonymizationHelper do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:subproject) { create(:project, group: subgroup) }
let_it_be(:issue) { create(:issue, project: project) }
-
- let(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let(:subject) { helper.masked_page_url(group: group, project: project) }
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 8e2ec014383..4117d577f20 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -534,24 +534,26 @@ RSpec.describe SearchHelper do
end
describe '#repository_ref' do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:project) { create(:project, :repository) }
- let(:params) { { repository_ref: 'the-repository-ref-param' } }
+ let(:default_branch) { project.default_branch }
+ let(:params) { { repository_ref: ref, project_id: project_id } }
subject { repository_ref(project) }
- it { is_expected.to eq('the-repository-ref-param') }
-
- context 'when the param :repository_ref is not set' do
- let(:params) { { repository_ref: nil } }
-
- it { is_expected.to eq(project.default_branch) }
+ where(:project_id, :ref, :expected_ref) do
+ 123 | 'ref-param' | 'ref-param'
+ 123 | nil | ref(:default_branch)
+ 123 | 111111 | '111111'
+ nil | 'ref-param' | ref(:default_branch)
end
- context 'when the repository_ref param is a number' do
- let(:params) { { repository_ref: 111111 } }
-
- it { is_expected.to eq('111111') }
+ with_them do
+ it 'returns expected_ref' do
+ expect(repository_ref(project)).to eq(expected_ref)
+ end
end
end
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index 913be164a00..37520affc5a 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -64,6 +64,33 @@ RSpec.describe SnippetsHelper do
end
end
+ describe '#embedded_snippet_copy_button' do
+ let(:blob) { snippet.blobs.first }
+ let(:ref) { blob.repository.root_ref }
+
+ subject { embedded_copy_snippet_button(blob) }
+
+ context 'for Personal Snippets' do
+ let(:snippet) { public_personal_snippet }
+
+ it 'returns copy button of embedded snippets' do
+ expect(subject).to eq(copy_button("#{blob.id}"))
+ end
+ end
+
+ context 'for Project Snippets' do
+ let(:snippet) { public_project_snippet }
+
+ it 'returns copy button of embedded snippets' do
+ expect(subject).to eq(copy_button("#{blob.id}"))
+ end
+ end
+
+ def copy_button(blob_id)
+ "<button class=\"gl-button btn btn-default copy-to-clipboard-btn\" title=\"Copy snippet contents\" onclick=\"copyToClipboard(&#39;.blob-content[data-blob-id=&quot;#{blob_id}&quot;] &gt; pre&#39;)\">#{external_snippet_icon('copy-to-clipboard')}</button>"
+ end
+ end
+
describe '#snippet_badge' do
let(:snippet) { build(:personal_snippet, visibility) }
diff --git a/spec/helpers/sorting_helper_spec.rb b/spec/helpers/sorting_helper_spec.rb
index e20fb77ad75..1ee920d1c95 100644
--- a/spec/helpers/sorting_helper_spec.rb
+++ b/spec/helpers/sorting_helper_spec.rb
@@ -62,6 +62,12 @@ RSpec.describe SortingHelper do
end
end
+ describe '#can_sort_by_issue_weight?' do
+ it 'returns false' do
+ expect(helper.can_sort_by_issue_weight?(false)).to be_falsey
+ end
+ end
+
def stub_controller_path(value)
allow(helper.controller).to receive(:controller_path).and_return(value)
end
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index 5bc4024ae24..c2c508cf485 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe StorageHelper do
end
end
- describe "storage_enforcement_banner" do
+ describe "storage_enforcement_banner", :saas do
let_it_be_with_refind(:current_user) { create(:user) }
let_it_be(:free_group) { create(:group) }
let_it_be(:paid_group) { create(:group) }
@@ -60,8 +60,9 @@ RSpec.describe StorageHelper do
allow(helper).to receive(:can?).with(current_user, :admin_namespace, free_group).and_return(true)
allow(helper).to receive(:can?).with(current_user, :admin_namespace, paid_group).and_return(true)
allow(helper).to receive(:current_user) { current_user }
- allow(Gitlab).to receive(:com?).and_return(true)
allow(paid_group).to receive(:paid?).and_return(true)
+
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
end
describe "#storage_enforcement_banner_info" do
@@ -108,6 +109,28 @@ RSpec.describe StorageHelper do
expect(helper.storage_enforcement_banner_info(free_group)[:text]).to eql("From #{storage_enforcement_date} storage limits will apply to this namespace. You are currently using 100 KB of namespace storage. View and manage your usage from <strong>Group settings &gt; Usage quotas</strong>.")
end
end
+
+ context 'when the given group is a sub-group' do
+ let_it_be(:sub_group) { build(:group) }
+
+ before do
+ allow(sub_group).to receive(:root_ancestor).and_return(free_group)
+ end
+
+ it 'returns the banner hash' do
+ expect(helper.storage_enforcement_banner_info(sub_group).keys).to match_array(%i(text variant callouts_feature_name callouts_path learn_more_link))
+ end
+ end
+ end
+ end
+
+ context 'when the :namespace_storage_limit_bypass_date_check is enabled', :freeze_time do
+ before do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: true)
+ end
+
+ it 'returns the enforcement info' do
+ expect(helper.storage_enforcement_banner_info(free_group)[:text]).to include("From #{Date.current} storage limits will apply to this namespace.")
end
end
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index 3787864e144..922fb1d7c92 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -152,7 +152,7 @@ RSpec.describe TodosHelper do
shared_examples 'a rendered state pill' do |attr|
it 'returns expected html' do
aggregate_failures do
- expect(subject).to have_css(".status-box-#{attr[:type]}-#{attr[:state].dasherize}")
+ expect(subject).to have_css(attr[:css])
expect(subject).to have_content(attr[:state].capitalize)
end
end
@@ -167,12 +167,20 @@ RSpec.describe TodosHelper do
it_behaves_like 'no state pill'
+ context 'closed MR' do
+ before do
+ todo.target.update!(state: 'closed')
+ end
+
+ it_behaves_like 'a rendered state pill', css: '.gl-bg-red-500', state: 'closed'
+ end
+
context 'merged MR' do
before do
todo.target.update!(state: 'merged')
end
- it_behaves_like 'a rendered state pill', type: 'mr', state: 'merged'
+ it_behaves_like 'a rendered state pill', css: '.gl-bg-blue-500', state: 'merged'
end
end
@@ -186,7 +194,7 @@ RSpec.describe TodosHelper do
todo.target.update!(state: 'closed')
end
- it_behaves_like 'a rendered state pill', type: 'issue', state: 'closed'
+ it_behaves_like 'a rendered state pill', css: '.gl-bg-blue-500', state: 'closed'
end
end
@@ -200,7 +208,7 @@ RSpec.describe TodosHelper do
todo.target.resolve!
end
- it_behaves_like 'a rendered state pill', type: 'alert', state: 'resolved'
+ it_behaves_like 'a rendered state pill', css: '.gl-bg-blue-500', state: 'resolved'
end
end
end
diff --git a/spec/helpers/tooling/visual_review_helper_spec.rb b/spec/helpers/tooling/visual_review_helper_spec.rb
new file mode 100644
index 00000000000..7fb9f5fadf2
--- /dev/null
+++ b/spec/helpers/tooling/visual_review_helper_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Tooling::VisualReviewHelper do
+ describe '#visual_review_toolbar_options' do
+ subject(:result) { helper.visual_review_toolbar_options }
+
+ before do
+ stub_env('REVIEW_APPS_MERGE_REQUEST_IID', '123')
+ end
+
+ it 'returns the correct params' do
+ expect(result).to eq(
+ 'data-merge-request-id': '123',
+ 'data-mr-url': 'https://gitlab.com',
+ 'data-project-id': '278964',
+ 'data-project-path': 'gitlab-org/gitlab',
+ 'data-require-auth': false,
+ 'id': 'review-app-toolbar-script',
+ 'src': 'https://gitlab.com/assets/webpack/visual_review_toolbar.js'
+ )
+ end
+ end
+end
diff --git a/spec/initializers/forbid_sidekiq_in_transactions_spec.rb b/spec/initializers/forbid_sidekiq_in_transactions_spec.rb
index 6cd15d37ad4..a89ac73f6fa 100644
--- a/spec/initializers/forbid_sidekiq_in_transactions_spec.rb
+++ b/spec/initializers/forbid_sidekiq_in_transactions_spec.rb
@@ -3,36 +3,57 @@
require 'spec_helper'
RSpec.describe 'Sidekiq::Worker' do
- let(:worker_class) do
- Class.new do
- include Sidekiq::Worker
+ shared_examples_for 'a forbiddable operation within a transaction' do
+ it 'allows the operation outside of a transaction' do
+ expect { operation }.not_to raise_error
+ end
- def perform
+ it 'forbids the operation within a transaction' do
+ ApplicationRecord.transaction do
+ expect { operation }.to raise_error(Sidekiq::Worker::EnqueueFromTransactionError)
end
end
- end
- it 'allows sidekiq worker outside of a transaction' do
- expect { worker_class.perform_async }.not_to raise_error
- end
+ it 'allows the operation within a transaction if skipped' do
+ Sidekiq::Worker.skipping_transaction_check do
+ ApplicationRecord.transaction do
+ expect { operation }.not_to raise_error
+ end
+ end
+ end
- it 'forbids queue sidekiq worker in a transaction' do
- Project.transaction do
- expect { worker_class.perform_async }.to raise_error(Sidekiq::Worker::EnqueueFromTransactionError)
+ it 'forbids the operation if it is within a Ci::ApplicationRecord transaction' do
+ Ci::Pipeline.transaction do
+ expect { operation }.to raise_error(Sidekiq::Worker::EnqueueFromTransactionError)
+ end
end
end
- it 'allows sidekiq worker in a transaction if skipped' do
- Sidekiq::Worker.skipping_transaction_check do
- Project.transaction do
- expect { worker_class.perform_async }.not_to raise_error
+ context 'for sidekiq workers' do
+ let(:worker_class) do
+ Class.new do
+ include Sidekiq::Worker
+
+ def perform
+ end
end
end
+
+ let(:operation) { worker_class.perform_async }
+
+ it_behaves_like 'a forbiddable operation within a transaction'
end
- it 'forbids queue sidekiq worker in a Ci::ApplicationRecord transaction' do
- Ci::Pipeline.transaction do
- expect { worker_class.perform_async }.to raise_error(Sidekiq::Worker::EnqueueFromTransactionError)
+ context 'for mailers' do
+ let(:mailer_class) do
+ Class.new(ApplicationMailer) do
+ def test_mail
+ end
+ end
end
+
+ let(:operation) { mailer_class.test_mail.deliver_later }
+
+ it_behaves_like 'a forbiddable operation within a transaction'
end
end
diff --git a/spec/initializers/mail_encoding_patch_spec.rb b/spec/initializers/mail_encoding_patch_spec.rb
index 12539c9ca52..e6cede817b5 100644
--- a/spec/initializers/mail_encoding_patch_spec.rb
+++ b/spec/initializers/mail_encoding_patch_spec.rb
@@ -1,11 +1,13 @@
# frozen_string_literal: true
# rubocop:disable RSpec/VariableDefinition, RSpec/VariableName
-require 'fast_spec_helper'
+require 'spec_helper'
require 'mail'
require_relative '../../config/initializers/mail_encoding_patch'
RSpec.describe 'Mail quoted-printable transfer encoding patch and Unicode characters' do
+ include FixtureHelpers
+
shared_examples 'email encoding' do |email|
it 'enclosing in a new object does not change the encoded original' do
new_email = Mail.new(email)
@@ -204,5 +206,35 @@ RSpec.describe 'Mail quoted-printable transfer encoding patch and Unicode charac
end
end
end
+
+ context 'empty text mail with unusual body encoding' do
+ it 'decodes email successfully' do
+ email = Mail::Message.new(nil)
+
+ Mail::Encodings.get_all.each do |encoder|
+ email.body = nil
+ email.body.charset = 'utf-8'
+ email.body.encoding = encoder.to_s
+
+ expect { email.encoded }.not_to raise_error
+ end
+ end
+ end
+
+ context 'frozen email body content with unusual body encoding' do
+ let(:content) { fixture_file("emails/ios_default.eml") }
+
+ it 'decodes email successfully' do
+ email = Mail::Message.new(content)
+
+ Mail::Encodings.get_all.each do |encoder|
+ email.body = content.freeze
+ email.body.charset = 'utf-8'
+ email.body.encoding = encoder.to_s
+
+ expect { email.encoded }.not_to raise_error
+ end
+ end
+ end
end
# rubocop:enable RSpec/VariableDefinition, RSpec/VariableName
diff --git a/spec/initializers/omniauth_spec.rb b/spec/initializers/omniauth_spec.rb
index 928eac8c533..f0a73162dd2 100644
--- a/spec/initializers/omniauth_spec.rb
+++ b/spec/initializers/omniauth_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'OmniAuth initializer for GitLab' do
- let(:load_omniauth_initializer) do
+ def load_omniauth_initializer
load Rails.root.join('config/initializers/omniauth.rb')
end
@@ -15,32 +15,15 @@ RSpec.describe 'OmniAuth initializer for GitLab' do
before do
allow(Settings).to receive(:gitlab).and_return({ 'base_url' => base_url })
allow(Gitlab::OmniauthInitializer).to receive(:full_host).and_return('proc')
- end
-
- context 'with feature flags not available' do
- before do
- expect(Feature).to receive(:feature_flags_available?).and_return(false)
- load_omniauth_initializer
- end
- it { is_expected.to eq(base_url) }
+ load_omniauth_initializer
end
- context 'with the omniauth_initializer_fullhost_proc FF disabled' do
- before do
- stub_feature_flags(omniauth_initializer_fullhost_proc: false)
- load_omniauth_initializer
- end
-
- it { is_expected.to eq(base_url) }
+ # to clear existing mocks and prevent order-dependent failures
+ after(:all) do
+ load_omniauth_initializer
end
- context 'with the omniauth_initializer_fullhost_proc FF disabled' do
- before do
- load_omniauth_initializer
- end
-
- it { is_expected.to eq('proc') }
- end
+ it { is_expected.to eq('proc') }
end
end
diff --git a/spec/initializers/set_active_support_hash_digest_class_spec.rb b/spec/initializers/set_active_support_hash_digest_class_spec.rb
new file mode 100644
index 00000000000..256e8a1f218
--- /dev/null
+++ b/spec/initializers/set_active_support_hash_digest_class_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'setting ActiveSupport::Digest.hash_digest_class' do
+ it 'overrides config.active_support.hash_digest_class' do
+ expect(ActiveSupport::Digest.hash_digest_class).to eq(Gitlab::HashDigest::Facade)
+ end
+end
diff --git a/spec/initializers/validate_database_config_spec.rb b/spec/initializers/validate_database_config_spec.rb
index 5f3f950a852..23a3d9a2950 100644
--- a/spec/initializers/validate_database_config_spec.rb
+++ b/spec/initializers/validate_database_config_spec.rb
@@ -14,9 +14,6 @@ RSpec.describe 'validate database config' do
end
before do
- allow(File).to receive(:exist?).and_call_original
- allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false)
-
# The `AS::ConfigurationFile` calls `read` in `def initialize`
# thus we cannot use `expect_next_instance_of`
# rubocop:disable RSpec/AnyInstanceOf
diff --git a/spec/lib/api/entities/ci/job_request/image_spec.rb b/spec/lib/api/entities/ci/job_request/image_spec.rb
index 55aade03129..3ab14ffc3ae 100644
--- a/spec/lib/api/entities/ci/job_request/image_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/image_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe API::Entities::Ci::JobRequest::Image do
let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }]}
- let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports)}
+ let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports, pull_policy: ['if-not-present']) }
let(:entity) { described_class.new(image) }
subject { entity.as_json }
@@ -28,4 +28,18 @@ RSpec.describe API::Entities::Ci::JobRequest::Image do
expect(subject[:ports]).to be_nil
end
end
+
+ it 'returns the pull policy' do
+ expect(subject[:pull_policy]).to eq(['if-not-present'])
+ end
+
+ context 'when the FF ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'does not return the pull policy' do
+ expect(subject).not_to have_key(:pull_policy)
+ end
+ end
end
diff --git a/spec/lib/api/entities/personal_access_token_with_details_spec.rb b/spec/lib/api/entities/personal_access_token_with_details_spec.rb
new file mode 100644
index 00000000000..a53d6febba1
--- /dev/null
+++ b/spec/lib/api/entities/personal_access_token_with_details_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::PersonalAccessTokenWithDetails do
+ describe '#as_json' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user, expires_at: nil) }
+
+ let(:entity) { described_class.new(token) }
+
+ it 'returns token data' do
+ expect(entity.as_json).to eq({
+ id: token.id,
+ name: token.name,
+ revoked: false,
+ created_at: token.created_at,
+ scopes: ['api'],
+ user_id: user.id,
+ last_used_at: nil,
+ active: true,
+ expires_at: nil,
+ expired: false,
+ expires_soon: false,
+ revoke_path: Gitlab::Routing.url_helpers.revoke_profile_personal_access_token_path(token)
+ })
+ end
+ end
+end
diff --git a/spec/lib/api/entities/wiki_page_spec.rb b/spec/lib/api/entities/wiki_page_spec.rb
index 238c8233a14..c75bba12484 100644
--- a/spec/lib/api/entities/wiki_page_spec.rb
+++ b/spec/lib/api/entities/wiki_page_spec.rb
@@ -43,6 +43,23 @@ RSpec.describe API::Entities::WikiPage do
expect(subject[:content]).to eq("<div>&#x000A;<p><strong>Test</strong> <em>content</em></p>&#x000A;</div>")
end
end
+
+ context 'when content contains a reference' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:issue) { create(:issue, project: project) }
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'page_with_ref', content: issue.to_reference) }
+ let(:expected_content) { %r{<a href=".*#{issue.iid}".*>#{issue.to_reference}</a>} }
+
+ before do
+ params[:current_user] = user
+ project.add_developer(user)
+ end
+
+ it 'expands the reference in the content' do
+ expect(subject[:content]).to match(expected_content)
+ end
+ end
end
context 'when it is false' do
diff --git a/spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb b/spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb
new file mode 100644
index 00000000000..ae5c21e01c3
--- /dev/null
+++ b/spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::ProjectStatsRefreshConflictsHelpers do
+ let_it_be(:project) { create(:project) }
+
+ let(:api_class) do
+ Class.new do
+ include API::Helpers::ProjectStatsRefreshConflictsHelpers
+ end
+ end
+
+ let(:api_controller) { api_class.new }
+
+ describe '#reject_if_build_artifacts_size_refreshing!' do
+ let(:entrypoint) { '/some/thing' }
+
+ before do
+ allow(project).to receive(:refreshing_build_artifacts_size?).and_return(refreshing)
+ allow(api_controller).to receive_message_chain(:request, :path).and_return(entrypoint)
+ end
+
+ context 'when project is undergoing stats refresh' do
+ let(:refreshing) { true }
+
+ it 'logs and returns a 409 conflict error' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger)
+ .to receive(:warn_request_rejected_during_stats_refresh)
+ .with(project.id)
+
+ expect(api_controller).to receive(:conflict!)
+
+ api_controller.reject_if_build_artifacts_size_refreshing!(project)
+ end
+ end
+
+ context 'when project is not undergoing stats refresh' do
+ let(:refreshing) { false }
+
+ it 'does nothing' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).not_to receive(:warn_request_rejected_during_stats_refresh)
+ expect(api_controller).not_to receive(:conflict)
+
+ api_controller.reject_if_build_artifacts_size_refreshing!(project)
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/sse_helpers_spec.rb b/spec/lib/api/helpers/sse_helpers_spec.rb
deleted file mode 100644
index 397051d9142..00000000000
--- a/spec/lib/api/helpers/sse_helpers_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe API::Helpers::SSEHelpers do
- include Gitlab::Routing
-
- let_it_be(:project) { create(:project) }
-
- subject { Class.new.include(described_class).new }
-
- describe '#request_from_sse?' do
- before do
- allow(subject).to receive(:request).and_return(request)
- end
-
- context 'when referer is nil' do
- let(:request) { double(referer: nil)}
-
- it 'returns false' do
- expect(URI).not_to receive(:parse)
- expect(subject.request_from_sse?(project)).to eq false
- end
- end
-
- context 'when referer is not from SSE' do
- let(:request) { double(referer: 'https://gitlab.com')}
-
- it 'returns false' do
- expect(URI).to receive(:parse).and_call_original
- expect(subject.request_from_sse?(project)).to eq false
- end
- end
-
- context 'when referer is from SSE' do
- let(:request) { double(referer: project_show_sse_path(project, 'master/README.md'))}
-
- it 'returns true' do
- expect(URI).to receive(:parse).and_call_original
- expect(subject.request_from_sse?(project)).to eq true
- end
- end
- end
-end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 78ce9642392..23c97e2c0a3 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Helpers do
using RSpec::Parameterized::TableSyntax
- subject { Class.new.include(described_class).new }
+ subject(:helper) { Class.new.include(described_class).new }
describe '#current_user' do
include Rack::Test::Methods
@@ -69,17 +69,17 @@ RSpec.describe API::Helpers do
shared_examples 'project finder' do
context 'when project exists' do
it 'returns requested project' do
- expect(subject.find_project(existing_id)).to eq(project)
+ expect(helper.find_project(existing_id)).to eq(project)
end
it 'returns nil' do
- expect(subject.find_project(non_existing_id)).to be_nil
+ expect(helper.find_project(non_existing_id)).to be_nil
end
end
context 'when project id is not provided' do
it 'returns nil' do
- expect(subject.find_project(nil)).to be_nil
+ expect(helper.find_project(nil)).to be_nil
end
end
end
@@ -105,7 +105,7 @@ RSpec.describe API::Helpers do
it 'does not hit the database' do
expect(Project).not_to receive(:find_by_full_path)
- subject.find_project(non_existing_id)
+ helper.find_project(non_existing_id)
end
end
end
@@ -116,7 +116,7 @@ RSpec.describe API::Helpers do
it 'does not return the project pending delete' do
expect(Project).not_to receive(:find_by_full_path)
- expect(subject.find_project(project_pending_delete.id)).to be_nil
+ expect(helper.find_project(project_pending_delete.id)).to be_nil
end
end
@@ -126,7 +126,7 @@ RSpec.describe API::Helpers do
it 'does not return the hidden project' do
expect(Project).not_to receive(:find_by_full_path)
- expect(subject.find_project(hidden_project.id)).to be_nil
+ expect(helper.find_project(hidden_project.id)).to be_nil
end
end
end
@@ -138,25 +138,25 @@ RSpec.describe API::Helpers do
shared_examples 'private project without access' do
before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
it 'returns not found' do
- expect(subject).to receive(:not_found!)
+ expect(helper).to receive(:not_found!)
- subject.find_project!(project.id)
+ helper.find_project!(project.id)
end
end
context 'when user is authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:initial_current_user).and_return(user)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
end
context 'public project' do
it 'returns requested project' do
- expect(subject.find_project!(project.id)).to eq(project)
+ expect(helper.find_project!(project.id)).to eq(project)
end
end
@@ -167,13 +167,13 @@ RSpec.describe API::Helpers do
context 'when user is not authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(nil)
- allow(subject).to receive(:initial_current_user).and_return(nil)
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:initial_current_user).and_return(nil)
end
context 'public project' do
it 'returns requested project' do
- expect(subject.find_project!(project.id)).to eq(project)
+ expect(helper.find_project!(project.id)).to eq(project)
end
end
@@ -188,21 +188,21 @@ RSpec.describe API::Helpers do
let(:user) { project.first_owner}
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:authorized_project_scope?).and_return(true)
- allow(subject).to receive(:job_token_authentication?).and_return(false)
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:authorized_project_scope?).and_return(true)
+ allow(helper).to receive(:job_token_authentication?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
shared_examples 'project finder' do
context 'when project exists' do
it 'returns requested project' do
- expect(subject.find_project!(existing_id)).to eq(project)
+ expect(helper.find_project!(existing_id)).to eq(project)
end
it 'returns nil' do
- expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
- expect(subject.find_project!(non_existing_id)).to be_nil
+ expect(helper).to receive(:render_api_error!).with('404 Project Not Found', 404)
+ expect(helper.find_project!(non_existing_id)).to be_nil
end
end
end
@@ -227,9 +227,9 @@ RSpec.describe API::Helpers do
it 'does not hit the database' do
expect(Project).not_to receive(:find_by_full_path)
- expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
+ expect(helper).to receive(:render_api_error!).with('404 Project Not Found', 404)
- subject.find_project!(non_existing_id)
+ helper.find_project!(non_existing_id)
end
end
end
@@ -243,25 +243,25 @@ RSpec.describe API::Helpers do
shared_examples 'private group without access' do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
it 'returns not found' do
- expect(subject).to receive(:not_found!)
+ expect(helper).to receive(:not_found!)
- subject.find_group!(group.id)
+ helper.find_group!(group.id)
end
end
context 'when user is authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:initial_current_user).and_return(user)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group!(group.id)).to eq(group)
+ expect(helper.find_group!(group.id)).to eq(group)
end
end
@@ -272,13 +272,13 @@ RSpec.describe API::Helpers do
context 'when user is not authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(nil)
- allow(subject).to receive(:initial_current_user).and_return(nil)
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:initial_current_user).and_return(nil)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group!(group.id)).to eq(group)
+ expect(helper.find_group!(group.id)).to eq(group)
end
end
@@ -293,21 +293,21 @@ RSpec.describe API::Helpers do
let(:user) { group.first_owner }
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:authorized_project_scope?).and_return(true)
- allow(subject).to receive(:job_token_authentication?).and_return(false)
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:authorized_project_scope?).and_return(true)
+ allow(helper).to receive(:job_token_authentication?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
shared_examples 'group finder' do
context 'when group exists' do
it 'returns requested group' do
- expect(subject.find_group!(existing_id)).to eq(group)
+ expect(helper.find_group!(existing_id)).to eq(group)
end
it 'returns nil' do
- expect(subject).to receive(:render_api_error!).with('404 Group Not Found', 404)
- expect(subject.find_group!(non_existing_id)).to be_nil
+ expect(helper).to receive(:render_api_error!).with('404 Group Not Found', 404)
+ expect(helper.find_group!(non_existing_id)).to be_nil
end
end
end
@@ -335,25 +335,25 @@ RSpec.describe API::Helpers do
shared_examples 'private group without access' do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
it 'returns not found' do
- expect(subject).to receive(:not_found!)
+ expect(helper).to receive(:not_found!)
- subject.find_group_by_full_path!(group.full_path)
+ helper.find_group_by_full_path!(group.full_path)
end
end
context 'when user is authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:initial_current_user).and_return(user)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
+ expect(helper.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
@@ -367,7 +367,7 @@ RSpec.describe API::Helpers do
end
it 'returns requested group with access' do
- expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
+ expect(helper.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
end
@@ -375,13 +375,13 @@ RSpec.describe API::Helpers do
context 'when user is not authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(nil)
- allow(subject).to receive(:initial_current_user).and_return(nil)
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:initial_current_user).and_return(nil)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
+ expect(helper.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
@@ -397,13 +397,13 @@ RSpec.describe API::Helpers do
shared_examples 'namespace finder' do
context 'when namespace exists' do
it 'returns requested namespace' do
- expect(subject.find_namespace(existing_id)).to eq(namespace)
+ expect(helper.find_namespace(existing_id)).to eq(namespace)
end
end
context "when namespace doesn't exists" do
it 'returns nil' do
- expect(subject.find_namespace(non_existing_id)).to be_nil
+ expect(helper.find_namespace(non_existing_id)).to be_nil
end
end
end
@@ -427,9 +427,9 @@ RSpec.describe API::Helpers do
let(:user1) { create(:user) }
before do
- allow(subject).to receive(:current_user).and_return(user1)
- allow(subject).to receive(:header).and_return(nil)
- allow(subject).to receive(:not_found!).and_raise('404 Namespace not found')
+ allow(helper).to receive(:current_user).and_return(user1)
+ allow(helper).to receive(:header).and_return(nil)
+ allow(helper).to receive(:not_found!).and_raise('404 Namespace not found')
end
context 'when namespace is group' do
@@ -477,7 +477,7 @@ RSpec.describe API::Helpers do
describe '#find_namespace!' do
let(:namespace_finder) do
- subject.find_namespace!(namespace.id)
+ helper.find_namespace!(namespace.id)
end
it_behaves_like 'user namespace finder'
@@ -488,7 +488,7 @@ RSpec.describe API::Helpers do
let_it_be(:other_project) { create(:project) }
let_it_be(:job) { create(:ci_build) }
- let(:send_authorized_project_scope) { subject.authorized_project_scope?(project) }
+ let(:send_authorized_project_scope) { helper.authorized_project_scope?(project) }
where(:job_token_authentication, :route_setting, :feature_flag, :same_job_project, :expected_result) do
false | false | false | false | true
@@ -511,9 +511,9 @@ RSpec.describe API::Helpers do
with_them do
before do
- allow(subject).to receive(:job_token_authentication?).and_return(job_token_authentication)
- allow(subject).to receive(:route_authentication_setting).and_return(job_token_scope: route_setting ? :project : nil)
- allow(subject).to receive(:current_authenticated_job).and_return(job)
+ allow(helper).to receive(:job_token_authentication?).and_return(job_token_authentication)
+ allow(helper).to receive(:route_authentication_setting).and_return(job_token_scope: route_setting ? :project : nil)
+ allow(helper).to receive(:current_authenticated_job).and_return(job)
allow(job).to receive(:project).and_return(same_job_project ? project : other_project)
stub_feature_flags(ci_job_token_scope: false)
@@ -531,15 +531,15 @@ RSpec.describe API::Helpers do
let(:blob) { double(name: 'foobar') }
let(:send_git_blob) do
- subject.send(:send_git_blob, repository, blob)
- subject.header
+ helper.send(:send_git_blob, repository, blob)
+ helper.header
end
before do
- allow(subject).to receive(:env).and_return({})
- allow(subject).to receive(:content_type)
- allow(subject).to receive(:header).and_return({})
- allow(subject).to receive(:body).and_return('')
+ allow(helper).to receive(:env).and_return({})
+ allow(helper).to receive(:content_type)
+ allow(helper).to receive(:header).and_return({})
+ allow(helper).to receive(:body).and_return('')
allow(Gitlab::Workhorse).to receive(:send_git_blob)
end
@@ -572,19 +572,19 @@ RSpec.describe API::Helpers do
it 'tracks redis hll event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
- subject.increment_unique_values(event_name, value)
+ helper.increment_unique_values(event_name, value)
end
it 'logs an exception for unknown event' do
expect(Gitlab::AppLogger).to receive(:warn).with("Redis tracking event failed for event: #{unknown_event}, message: Unknown event #{unknown_event}")
- subject.increment_unique_values(unknown_event, value)
+ helper.increment_unique_values(unknown_event, value)
end
it 'does not track event for nil values' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- subject.increment_unique_values(unknown_event, nil)
+ helper.increment_unique_values(unknown_event, nil)
end
end
@@ -639,18 +639,6 @@ RSpec.describe API::Helpers do
it 'converts to id' do
is_expected.to eq({ 'id' => 'asc' })
end
-
- context 'when replace_order_by_created_at_with_id feature flag is disabled' do
- before do
- stub_feature_flags(replace_order_by_created_at_with_id: false)
- end
-
- include_examples '#order_options_with_tie_breaker'
-
- it 'maintains created_at order' do
- is_expected.to eq({ 'created_at' => 'asc', 'id' => 'asc' })
- end
- end
end
end
@@ -659,21 +647,21 @@ RSpec.describe API::Helpers do
context 'when unmodified check passes' do
before do
- allow(subject).to receive(:check_unmodified_since!).with(project.updated_at).and_return(true)
+ allow(helper).to receive(:check_unmodified_since!).with(project.updated_at).and_return(true)
end
it 'destroys given project' do
- allow(subject).to receive(:status).with(204)
- allow(subject).to receive(:body).with(false)
+ allow(helper).to receive(:status).with(204)
+ allow(helper).to receive(:body).with(false)
expect(project).to receive(:destroy).and_call_original
- expect { subject.destroy_conditionally!(project) }.to change(Project, :count).by(-1)
+ expect { helper.destroy_conditionally!(project) }.to change(Project, :count).by(-1)
end
end
context 'when unmodified check fails' do
before do
- allow(subject).to receive(:check_unmodified_since!).with(project.updated_at).and_throw(:error)
+ allow(helper).to receive(:check_unmodified_since!).with(project.updated_at).and_throw(:error)
end
# #destroy_conditionally! uses Grape errors, which throw a Ruby symbol and shift execution elsewhere.
@@ -683,7 +671,7 @@ RSpec.describe API::Helpers do
it 'does not destroy given project' do
expect(project).not_to receive(:destroy)
- expect { subject.destroy_conditionally!(project) }.to throw_symbol(:error).and change { Project.count }.by(0)
+ expect { helper.destroy_conditionally!(project) }.to throw_symbol(:error).and change { Project.count }.by(0)
end
end
end
@@ -692,30 +680,30 @@ RSpec.describe API::Helpers do
let(:unmodified_since_header) { Time.now.change(usec: 0) }
before do
- allow(subject).to receive(:headers).and_return('If-Unmodified-Since' => unmodified_since_header.to_s)
+ allow(helper).to receive(:headers).and_return('If-Unmodified-Since' => unmodified_since_header.to_s)
end
context 'when last modified is later than header value' do
it 'renders error' do
- expect(subject).to receive(:render_api_error!)
+ expect(helper).to receive(:render_api_error!)
- subject.check_unmodified_since!(unmodified_since_header + 1.hour)
+ helper.check_unmodified_since!(unmodified_since_header + 1.hour)
end
end
context 'when last modified is earlier than header value' do
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(unmodified_since_header - 1.hour)
+ helper.check_unmodified_since!(unmodified_since_header - 1.hour)
end
end
context 'when last modified is equal to header value' do
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(unmodified_since_header)
+ helper.check_unmodified_since!(unmodified_since_header)
end
end
@@ -723,9 +711,9 @@ RSpec.describe API::Helpers do
let(:unmodified_since_header) { nil }
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(Time.now)
+ helper.check_unmodified_since!(Time.now)
end
end
@@ -733,9 +721,9 @@ RSpec.describe API::Helpers do
let(:unmodified_since_header) { "abcd" }
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(Time.now)
+ helper.check_unmodified_since!(Time.now)
end
end
end
@@ -810,14 +798,71 @@ RSpec.describe API::Helpers do
before do
u = current_user_set ? user : nil
- subject.instance_variable_set(:@current_user, u)
+ helper.instance_variable_set(:@current_user, u)
- allow(subject).to receive(:params).and_return(params)
+ allow(helper).to receive(:params).and_return(params)
end
it 'returns the expected result' do
- expect(subject.order_by_similarity?(allow_unauthorized: allow_unauthorized)).to eq(expected)
+ expect(helper.order_by_similarity?(allow_unauthorized: allow_unauthorized)).to eq(expected)
end
end
end
+
+ describe '#render_api_error_with_reason!' do
+ before do
+ allow(helper).to receive(:env).and_return({})
+ allow(helper).to receive(:header).and_return({})
+ allow(helper).to receive(:error!)
+ end
+
+ it 'renders error with code' do
+ expect(helper).to receive(:set_status_code_in_env).with(999)
+ expect(helper).to receive(:error!).with({ 'message' => 'a message - good reason' }, 999, {})
+
+ helper.render_api_error_with_reason!(999, 'a message', 'good reason')
+ end
+ end
+
+ describe '#unauthorized!' do
+ it 'renders 401' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(401, '401 Unauthorized', nil)
+
+ helper.unauthorized!
+ end
+
+ it 'renders 401 with a reason' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(401, '401 Unauthorized', 'custom reason')
+
+ helper.unauthorized!('custom reason')
+ end
+ end
+
+ describe '#forbidden!' do
+ it 'renders 403' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(403, '403 Forbidden', nil)
+
+ helper.forbidden!
+ end
+
+ it 'renders 403 with a reason' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(403, '403 Forbidden', 'custom reason')
+
+ helper.forbidden!('custom reason')
+ end
+ end
+
+ describe '#bad_request!' do
+ it 'renders 400' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(400, '400 Bad request', nil)
+
+ helper.bad_request!
+ end
+
+ it 'renders 400 with a reason' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(400, '400 Bad request', 'custom reason')
+
+ helper.bad_request!('custom reason')
+ end
+ end
end
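
For reference, the reason-forwarding behaviour exercised above could be implemented roughly as follows — an illustrative sketch consistent with these expectations, not the actual GitLab helper (set_status_code_in_env, header and error! are assumed to be provided by the surrounding Grape helper module):

  # Builds "<message> - <reason>" when a reason is given and renders it with the
  # requested status code, matching the #render_api_error_with_reason! examples.
  def render_api_error_with_reason!(status, message, reason)
    set_status_code_in_env(status)

    message = [message, reason].compact.join(' - ')

    error!({ 'message' => message }, status, header)
  end

  def unauthorized!(reason = nil)
    render_api_error_with_reason!(401, '401 Unauthorized', reason)
  end

  def forbidden!(reason = nil)
    render_api_error_with_reason!(403, '403 Forbidden', reason)
  end

  def bad_request!(reason = nil)
    render_api_error_with_reason!(400, '400 Bad request', reason)
  end
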
diff --git a/spec/lib/api/integrations/slack/events/url_verification_spec.rb b/spec/lib/api/integrations/slack/events/url_verification_spec.rb
new file mode 100644
index 00000000000..2778f0d708d
--- /dev/null
+++ b/spec/lib/api/integrations/slack/events/url_verification_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Integrations::Slack::Events::UrlVerification do
+ describe '.call' do
+ it 'returns the challenge' do
+ expect(described_class.call({ challenge: 'foo' })).to eq({ challenge: 'foo' })
+ end
+ end
+end
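
The new spec only pins down the Slack URL-verification handshake: the challenge is echoed back verbatim. A minimal class satisfying it might look like the following sketch (the real implementation may slice or validate params differently):

  module API
    module Integrations
      module Slack
        module Events
          class UrlVerification
            # Slack sends a one-off url_verification event when the endpoint is
            # registered; replying with the same challenge completes the handshake.
            def self.call(params)
              { challenge: params[:challenge] }
            end
          end
        end
      end
    end
  end
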
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index dd3130c78bf..0ae0f02c46e 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Atlassian::JiraConnect::Client do
include StubRequests
- subject { described_class.new('https://gitlab-test.atlassian.net', 'sample_secret') }
+ subject(:client) { described_class.new('https://gitlab-test.atlassian.net', 'sample_secret') }
let_it_be(:project) { create_default(:project, :repository) }
let_it_be(:mrs_by_title) { create_list(:merge_request, 4, :unique_branches, :jira_title) }
@@ -413,4 +413,41 @@ RSpec.describe Atlassian::JiraConnect::Client do
expect { subject.send(:store_dev_info, project: project, merge_requests: merge_requests) }.not_to exceed_query_limit(control_count)
end
end
+
+ describe '#user_info' do
+ let(:account_id) { '12345' }
+ let(:response_body) do
+ {
+ groups: {
+ items: [
+ { name: 'site-admins' }
+ ]
+ }
+ }.to_json
+ end
+
+ before do
+ stub_full_request("https://gitlab-test.atlassian.net/rest/api/3/user?accountId=#{account_id}&expand=groups")
+ .to_return(status: response_status, body: response_body, headers: { 'Content-Type': 'application/json' })
+ end
+
+ context 'with a successful response' do
+ let(:response_status) { 200 }
+
+ it 'returns a JiraUser instance' do
+ jira_user = client.user_info(account_id)
+
+ expect(jira_user).to be_a(Atlassian::JiraConnect::JiraUser)
+ expect(jira_user).to be_site_admin
+ end
+ end
+
+ context 'with a failed response' do
+ let(:response_status) { 401 }
+
+ it 'returns nil' do
+ expect(client.user_info(account_id)).to be_nil
+ end
+ end
+ end
end
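
Per the stubbed request and expectations above, #user_info asks Jira for the user with groups expanded, wraps a successful response in a JiraUser, and returns nil otherwise. A rough sketch, assuming an HTTParty-style get helper on the client (the exact request plumbing is an assumption):

  def user_info(account_id)
    # GET /rest/api/3/user?accountId=<id>&expand=groups against the Jira site
    response = get('/rest/api/3/user', query: { accountId: account_id, expand: 'groups' })

    return unless response.code == 200

    JiraUser.new(response.parsed_response)
  end
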
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index a2477834dde..519d414f643 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Backup::Manager do
# is trying to display a diff and `File.exist?` is stubbed. Adding a
# default stub fixes this.
allow(File).to receive(:exist?).and_call_original
+ allow(FileUtils).to receive(:rm_rf).and_call_original
allow(progress).to receive(:puts)
allow(progress).to receive(:print)
@@ -171,12 +172,14 @@ RSpec.describe Backup::Manager do
allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'), full_backup_id)
end
- it 'executes tar' do
+ it 'creates a backup tar' do
travel_to(backup_time) do
subject.create # rubocop:disable Rails/SaveBang
-
- expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
end
+
+ expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'when BACKUP is set' do
@@ -203,6 +206,8 @@ RSpec.describe Backup::Manager do
end.to raise_error(Backup::Error, 'Backup failed')
expect(Gitlab::BackupLogger).to have_received(:info).with(message: "Creating archive #{pack_tar_file} failed")
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -597,6 +602,7 @@ RSpec.describe Backup::Manager do
skipped: 'tar',
tar_version: be_a(String)
)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -697,6 +703,8 @@ RSpec.describe Backup::Manager do
expect(Kernel).to have_received(:system).with(*unpack_tar_cmdline)
expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'untar fails' do
@@ -724,6 +732,8 @@ RSpec.describe Backup::Manager do
end.to raise_error(Backup::Error, 'Backup failed')
expect(Gitlab::BackupLogger).to have_received(:info).with(message: "Creating archive #{pack_tar_file} failed")
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -786,6 +796,8 @@ RSpec.describe Backup::Manager do
expect(Kernel).to have_received(:system).with(*unpack_tar_cmdline)
expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'untar fails' do
@@ -817,6 +829,8 @@ RSpec.describe Backup::Manager do
end.to raise_error(Backup::Error, 'Backup failed')
expect(Gitlab::BackupLogger).to have_received(:info).with(message: "Creating archive #{pack_tar_file} failed")
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -1001,6 +1015,8 @@ RSpec.describe Backup::Manager do
subject.restore
expect(Kernel).to have_received(:system).with(*tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'tar fails' do
@@ -1031,22 +1047,6 @@ RSpec.describe Backup::Manager do
.with(a_string_matching('GitLab version mismatch'))
end
end
-
- describe 'tmp files' do
- let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
-
- before do
- allow(FileUtils).to receive(:rm_rf).and_call_original
- end
-
- it 'removes backups/tmp dir' do
- expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
-
- subject.restore
-
- expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting backups/tmp ... ')
- end
- end
end
context 'when there is a non-tarred backup in the directory' do
@@ -1066,6 +1066,7 @@ RSpec.describe Backup::Manager do
expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found '))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'on version mismatch' do
@@ -1082,22 +1083,6 @@ RSpec.describe Backup::Manager do
.with(a_string_matching('GitLab version mismatch'))
end
end
-
- describe 'tmp files' do
- let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
-
- before do
- allow(FileUtils).to receive(:rm_rf).and_call_original
- end
-
- it 'removes backups/tmp dir' do
- expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
-
- subject.restore
-
- expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting backups/tmp ... ')
- end
- end
end
end
end
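
The repeated FileUtils.rm_rf expectations above replace the former 'tmp files' examples: cleanup is now asserted inline for both create and restore. Conceptually the cleanup amounts to a helper like this hypothetical one (name and call sites are assumptions, not the real Backup::Manager internals):

  # Remove the extracted metadata file and scratch directory after a backup
  # create/restore run, as the examples above expect.
  def remove_tmp_files
    FileUtils.rm_rf(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
    FileUtils.rm_rf(File.join(Gitlab.config.backup.path, 'tmp'))
  end
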
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 1581e4793e3..8bcf1e46c33 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Backup::Repositories do
let(:progress) { spy(:stdout) }
let(:strategy) { spy(:strategy) }
let(:storages) { [] }
+ let(:paths) { [] }
let(:destination) { 'repositories' }
let(:backup_id) { 'backup_id' }
@@ -13,7 +14,8 @@ RSpec.describe Backup::Repositories do
described_class.new(
progress,
strategy: strategy,
- storages: storages
+ storages: storages,
+ paths: paths
)
end
@@ -107,6 +109,52 @@ RSpec.describe Backup::Repositories do
expect(strategy).to have_received(:finish!)
end
end
+
+ describe 'paths' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ context 'project path' do
+ let(:paths) { [project.full_path] }
+
+ it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.dump(destination, backup_id)
+
+ expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+
+ context 'group path' do
+ let(:paths) { [project.namespace.full_path] }
+
+ it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.dump(destination, backup_id)
+
+ expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+ end
end
describe '#restore' do
@@ -138,7 +186,7 @@ RSpec.describe Backup::Repositories do
expect(pool_repository.object_pool.exists?).to be(true)
end
- it 'skips pools with no source project, :sidekiq_might_not_need_inline' do
+ it 'skips pools when no source project is found', :sidekiq_might_not_need_inline do
pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil)
@@ -208,5 +256,49 @@ RSpec.describe Backup::Repositories do
expect(strategy).to have_received(:finish!)
end
end
+
+ context 'paths' do
+ context 'project path' do
+ let(:paths) { [project.full_path] }
+
+ it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.restore(destination)
+
+ expect(strategy).to have_received(:start).with(:restore, destination)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+
+ context 'group path' do
+ let(:paths) { [project.namespace.full_path] }
+
+ it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.restore(destination)
+
+ expect(strategy).to have_received(:start).with(:restore, destination)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+ end
end
end
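
These examples introduce a paths: option that narrows the backup to specific projects or to every project under a group. The resolution could be sketched as follows (illustrative only; the helper name is hypothetical, though find_by_full_path and all_projects are standard GitLab finders):

  # Hypothetical resolver: map the configured paths to the projects to enqueue.
  def projects_for(paths)
    return Project.all if paths.blank?

    paths.flat_map do |path|
      if (project = Project.find_by_full_path(path))
        [project]
      elsif (group = Group.find_by_full_path(path))
        group.all_projects
      else
        []
      end
    end
  end
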
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index c6f0e592cdf..d17deaa4736 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -93,7 +93,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Issue #{reference}")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'includes a data-project attribute' do
@@ -112,6 +112,14 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
expect(link.attr('data-issue')).to eq issue.id.to_s
end
+ it 'includes data attributes for issuable popover' do
+ doc = reference_filter("See #{reference}")
+ link = doc.css('a').first
+
+ expect(link.attr('data-project-path')).to eq project.full_path
+ expect(link.attr('data-iid')).to eq issue.iid.to_s
+ end
+
it 'includes a data-original attribute' do
doc = reference_filter("See #{reference}")
link = doc.css('a').first
@@ -201,7 +209,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'ignores invalid issue IDs on the referenced project' do
@@ -253,7 +261,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'ignores invalid issue IDs on the referenced project' do
@@ -305,7 +313,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'ignores invalid issue IDs on the referenced project' do
@@ -347,7 +355,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
end
@@ -378,7 +386,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference_link}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
end
@@ -409,7 +417,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference_link}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
end
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index e5809ac6949..42e8cf1c857 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -77,9 +77,9 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(reference_filter(act).to_html).to eq exp
end
- it 'has no title' do
+ it 'has the MR title in the title attribute' do
doc = reference_filter("Merge #{reference}")
- expect(doc.css('a').first.attr('title')).to eq ""
+ expect(doc.css('a').first.attr('title')).to eq(merge.title)
end
it 'escapes the title attribute' do
@@ -169,7 +169,6 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(link.attr('data-project')).to eq project2.id.to_s
expect(link.attr('data-project-path')).to eq project2.full_path
expect(link.attr('data-iid')).to eq merge.iid.to_s
- expect(link.attr('data-mr-title')).to eq merge.title
end
it 'ignores invalid merge IDs on the referenced project' do
@@ -273,12 +272,6 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(doc.text).to eq("See #{mr.to_reference(full: true)} (#{commit.short_id})")
end
- it 'has valid title attribute' do
- doc = reference_filter("See #{reference}")
-
- expect(doc.css('a').first.attr('title')).to eq(commit.title)
- end
-
it 'ignores invalid commit short_ids on link text' do
invalidate_commit_reference =
urls.project_merge_request_url(mr.project, mr) + "/diffs?commit_id=12345678"
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 16c958ec10b..33adca0ddfc 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre><code>def fun end</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", ""
@@ -59,7 +59,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre lang="gnuplot"><code>This is a test</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="gnuplot" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", "gnuplot"
@@ -130,13 +130,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "includes it in the highlighted code block" do
result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
end
it "escape sourcepos metadata to prevent XSS" do
result = filter('<pre data-sourcepos="&#34;%22 href=&#34;x&#34;></pre><base href=http://unsafe-website.com/><pre x=&#34;"><code></code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos=\'"%22 href="x"&gt;&lt;/pre&gt;&lt;base href=http://unsafe-website.com/&gt;&lt;pre x="\' class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos=\'"%22 href="x"&gt;&lt;/pre&gt;&lt;base href=http://unsafe-website.com/&gt;&lt;pre x="\' class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre><copy-code></copy-code></div>')
end
end
@@ -150,7 +150,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre lang="ruby"><code>This is a test</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" data-canonical-lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", "ruby"
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 8ce25ff87d7..528d65615b1 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -4,47 +4,86 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Stage do
let(:ancestor) { create(:group) }
- let(:group) { create(:group, parent: ancestor) }
+ let(:group) { build(:group, parent: ancestor) }
let(:bulk_import) { build(:bulk_import) }
- let(:entity) { build(:bulk_import_entity, bulk_import: bulk_import, group: group, destination_namespace: ancestor.full_path) }
-
- let(:pipelines) do
- [
- [0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::GroupAttributesPipeline],
- [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
- [1, BulkImports::Groups::Pipelines::NamespaceSettingsPipeline],
- [1, BulkImports::Common::Pipelines::MembersPipeline],
- [1, BulkImports::Common::Pipelines::LabelsPipeline],
- [1, BulkImports::Common::Pipelines::MilestonesPipeline],
- [1, BulkImports::Common::Pipelines::BadgesPipeline],
- [2, BulkImports::Common::Pipelines::BoardsPipeline],
- [2, BulkImports::Common::Pipelines::UploadsPipeline]
- ]
+ let(:entity) do
+ build(:bulk_import_entity, bulk_import: bulk_import, group: group, destination_namespace: ancestor.full_path)
end
it 'raises error when initialized without a BulkImport' do
- expect { described_class.new({}) }.to raise_error(ArgumentError, 'Expected an argument of type ::BulkImports::Entity')
+ expect { described_class.new({}) }.to raise_error(
+ ArgumentError, 'Expected an argument of type ::BulkImports::Entity'
+ )
end
- describe '.pipelines' do
- it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.new(entity).pipelines & pipelines).to contain_exactly(*pipelines)
- expect(described_class.new(entity).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
+ describe '#pipelines' do
+ it 'lists all the pipelines' do
+ pipelines = described_class.new(entity).pipelines
+
+ expect(pipelines).to include(
+ hash_including({
+ pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
+ stage: 0
+ }),
+ hash_including({
+ pipeline: BulkImports::Groups::Pipelines::GroupAttributesPipeline,
+ stage: 1
+ })
+ )
+ expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
+ end
+
+ it 'only has pipelines with valid keys' do
+ pipeline_keys = described_class.new(entity).pipelines.collect(&:keys).flatten.uniq
+ allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
+
+ expect(pipeline_keys - allowed_keys).to be_empty
+ end
+
+ it 'only has pipelines with valid versions' do
+ pipelines = described_class.new(entity).pipelines
+ minimum_source_versions = pipelines.collect { _1[:minimum_source_version] }.flatten.compact
+ maximum_source_versions = pipelines.collect { _1[:maximum_source_version] }.flatten.compact
+ version_regex = /^(\d+)\.(\d+)\.0$/
+
+ expect(minimum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ expect(maximum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ end
+
+ context 'when stages are out of order in the config hash' do
+ it 'lists all the pipelines ordered by stage' do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ a: { stage: 2 },
+ b: { stage: 1 },
+ c: { stage: 0 },
+ d: { stage: 2 }
+ }
+ )
+ end
+
+ expected_stages = described_class.new(entity).pipelines.collect { _1[:stage] }
+ expect(expected_stages).to eq([0, 1, 2, 2])
+ end
end
context 'when bulk_import_projects feature flag is enabled' do
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: true)
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
context 'when feature flag is enabled on root ancestor level' do
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: ancestor)
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
@@ -54,7 +93,9 @@ RSpec.describe BulkImports::Groups::Stage do
entity = create(:bulk_import_entity, destination_namespace: '')
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
end
@@ -63,7 +104,9 @@ RSpec.describe BulkImports::Groups::Stage do
it 'does not include project entities pipeline' do
stub_feature_flags(bulk_import_projects: false)
- expect(described_class.new(entity).pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline)
+ expect(described_class.new(entity).pipelines).not_to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
end
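
The rewritten examples reflect the stage configuration moving from [stage, pipeline] tuples to hashes keyed by pipeline name, with only pipeline, stage, minimum_source_version and maximum_source_version allowed. An entry now has roughly this shape (the version string below is a placeholder that merely matches the asserted /^(\d+)\.(\d+)\.0$/ format):

  config = {
    group: { pipeline: BulkImports::Groups::Pipelines::GroupPipeline, stage: 0 },
    group_attributes: {
      pipeline: BulkImports::Groups::Pipelines::GroupAttributesPipeline,
      stage: 1,
      minimum_source_version: '15.0.0' # placeholder
    }
  }

  # #pipelines sorts the configured entries by :stage; the EntityFinisher pipeline
  # always comes last, as the first example asserts.
  pipelines = config.values.sort_by { |entry| entry[:stage] }
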
diff --git a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
new file mode 100644
index 00000000000..39b539ece21
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do
+ let_it_be(:design) { create(:design, :with_file) }
+
+ let(:portable) { create(:project) }
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:design_bundle_path) { File.join(tmpdir, 'design.bundle') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ design.repository.bundle_to_disk(design_bundle_path)
+
+ allow(portable).to receive(:lfs_enabled?).and_return(true)
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ it 'imports design repository into destination project and removes tmpdir' do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [design_bundle_path]))
+
+ expect(portable.design_repository).to receive(:create_from_bundle).with(design_bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(portable.design_repository.exists?).to eq(true)
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts design bundle filepath' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=design",
+ tmpdir: tmpdir,
+ filename: 'design.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'design.tar.gz')
+ .and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'design.tar')
+ .and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(design_bundle_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [design_bundle_path]))
+ end
+
+ it 'creates design repository from bundle' do
+ expect(portable.design_repository).to receive(:create_from_bundle).with(design_bundle_path).and_call_original
+
+ pipeline.load(context, design_bundle_path)
+
+ expect(portable.design_repository.exists?).to eq(true)
+ end
+
+ context 'when lfs is disabled' do
+ it 'returns' do
+ allow(portable).to receive(:lfs_enabled?).and_return(false)
+
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, design_bundle_path)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when file does not exist' do
+ it 'returns' do
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, File.join(tmpdir, 'bogus'))
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is directory' do
+ it 'returns' do
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, tmpdir)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is symlink' do
+ it 'returns' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(File.join(tmpdir, design_bundle_path), symlink)
+
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, symlink)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is not under tmpdir' do
+ it 'raises an error' do
+ expect { pipeline.load(context, '/home/test.txt') }
+ .to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }
+ .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
index aa9c7486c27..4320d5dc119 100644
--- a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -54,17 +54,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
subject(:pipeline) { described_class.new(context) }
- before do
- allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
- end
-
- after do
- FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
- end
-
describe '#run' do
before do
- allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
+ allow_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(
+ BulkImports::Pipeline::ExtractedData.new(data: project_attributes)
+ )
+ end
pipeline.run
end
@@ -84,46 +80,6 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
end
end
- describe '#extract' do
- before do
- file_download_service = instance_double("BulkImports::FileDownloadService")
- file_decompression_service = instance_double("BulkImports::FileDecompressionService")
-
- expect(BulkImports::FileDownloadService)
- .to receive(:new)
- .with(
- configuration: context.configuration,
- relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
- tmpdir: tmpdir,
- filename: 'self.json.gz')
- .and_return(file_download_service)
-
- expect(BulkImports::FileDecompressionService)
- .to receive(:new)
- .with(tmpdir: tmpdir, filename: 'self.json.gz')
- .and_return(file_decompression_service)
-
- expect(file_download_service).to receive(:execute)
- expect(file_decompression_service).to receive(:execute)
- end
-
- it 'downloads, decompresses & decodes json' do
- allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
-
- extracted_data = pipeline.extract(context)
-
- expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
- end
-
- context 'when json parsing error occurs' do
- it 'raises an error' do
- allow(pipeline).to receive(:json_attributes).and_return("invalid")
-
- expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
- end
- end
- end
-
describe '#transform' do
it 'removes prohibited attributes from hash' do
input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
@@ -145,35 +101,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
end
end
- describe '#json_attributes' do
- it 'reads raw json from file' do
- filepath = File.join(tmpdir, 'self.json')
-
- FileUtils.touch(filepath)
- expect_file_read(filepath)
-
- pipeline.json_attributes
- end
- end
-
describe '#after_run' do
- it 'removes tmp dir' do
- allow(FileUtils).to receive(:remove_entry).and_call_original
- expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+ it 'calls extractor#remove_tmpdir' do
+ expect_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
+ expect(extractor).to receive(:remove_tmpdir)
+ end
pipeline.after_run(nil)
-
- expect(Dir.exist?(tmpdir)).to eq(false)
- end
-
- context 'when dir does not exist' do
- it 'does not attempt to remove tmpdir' do
- FileUtils.remove_entry(tmpdir)
-
- expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
-
- pipeline.after_run(nil)
- end
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
index 2279e66720e..2633598b48d 100644
--- a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
@@ -31,7 +31,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
'created_at' => '2019-12-26T10:17:14.621Z',
'updated_at' => '2019-12-26T10:17:14.621Z',
'released_at' => '2019-12-26T10:17:14.615Z',
- 'sha' => '901de3a8bd5573f4a049b1457d28bc1592ba6bf9'
+ 'sha' => '901de3a8bd5573f4a049b1457d28bc1592ba6bf9',
+ 'author_id' => user.id
}.merge(attributes)
end
@@ -45,11 +46,11 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [with_index]))
end
-
- pipeline.run
end
it 'imports release into destination project' do
+ pipeline.run
+
expect(project.releases.count).to eq(1)
imported_release = project.releases.last
@@ -62,6 +63,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
expect(imported_release.updated_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.released_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.sha).to eq(release['sha'])
+ expect(imported_release.author_id).to eq(release['author_id'])
end
end
@@ -78,6 +80,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
let(:attributes) {{ 'links' => [link] }}
it 'restores release links' do
+ pipeline.run
+
release_link = project.releases.last.links.first
aggregate_failures do
@@ -105,6 +109,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
let(:attributes) {{ 'milestone_releases' => [{ 'milestone' => milestone }] }}
it 'restores release milestone' do
+ pipeline.run
+
release_milestone = project.releases.last.milestone_releases.first.milestone
aggregate_failures do
@@ -118,5 +124,33 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
end
end
end
+
+ context 'evidences' do
+ it 'creates release evidence' do
+ expect(::Releases::CreateEvidenceWorker).to receive(:perform_async)
+
+ pipeline.run
+ end
+
+ context 'when release is historical' do
+ let(:attributes) {{ 'released_at' => '2018-12-26T10:17:14.621Z' }}
+
+ it 'does not create release evidence' do
+ expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
+
+ pipeline.run
+ end
+ end
+
+ context 'when release is upcoming' do
+ let(:attributes) {{ 'released_at' => Time.zone.now + 30.days }}
+
+ it 'does not create release evidence' do
+ expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
+
+ pipeline.run
+ end
+ end
+ end
end
end
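
The new evidences examples establish that a release evidence job is enqueued only for releases that are neither historical nor upcoming. A guard along these lines would satisfy them (a sketch; the predicate names and worker arguments are assumptions about the pipeline's internals):

  # Skip evidence collection for releases released well in the past (historical)
  # or with a released_at in the future (upcoming), as the examples assert.
  def create_release_evidence(release)
    return if release.historical_release? || release.upcoming_release?

    ::Releases::CreateEvidenceWorker.perform_async(release.id)
  end
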
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
new file mode 100644
index 00000000000..712c37ee578
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do
+ let_it_be(:source) { create(:project, :repository) }
+
+ let(:portable) { create(:project) }
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:bundle_path) { File.join(tmpdir, 'repository.bundle') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ source.repository.bundle_to_disk(bundle_path)
+
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ before do
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [bundle_path]))
+ end
+
+ it 'imports repository into destination project and removes tmpdir' do
+ expect(portable.repository).to receive(:create_from_bundle).with(bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(portable.repository.exists?).to eq(true)
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when something goes wrong during import' do
+ it 'marks entity as failed' do
+ allow(pipeline).to receive(:load).and_raise(StandardError)
+
+ pipeline.run
+
+ expect(entity.failed?).to eq(true)
+ end
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts repository bundle filepath' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=repository",
+ tmpdir: tmpdir,
+ filename: 'repository.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'repository.tar.gz')
+ .and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'repository.tar')
+ .and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(bundle_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [bundle_path]))
+ end
+
+ it 'creates repository from bundle' do
+ expect(portable.repository).to receive(:create_from_bundle).with(bundle_path).and_call_original
+
+ pipeline.load(context, bundle_path)
+
+ expect(portable.repository.exists?).to eq(true)
+ end
+
+ context 'when file does not exist' do
+ it 'returns' do
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, File.join(tmpdir, 'bogus'))
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is directory' do
+ it 'returns' do
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, tmpdir)
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is symlink' do
+ it 'returns' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(File.join(tmpdir, bundle_path), symlink)
+
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, symlink)
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is not under tmpdir' do
+ it 'raises an error' do
+ expect { pipeline.load(context, '/home/test.txt') }
+ .to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }
+ .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index e81d9cc5fb4..fc670d10655 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,38 +2,7 @@
require 'spec_helper'
-# Any new stages must be added to
-# `ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb` as well.
RSpec.describe BulkImports::Projects::Stage do
- let(:pipelines) do
- [
- [0, BulkImports::Projects::Pipelines::ProjectPipeline],
- [1, BulkImports::Projects::Pipelines::RepositoryPipeline],
- [1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
- [2, BulkImports::Common::Pipelines::LabelsPipeline],
- [2, BulkImports::Common::Pipelines::MilestonesPipeline],
- [2, BulkImports::Common::Pipelines::BadgesPipeline],
- [3, BulkImports::Projects::Pipelines::IssuesPipeline],
- [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
- [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
- [4, BulkImports::Common::Pipelines::BoardsPipeline],
- [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
- [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
- [4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
- [4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
- [4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
- [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
- [4, BulkImports::Projects::Pipelines::ReleasesPipeline],
- [5, BulkImports::Projects::Pipelines::CiPipelinesPipeline],
- [5, BulkImports::Common::Pipelines::WikiPipeline],
- [5, BulkImports::Common::Pipelines::UploadsPipeline],
- [5, BulkImports::Common::Pipelines::LfsObjectsPipeline],
- [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
- [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
- [6, BulkImports::Common::Pipelines::EntityFinisher]
- ]
- end
-
subject do
entity = build(:bulk_import_entity, :project_entity)
@@ -41,9 +10,49 @@ RSpec.describe BulkImports::Projects::Stage do
end
describe '#pipelines' do
- it 'list all the pipelines with their stage number, ordered by stage' do
- expect(subject.pipelines & pipelines).to contain_exactly(*pipelines)
- expect(subject.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
+ it 'lists all the pipelines' do
+ pipelines = subject.pipelines
+
+ expect(pipelines).to include(
+ hash_including({ stage: 0, pipeline: BulkImports::Projects::Pipelines::ProjectPipeline }),
+ hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline })
+ )
+ expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
+ end
+
+ it 'only has pipelines with valid keys' do
+ pipeline_keys = subject.pipelines.collect(&:keys).flatten.uniq
+ allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
+
+ expect(pipeline_keys - allowed_keys).to be_empty
+ end
+
+ it 'only has pipelines with valid versions' do
+ pipelines = subject.pipelines
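+ # `_1` is Ruby's numbered block parameter, shorthand for `{ |pipeline| pipeline[:minimum_source_version] }`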
+ minimum_source_versions = pipelines.collect { _1[:minimum_source_version] }.flatten.compact
+ maximum_source_versions = pipelines.collect { _1[:maximum_source_version] }.flatten.compact
+ version_regex = /^(\d+)\.(\d+)\.0$/
+
+ expect(minimum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ expect(maximum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ end
+
+ context 'when stages are out of order in the config hash' do
+ it 'lists all the pipelines ordered by stage' do
+ allow_next_instance_of(BulkImports::Projects::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ a: { stage: 2 },
+ b: { stage: 1 },
+ c: { stage: 0 },
+ d: { stage: 2 }
+ }
+ )
+ end
+
+ expected_stages = subject.pipelines.collect { _1[:stage] }
+ expect(expected_stages).to eq([0, 1, 2, 2])
+ end
end
end
end
diff --git a/spec/lib/container_registry/migration_spec.rb b/spec/lib/container_registry/migration_spec.rb
index 81dac354b8b..fea66d3c8f4 100644
--- a/spec/lib/container_registry/migration_spec.rb
+++ b/spec/lib/container_registry/migration_spec.rb
@@ -58,17 +58,20 @@ RSpec.describe ContainerRegistry::Migration do
describe '.capacity' do
subject { described_class.capacity }
- where(:ff_1_enabled, :ff_2_enabled, :ff_5_enabled, :ff_10_enabled, :ff_25_enabled, :expected_result) do
- false | false | false | false | false | 0
- true | false | false | false | false | 1
- false | true | false | false | false | 2
- true | true | false | false | false | 2
- false | false | true | false | false | 5
- true | true | true | false | false | 5
- false | false | false | true | false | 10
- true | true | true | true | false | 10
- false | false | false | false | true | 25
- true | true | true | true | true | 25
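+ # Expected capacity is the highest capacity whose feature flag is enabled (0 when none are)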
+ where(:ff_1_enabled, :ff_2_enabled, :ff_5_enabled,
+ :ff_10_enabled, :ff_25_enabled, :ff_40_enabled, :expected_result) do
+ false | false | false | false | false | false | 0
+ true | false | false | false | false | false | 1
+ false | true | false | false | false | false | 2
+ true | true | false | false | false | false | 2
+ false | false | true | false | false | false | 5
+ true | true | true | false | false | false | 5
+ false | false | false | true | false | false | 10
+ true | true | true | true | false | false | 10
+ false | false | false | false | true | false | 25
+ true | true | true | true | true | false | 25
+ false | false | false | false | false | true | 40
+ true | true | true | true | true | true | 40
end
with_them do
@@ -78,7 +81,8 @@ RSpec.describe ContainerRegistry::Migration do
container_registry_migration_phase2_capacity_2: ff_2_enabled,
container_registry_migration_phase2_capacity_5: ff_5_enabled,
container_registry_migration_phase2_capacity_10: ff_10_enabled,
- container_registry_migration_phase2_capacity_25: ff_25_enabled
+ container_registry_migration_phase2_capacity_25: ff_25_enabled,
+ container_registry_migration_phase2_capacity_40: ff_40_enabled
)
end
@@ -182,6 +186,18 @@ RSpec.describe ContainerRegistry::Migration do
end
end
+ describe '.pre_import_tags_rate' do
+ let(:value) { 2.5 }
+
+ before do
+ stub_application_setting(container_registry_pre_import_tags_rate: value)
+ end
+
+ it 'returns the matching application_setting' do
+ expect(described_class.pre_import_tags_rate).to eq(value)
+ end
+ end
+
describe '.target_plans' do
subject { described_class.target_plans }
@@ -214,31 +230,30 @@ RSpec.describe ContainerRegistry::Migration do
end
end
- describe '.enqueue_twice?' do
- subject { described_class.enqueue_twice? }
+ describe '.delete_container_repository_worker_support?' do
+ subject { described_class.delete_container_repository_worker_support? }
it { is_expected.to eq(true) }
context 'feature flag disabled' do
before do
- stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
+ stub_feature_flags(container_registry_migration_phase2_delete_container_repository_worker_support: false)
end
it { is_expected.to eq(false) }
end
end
- describe '.enqueue_loop?' do
- subject { described_class.enqueuer_loop? }
+ describe '.dynamic_pre_import_timeout_for' do
+ let(:container_repository) { build(:container_repository) }
- it { is_expected.to eq(true) }
+ subject { described_class.dynamic_pre_import_timeout_for(container_repository) }
- context 'feature flag disabled' do
- before do
- stub_feature_flags(container_registry_migration_phase2_enqueuer_loop: false)
- end
+ it 'returns the expected seconds' do
+ stub_application_setting(container_registry_pre_import_tags_rate: 0.6)
+ expect(container_repository).to receive(:tags_count).and_return(50)
- it { is_expected.to eq(false) }
+ expect(subject).to eq((0.6 * 50).seconds)
end
end
end
diff --git a/spec/lib/error_tracking/stacktrace_builder_spec.rb b/spec/lib/error_tracking/stacktrace_builder_spec.rb
new file mode 100644
index 00000000000..46d0bde8122
--- /dev/null
+++ b/spec/lib/error_tracking/stacktrace_builder_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'support/helpers/fast_rails_root'
+require 'oj'
+
+RSpec.describe ErrorTracking::StacktraceBuilder do
+ include FastRailsRoot
+
+ describe '#stacktrace' do
+ let(:original_payload) { Gitlab::Json.parse(File.read(rails_root_join('spec/fixtures', payload_file))) }
+ let(:payload) { original_payload }
+ let(:payload_file) { 'error_tracking/parsed_event.json' }
+
+ subject(:stacktrace) { described_class.new(payload).stacktrace }
+
+ context 'with full error context' do
+ it 'generates a correct stacktrace in expected format' do
+ expected_context = [
+ [132, " end\n"],
+ [133, "\n"],
+ [134, " begin\n"],
+ [135, " block.call(work, *extra)\n"],
+ [136, " rescue Exception => e\n"],
+ [137, " STDERR.puts \"Error reached top of thread-pool: #\{e.message\} (#\{e.class\})\"\n"],
+ [138, " end\n"]
+ ]
+
+ expected_entry = {
+ 'lineNo' => 135,
+ 'context' => expected_context,
+ 'filename' => 'puma/thread_pool.rb',
+ 'function' => 'block in spawn_thread',
+ 'colNo' => 0
+ }
+
+ expect(stacktrace).to be_kind_of(Array)
+ expect(stacktrace.first).to eq(expected_entry)
+ end
+ end
+
+ context 'when error context is missing' do
+ let(:payload_file) { 'error_tracking/browser_event.json' }
+
+ it 'generates a stacktrace without context' do
+ expected_entry = {
+ 'lineNo' => 6395,
+ 'context' => [],
+ 'filename' => 'webpack-internal:///./node_modules/vue/dist/vue.runtime.esm.js',
+ 'function' => 'hydrate',
+ 'colNo' => 0
+ }
+
+ expect(stacktrace).to be_kind_of(Array)
+ expect(stacktrace.first).to eq(expected_entry)
+ end
+ end
+
+ context 'with empty payload' do
+ let(:payload) { {} }
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without exception field' do
+ let(:payload) { original_payload.except('exception') }
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without exception.values field' do
+ before do
+ original_payload['exception'].delete('values')
+ end
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without any exception.values[].stacktrace fields' do
+ before do
+ original_payload.dig('exception', 'values').each { |value| value['stacktrace'] = '' }
+ end
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without any exception.values[].stacktrace.frame fields' do
+ before do
+ original_payload.dig('exception', 'values').each { |value| value['stacktrace'].delete('frames') }
+ end
+
+ it { is_expected.to eq([]) }
+ end
+ end
+end
diff --git a/spec/lib/generators/gitlab/usage_metric_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
index 207ecb88aad..2bf50a5fa24 100644
--- a/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
let(:sample_metric_dir) { 'lib/generators/gitlab/usage_metric_generator' }
let(:generic_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_generic_metric.rb')) }
let(:database_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_database_metric.rb')) }
+ let(:numbers_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_numbers_metric.rb')) }
let(:sample_spec) { fixture_file(File.join(sample_metric_dir, 'sample_metric_test.rb')) }
it 'creates CE metric instrumentation files using the template' do
@@ -63,6 +64,17 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
end
end
+ context 'for numbers type' do
+ let(:options) { { 'type' => 'numbers', 'operation' => 'add' } }
+
+ it 'creates the metric instrumentation file using the template' do
+ described_class.new(args, options).invoke_all
+
+ expect_generated_file(ce_temp_dir, 'count_foo_metric.rb', numbers_sample_metric)
+ expect_generated_file(spec_ce_temp_dir, 'count_foo_metric_spec.rb', sample_spec)
+ end
+ end
+
context 'with type option missing' do
let(:options) { {} }
@@ -94,5 +106,21 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation 'sleep'/)
end
end
+
+ context 'without operation for numbers metric' do
+ let(:options) { { 'type' => 'numbers' } }
+
+ it 'raises an ArgumentError' do
+ expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation ''/)
+ end
+ end
+
+ context 'with wrong operation for numbers metric' do
+ let(:options) { { 'type' => 'numbers', 'operation' => 'sleep' } }
+
+ it 'raises an ArgumentError' do
+ expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation 'sleep'/)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
index 8eb75feaa8d..7e36d89a2a1 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
@@ -27,6 +27,26 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
expect(returned_iids).to eq(expected_issue_ids)
end
+
+ it 'passes a hash with all expected attributes to the serializer' do
+ expected_attributes = [
+ 'created_at',
+ 'id',
+ 'iid',
+ 'title',
+ :end_event_timestamp,
+ :start_event_timestamp,
+ :total_time,
+ :author,
+ :namespace_path,
+ :project_path
+ ]
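+ # instance_double is a verified double: only methods defined on the real serializer class can be stubbed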
+ serializer = instance_double(records_fetcher.send(:serializer).class.name)
+ allow(records_fetcher).to receive(:serializer).and_return(serializer)
+ expect(serializer).to receive(:represent).at_least(:once).with(hash_including(*expected_attributes)).and_return({})
+
+ records_fetcher.serialized_records
+ end
end
describe '#serialized_records' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index dc46dade87e..ec394bb9f05 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -8,15 +8,18 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
end
let(:params) { { from: 1.year.ago, current_user: user } }
+ let(:records_fetcher) do
+ Gitlab::Analytics::CycleAnalytics::DataCollector.new(
+ stage: stage,
+ params: params
+ ).records_fetcher
+ end
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:user) { create(:user) }
subject do
- Gitlab::Analytics::CycleAnalytics::DataCollector.new(
- stage: stage,
- params: params
- ).records_fetcher.serialized_records
+ records_fetcher.serialized_records
end
describe '#serialized_records' do
@@ -28,6 +31,26 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
it 'returns all records' do
expect(subject.size).to eq(2)
end
+
+ it 'passes a hash with all expected attributes to the serializer' do
+ expected_attributes = [
+ 'created_at',
+ 'id',
+ 'iid',
+ 'title',
+ 'end_event_timestamp',
+ 'start_event_timestamp',
+ 'total_time',
+ :author,
+ :namespace_path,
+ :project_path
+ ]
+ serializer = instance_double(records_fetcher.send(:serializer).class.name)
+ allow(records_fetcher).to receive(:serializer).and_return(serializer)
+ expect(serializer).to receive(:represent).twice.with(hash_including(*expected_attributes)).and_return({})
+
+ subject
+ end
end
describe 'for issue based stage' do
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index d86191ca0c2..bfea1315d90 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -94,7 +94,7 @@ module Gitlab
# Move this test back to the items hash when removing `use_cmark_renderer` feature flag.
it "does not convert dangerous fenced code with inline script into HTML" do
input = '```mypre"><script>alert(3)</script>'
- output = "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
+ output = "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" data-canonical-lang=\"mypre\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
expect(render(input, context)).to include(output)
end
@@ -360,7 +360,7 @@ module Gitlab
<div>
<div>
<div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
+ <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" data-canonical-lang="js" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
<copy-code></copy-code>
</div>
</div>
@@ -390,7 +390,7 @@ module Gitlab
<div>class.cpp</div>
<div>
<div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
+ <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" data-canonical-lang="c++" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include</span> <span class="cpf">&lt;stdio.h&gt;</span></span>
<span id="LC2" class="line" lang="cpp"></span>
<span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
<span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
diff --git a/spec/lib/gitlab/audit/unauthenticated_author_spec.rb b/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
index 4e5c477fc2a..70716ee7f4c 100644
--- a/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
+++ b/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
@@ -13,5 +13,11 @@ RSpec.describe Gitlab::Audit::UnauthenticatedAuthor do
expect(described_class.new)
.to have_attributes(id: -1, name: 'An unauthenticated user')
end
+
+ describe '#impersonated?' do
+ it 'returns false' do
+ expect(described_class.new.impersonated?).to be(false)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 6cb9085c3ad..5f5e7f211f8 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::OAuth::User do
include LdapHelpers
+ include TermsHelper
let(:oauth_user) { described_class.new(auth_hash) }
let(:oauth_user_2) { described_class.new(auth_hash_2) }
@@ -144,6 +145,49 @@ RSpec.describe Gitlab::Auth::OAuth::User do
expect(gl_user).to be_password_automatically_set
end
+ context 'terms of service' do
+ context 'when terms are enforced' do
+ before do
+ enforce_terms
+ end
+
+ context 'when feature flag update_oauth_registration_flow is enabled' do
+ before do
+ stub_feature_flags(update_oauth_registration_flow: true)
+ end
+
+ it 'creates the user with accepted terms' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+
+ expect(gl_user).to be_persisted
+ expect(gl_user.terms_accepted?).to be(true)
+ end
+ end
+
+ context 'when feature flag update_oauth_registration_flow is disabled' do
+ before do
+ stub_feature_flags(update_oauth_registration_flow: false)
+ end
+
+ it 'creates the user without accepted terms' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+
+ expect(gl_user).to be_persisted
+ expect(gl_user.terms_accepted?).to be(false)
+ end
+ end
+ end
+
+ context 'when terms are not enforced' do
+ it 'creates the user without accepted terms' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+
+ expect(gl_user).to be_persisted
+ expect(gl_user.terms_accepted?).to be(false)
+ end
+ end
+ end
+
shared_examples 'to verify compliance with allow_single_sign_on' do
context 'provider is marked as external' do
it 'marks user as external' do
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
index 2dcd4645c84..2949bc068c8 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForProjectRoute do
+# This spec requires a schema version from before the NOT NULL constraint was added to routes#namespace_id
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForProjectRoute, schema: 20220606060825 do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb
new file mode 100644
index 00000000000..fd6c055b9f6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectFeaturePackageRegistryAccessLevel do
+ let(:non_null_project_features) { { pages_access_level: 20 } }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_features) { table(:project_features) }
+
+ let(:namespace1) { namespaces.create!(name: 'namespace 1', path: 'namespace1') }
+ let(:namespace2) { namespaces.create!(name: 'namespace 2', path: 'namespace2') }
+ let(:namespace3) { namespaces.create!(name: 'namespace 3', path: 'namespace3') }
+ let(:namespace4) { namespaces.create!(name: 'namespace 4', path: 'namespace4') }
+ let(:namespace5) { namespaces.create!(name: 'namespace 5', path: 'namespace5') }
+ let(:namespace6) { namespaces.create!(name: 'namespace 6', path: 'namespace6') }
+
+ let(:project1) do
+ projects.create!(namespace_id: namespace1.id, project_namespace_id: namespace1.id, packages_enabled: false)
+ end
+
+ let(:project2) do
+ projects.create!(namespace_id: namespace2.id, project_namespace_id: namespace2.id, packages_enabled: nil)
+ end
+
+ let(:project3) do
+ projects.create!(
+ namespace_id: namespace3.id,
+ project_namespace_id: namespace3.id,
+ packages_enabled: true,
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE
+ )
+ end
+
+ let(:project4) do
+ projects.create!(
+ namespace_id: namespace4.id,
+ project_namespace_id: namespace4.id,
+ packages_enabled: true,
+ visibility_level: Gitlab::VisibilityLevel::INTERNAL
+ )
+ end
+
+ let(:project5) do
+ projects.create!(
+ namespace_id: namespace5.id,
+ project_namespace_id: namespace5.id,
+ packages_enabled: true,
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC
+ )
+ end
+
+ let(:project6) do
+ projects.create!(namespace_id: namespace6.id, project_namespace_id: namespace6.id, packages_enabled: false)
+ end
+
+ let!(:project_feature1) do
+ project_features.create!(
+ project_id: project1.id,
+ package_registry_access_level: ProjectFeature::ENABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature2) do
+ project_features.create!(
+ project_id: project2.id,
+ package_registry_access_level: ProjectFeature::ENABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature3) do
+ project_features.create!(
+ project_id: project3.id,
+ package_registry_access_level: ProjectFeature::DISABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature4) do
+ project_features.create!(
+ project_id: project4.id,
+ package_registry_access_level: ProjectFeature::DISABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature5) do
+ project_features.create!(
+ project_id: project5.id,
+ package_registry_access_level: ProjectFeature::DISABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature6) do
+ project_features.create!(
+ project_id: project6.id,
+ package_registry_access_level: ProjectFeature::ENABLED,
+ **non_null_project_features
+ )
+ end
+
+ subject(:perform_migration) do
+ described_class.new(start_id: project1.id,
+ end_id: project5.id,
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ it 'backfills project_features.package_registry_access_level', :aggregate_failures do
+ perform_migration
+
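+ # packages_enabled false or nil maps to DISABLED; when enabled, the level mirrors project visibility.
+ # project_feature6 belongs to a project outside the migrated id range (end_id: project5.id) and keeps its value.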
+ expect(project_feature1.reload.package_registry_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project_feature2.reload.package_registry_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project_feature3.reload.package_registry_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project_feature4.reload.package_registry_access_level).to eq(ProjectFeature::ENABLED)
+ expect(project_feature5.reload.package_registry_access_level).to eq(ProjectFeature::PUBLIC)
+ expect(project_feature6.reload.package_registry_access_level).to eq(ProjectFeature::ENABLED)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
new file mode 100644
index 00000000000..ca7ca41a33e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectMemberNamespaceId, :migration, schema: 20220516054011 do
+ let(:migration) do
+ described_class.new(start_id: 1, end_id: 10,
+ batch_table: table_name, batch_column: batch_column,
+ sub_batch_size: sub_batch_size, pause_ms: pause_ms,
+ connection: ApplicationRecord.connection)
+ end
+
+ let(:members_table) { table(:members) }
+ let(:projects_table) { table(:projects) }
+ let(:namespaces_table) { table(:namespaces) }
+
+ let(:table_name) { 'members' }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 100 }
+ let(:pause_ms) { 0 }
+
+ subject(:perform_migration) do
+ migration.perform
+ end
+
+ before do
+ namespaces_table.create!(id: 201, name: 'group1', path: 'group1', type: 'Group')
+ namespaces_table.create!(id: 202, name: 'group2', path: 'group2', type: 'Group')
+ namespaces_table.create!(id: 300, name: 'project-namespace-1', path: 'project-namespace-1-path', type: 'Project')
+ namespaces_table.create!(id: 301, name: 'project-namespace-2', path: 'project-namespace-2-path', type: 'Project')
+ namespaces_table.create!(id: 302, name: 'project-namespace-3', path: 'project-namespace-3-path', type: 'Project')
+
+ projects_table.create!(id: 100, name: 'project1', path: 'project1', namespace_id: 202, project_namespace_id: 300)
+ projects_table.create!(id: 101, name: 'project2', path: 'project2', namespace_id: 202, project_namespace_id: 301)
+ projects_table.create!(id: 102, name: 'project3', path: 'project3', namespace_id: 202, project_namespace_id: 302)
+
+ # project1, no member namespace (fill in)
+ members_table.create!(id: 1, source_id: 100,
+ source_type: 'Project', type: 'ProjectMember',
+ member_namespace_id: nil, access_level: 10, notification_level: 3)
+ # bogus source id, no member namespace id (do nothing)
+ members_table.create!(id: 2, source_id: non_existing_record_id,
+ source_type: 'Project', type: 'ProjectMember',
+ member_namespace_id: nil, access_level: 10, notification_level: 3)
+ # project3, existing member namespace id (do nothing)
+ members_table.create!(id: 3, source_id: 102,
+ source_type: 'Project', type: 'ProjectMember',
+ member_namespace_id: 300, access_level: 10, notification_level: 3)
+
+ # Group memberships (do not change)
+ # group1, no member namespace (do nothing)
+ members_table.create!(id: 4, source_id: 201,
+ source_type: 'Namespace', type: 'GroupMember',
+ member_namespace_id: nil, access_level: 10, notification_level: 3)
+ # group2, existing member namespace (do nothing)
+ members_table.create!(id: 5, source_id: 202,
+ source_type: 'Namespace', type: 'GroupMember',
+ member_namespace_id: 201, access_level: 10, notification_level: 3)
+
+ # Project Namespace memberships (do not change)
+ # project namespace, existing member namespace (do nothing)
+ members_table.create!(id: 6, source_id: 300,
+ source_type: 'Namespace', type: 'ProjectNamespaceMember',
+ member_namespace_id: 201, access_level: 10, notification_level: 3)
+ # project namespace, not member namespace (do nothing)
+ members_table.create!(id: 7, source_id: 301,
+ source_type: 'Namespace', type: 'ProjectNamespaceMember',
+ member_namespace_id: 201, access_level: 10, notification_level: 3)
+ end
+
+ it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 2
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
+
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ # rubocop:disable Layout/LineLength
+ expect(queries.count).to eq(3)
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1 # just the bogus one
+ expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 300, 300])
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
+ expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([nil, 201])
+ # rubocop:enable Layout/LineLength
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+
+ context 'when given a negative pause_ms' do
+ let(:pause_ms) { -9 }
+ let(:sub_batch_size) { 2 }
+
+ it 'uses 0 as a floor for pause_ms' do
+ expect(migration).to receive(:sleep).with(0)
+
+ perform_migration
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
new file mode 100644
index 00000000000..a09d5559d33
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# This spec requires a schema version from before the NOT NULL constraint was added to routes#namespace_id
+RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedRoutes, schema: 20220606060825 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:routes) { table(:routes) }
+
+ let!(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'space1') }
+ let!(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'space2') }
+ let!(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'space3') }
+
+ let!(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id) }
+ let!(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace2.id) }
+ let!(:proj_namespace3) { namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: namespace3.id) }
+
+ # rubocop:disable Layout/LineLength
+ let!(:proj1) { projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id) }
+ let!(:proj2) { projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace2.id, project_namespace_id: proj_namespace2.id) }
+ let!(:proj3) { projects.create!(name: 'proj3', path: 'proj3', namespace_id: namespace3.id, project_namespace_id: proj_namespace3.id) }
+
+ # valid namespace routes with not null namespace_id
+ let!(:namespace_route1) { routes.create!(path: 'space1', source_id: namespace1.id, source_type: 'Namespace', namespace_id: namespace1.id) }
+ # valid namespace routes with null namespace_id
+ let!(:namespace_route2) { routes.create!(path: 'space1/space2', source_id: namespace2.id, source_type: 'Namespace') }
+ let!(:namespace_route3) { routes.create!(path: 'space1/space3', source_id: namespace3.id, source_type: 'Namespace') }
+ # invalid/orphaned namespace routes
+ let!(:orphaned_namespace_route_a) { routes.create!(path: 'space1/space4', source_id: non_existing_record_id, source_type: 'Namespace') }
+ let!(:orphaned_namespace_route_b) { routes.create!(path: 'space1/space5', source_id: non_existing_record_id - 1, source_type: 'Namespace') }
+
+ # valid project routes with not null namespace_id
+ let!(:proj_route1) { routes.create!(path: 'space1/proj1', source_id: proj1.id, source_type: 'Project', namespace_id: proj_namespace1.id) }
+ # valid project routes with null namespace_id
+ let!(:proj_route2) { routes.create!(path: 'space1/space2/proj2', source_id: proj2.id, source_type: 'Project') }
+ let!(:proj_route3) { routes.create!(path: 'space1/space3/proj3', source_id: proj3.id, source_type: 'Project') }
+ # invalid/orphaned project routes
+ let!(:orphaned_project_route_a) { routes.create!(path: 'space1/space3/proj5', source_id: non_existing_record_id, source_type: 'Project') }
+ let!(:orphaned_project_route_b) { routes.create!(path: 'space1/space3/proj6', source_id: non_existing_record_id - 1, source_type: 'Project') }
+ # rubocop:enable Layout/LineLength
+
+ let!(:migration_attrs) do
+ {
+ start_id: Route.minimum(:id),
+ end_id: Route.maximum(:id),
+ batch_table: :routes,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ subject(:perform_migration) { migration.perform }
+
+ it 'cleans orphaned routes', :aggregate_failures do
+ all_route_ids = Route.pluck(:id)
+
+ orphaned_route_ids = [
+ orphaned_namespace_route_a, orphaned_namespace_route_b, orphaned_project_route_a, orphaned_project_route_b
+ ].pluck(:id)
+ remaining_routes = (all_route_ids - orphaned_route_ids).sort
+
+ expect { perform_migration }.to change { Route.pluck(:id) }.to contain_exactly(*remaining_routes)
+ expect(Route.all).to all(have_attributes(namespace_id: be_present))
+
+ # routes that already had namespace_id set keep their original namespace_id
+ expect(namespace_route1.reload.namespace_id).to eq(namespace1.id)
+ expect(proj_route1.reload.namespace_id).to eq(proj_namespace1.id)
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
index 7334867e8fb..38e8b159e63 100644
--- a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
+++ b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties do
+RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties, schema: 20220415124804 do
let(:integrations) do
table(:integrations) do |integrations|
integrations.send :attr_encrypted, :encrypted_properties_tmp,
diff --git a/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb
index c343ee438b8..99df21562b0 100644
--- a/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb
@@ -2,314 +2,23 @@
require 'spec_helper'
-# The underlying migration relies on the global models (e.g. Project). This
-# means we also need to use FactoryBot factories to ensure everything is
-# operating using the same types. If we use `table()` and similar methods we
-# would have to duplicate a lot of logic just for these tests.
-#
# rubocop: disable RSpec/FactoriesInMigrationSpecs
RSpec.describe Gitlab::BackgroundMigration::FixMergeRequestDiffCommitUsers do
let(:migration) { described_class.new }
describe '#perform' do
context 'when the project exists' do
- it 'processes the project' do
+ it 'does nothing' do
project = create(:project)
- expect(migration).to receive(:process).with(project)
- expect(migration).to receive(:schedule_next_job)
-
- migration.perform(project.id)
- end
-
- it 'marks the background job as finished' do
- project = create(:project)
-
- Gitlab::Database::BackgroundMigrationJob.create!(
- class_name: 'FixMergeRequestDiffCommitUsers',
- arguments: [project.id]
- )
-
- migration.perform(project.id)
-
- job = Gitlab::Database::BackgroundMigrationJob
- .find_by(class_name: 'FixMergeRequestDiffCommitUsers')
-
- expect(job.status).to eq('succeeded')
+ expect { migration.perform(project.id) }.not_to raise_error
end
end
context 'when the project does not exist' do
it 'does nothing' do
- expect(migration).not_to receive(:process)
- expect(migration).to receive(:schedule_next_job)
-
- migration.perform(-1)
- end
- end
- end
-
- describe '#process' do
- it 'processes the merge requests of the project' do
- project = create(:project, :repository)
- commit = project.commit
- mr = create(
- :merge_request_with_diffs,
- source_project: project,
- target_project: project
- )
-
- diff = mr.merge_request_diffs.first
-
- create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000
- )
-
- migration.process(project)
-
- updated = diff
- .merge_request_diff_commits
- .find_by(sha: commit.sha, relative_order: 9000)
-
- expect(updated.commit_author_id).not_to be_nil
- expect(updated.committer_id).not_to be_nil
- end
- end
-
- describe '#update_commit' do
- let(:project) { create(:project, :repository) }
- let(:mr) do
- create(
- :merge_request_with_diffs,
- source_project: project,
- target_project: project
- )
- end
-
- let(:diff) { mr.merge_request_diffs.first }
- let(:commit) { project.commit }
-
- def update_row(migration, project, diff, row)
- migration.update_commit(project, row)
-
- diff
- .merge_request_diff_commits
- .find_by(sha: row.sha, relative_order: row.relative_order)
- end
-
- it 'populates missing commit authors' do
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.commit_author.name).to eq(commit.to_hash[:author_name])
- expect(updated.commit_author.email).to eq(commit.to_hash[:author_email])
- end
-
- it 'populates missing committers' do
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.committer.name).to eq(commit.to_hash[:committer_name])
- expect(updated.committer.email).to eq(commit.to_hash[:committer_email])
- end
-
- it 'leaves existing commit authors as-is' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- commit_author: user
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.commit_author).to eq(user)
- end
-
- it 'leaves existing committers as-is' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- committer: user
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.committer).to eq(user)
- end
-
- it 'does nothing when both the author and committer are present' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- committer: user,
- commit_author: user
- )
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.update_commit(project, commit_row)
- end
-
- expect(recorder.count).to be_zero
- end
-
- it 'does nothing if the commit does not exist in Git' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: 'kittens',
- relative_order: 9000,
- committer: user,
- commit_author: user
- )
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.update_commit(project, commit_row)
+ expect { migration.perform(-1) }.not_to raise_error
end
-
- expect(recorder.count).to be_zero
- end
-
- it 'does nothing when the committer/author are missing in the Git commit' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- committer: user,
- commit_author: user
- )
-
- allow(migration).to receive(:find_or_create_user).and_return(nil)
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.update_commit(project, commit_row)
- end
-
- expect(recorder.count).to be_zero
- end
- end
-
- describe '#schedule_next_job' do
- it 'schedules the next background migration' do
- Gitlab::Database::BackgroundMigrationJob
- .create!(class_name: 'FixMergeRequestDiffCommitUsers', arguments: [42])
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .with(2.minutes, 'FixMergeRequestDiffCommitUsers', [42])
-
- migration.schedule_next_job
- end
-
- it 'does nothing when there are no jobs' do
- expect(BackgroundMigrationWorker)
- .not_to receive(:perform_in)
-
- migration.schedule_next_job
- end
- end
-
- describe '#find_commit' do
- let(:project) { create(:project, :repository) }
-
- it 'finds a commit using Git' do
- commit = project.commit
- found = migration.find_commit(project, commit.sha)
-
- expect(found).to eq(commit.to_hash)
- end
-
- it 'caches the results' do
- commit = project.commit
-
- migration.find_commit(project, commit.sha)
-
- expect { migration.find_commit(project, commit.sha) }
- .not_to change { Gitlab::GitalyClient.get_request_count }
- end
-
- it 'returns an empty hash if the commit does not exist' do
- expect(migration.find_commit(project, 'kittens')).to eq({})
- end
- end
-
- describe '#find_or_create_user' do
- let(:project) { create(:project, :repository) }
-
- it 'creates missing users' do
- commit = project.commit.to_hash
- id = migration.find_or_create_user(commit, :author_name, :author_email)
-
- expect(MergeRequest::DiffCommitUser.count).to eq(1)
-
- created = MergeRequest::DiffCommitUser.first
-
- expect(created.name).to eq(commit[:author_name])
- expect(created.email).to eq(commit[:author_email])
- expect(created.id).to eq(id)
- end
-
- it 'returns users that already exist' do
- commit = project.commit.to_hash
- user1 = migration.find_or_create_user(commit, :author_name, :author_email)
- user2 = migration.find_or_create_user(commit, :author_name, :author_email)
-
- expect(user1).to eq(user2)
- end
-
- it 'caches the results' do
- commit = project.commit.to_hash
-
- migration.find_or_create_user(commit, :author_name, :author_email)
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.find_or_create_user(commit, :author_name, :author_email)
- end
-
- expect(recorder.count).to be_zero
- end
-
- it 'returns nil if the commit details are missing' do
- id = migration.find_or_create_user({}, :author_name, :author_email)
-
- expect(id).to be_nil
- end
- end
-
- describe '#matches_row' do
- it 'returns the query matches for the composite primary key' do
- row = double(:commit, merge_request_diff_id: 4, relative_order: 5)
- arel = migration.matches_row(row)
-
- expect(arel.to_sql).to eq(
- '("merge_request_diff_commits"."merge_request_diff_id", "merge_request_diff_commits"."relative_order") = (4, 5)'
- )
end
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb
deleted file mode 100644
index 557dd8ddee6..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigratePagesToZipStorage do
- let(:namespace) { create(:group) } # rubocop: disable RSpec/FactoriesInMigrationSpecs
- let(:migration) { described_class.new }
-
- describe '#perform' do
- context 'when there is project to migrate' do
- let!(:project) { create_project('project') }
-
- after do
- FileUtils.rm_rf(project.pages_path)
- end
-
- it 'migrates project to zip storage' do
- expect_next_instance_of(::Pages::MigrateFromLegacyStorageService,
- anything,
- ignore_invalid_entries: false,
- mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute_for_batch).with(project.id..project.id).and_call_original
- end
-
- migration.perform(project.id, project.id)
-
- expect(project.reload.pages_metadatum.pages_deployment.file.filename).to eq("_migrated.zip")
- end
- end
- end
-
- def create_project(path)
- project = create(:project) # rubocop: disable RSpec/FactoriesInMigrationSpecs
- project.mark_pages_as_deployed
-
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
-
- project
- end
-end
diff --git a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
new file mode 100644
index 00000000000..035ea6eadcf
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::SetLegacyOpenSourceLicenseAvailableForNonPublicProjects,
+ :migration,
+ schema: 20220520040416 do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:project_settings_table) { table(:project_settings) }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: 1,
+ end_id: 30,
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ let(:queries) { ActiveRecord::QueryRecorder.new { perform_migration } }
+
+ before do
+ namespaces_table.create!(id: 1, name: 'namespace', path: 'namespace-path-1')
+ namespaces_table.create!(id: 2, name: 'namespace', path: 'namespace-path-2', type: 'Project')
+ namespaces_table.create!(id: 3, name: 'namespace', path: 'namespace-path-3', type: 'Project')
+ namespaces_table.create!(id: 4, name: 'namespace', path: 'namespace-path-4', type: 'Project')
+
+ projects_table
+ .create!(id: 11, name: 'proj-1', path: 'path-1', namespace_id: 1, project_namespace_id: 2, visibility_level: 0)
+ projects_table
+ .create!(id: 12, name: 'proj-2', path: 'path-2', namespace_id: 1, project_namespace_id: 3, visibility_level: 10)
+ projects_table
+ .create!(id: 13, name: 'proj-3', path: 'path-3', namespace_id: 1, project_namespace_id: 4, visibility_level: 20)
+
+ project_settings_table.create!(project_id: 11, legacy_open_source_license_available: true)
+ project_settings_table.create!(project_id: 12, legacy_open_source_license_available: true)
+ project_settings_table.create!(project_id: 13, legacy_open_source_license_available: true)
+ end
+
+ it 'sets `legacy_open_source_license_available` attribute to false for non-public projects', :aggregate_failures do
+ expect(queries.count).to eq(3)
+
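+ # Only the non-public projects (visibility 0 and 10) are flipped to false; the public project (20) keeps true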
+ expect(migrated_attribute(11)).to be_falsey
+ expect(migrated_attribute(12)).to be_falsey
+ expect(migrated_attribute(13)).to be_truthy
+ end
+
+ def migrated_attribute(project_id)
+ project_settings_table.find(project_id).legacy_open_source_license_available
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
index d2abdb740f8..ab4be5a909a 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
let(:project) { create(:project, :repository, import_url: import_url, creator: project_creator) }
let(:now) { Time.now.utc.change(usec: 0) }
let(:project_key) { 'TEST' }
- let(:repo_slug) { 'rouge' }
+ let(:repo_slug) { 'rouge-repo' }
let(:sample) { RepoHelpers.sample_compare }
subject { described_class.new(project, recover_missing_commits: true) }
diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb
index 41ec11c1055..60118823b5a 100644
--- a/spec/lib/gitlab/checks/changes_access_spec.rb
+++ b/spec/lib/gitlab/checks/changes_access_spec.rb
@@ -49,56 +49,26 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
context 'when changes contain empty revisions' do
let(:expected_commit) { instance_double(Commit) }
- let(:expected_allow_quarantine) { allow_quarantine }
shared_examples 'returns only commits with non empty revisions' do
- before do
- stub_feature_flags(filter_quarantined_commits: filter_quarantined_commits)
- end
-
specify do
expect(project.repository)
.to receive(:new_commits)
- .with([newrev], allow_quarantine: expected_allow_quarantine) { [expected_commit] }
+ .with([newrev]) { [expected_commit] }
expect(subject.commits).to match_array([expected_commit])
end
end
- it_behaves_like 'returns only commits with non empty revisions' do
+ context 'with oldrev' do
let(:changes) { [{ oldrev: oldrev, newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
- let(:allow_quarantine) { true }
- let(:filter_quarantined_commits) { true }
+
+ it_behaves_like 'returns only commits with non empty revisions'
end
context 'without oldrev' do
let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
- context 'with disallowed quarantine' do
- # The quarantine directory should not be used because we're lacking
- # oldrev, and we're not filtering commits.
- let(:allow_quarantine) { false }
- let(:filter_quarantined_commits) { false }
-
- it_behaves_like 'returns only commits with non empty revisions'
- end
-
- context 'with allowed quarantine and :filter_quarantined_commits disabled' do
- # When we allow usage of the quarantine but have no oldrev and we're
- # not filtering commits then results returned by the quarantine aren't
- # accurate. We thus mustn't try using it.
- let(:allow_quarantine) { true }
- let(:filter_quarantined_commits) { false }
- let(:expected_allow_quarantine) { false }
-
- it_behaves_like 'returns only commits with non empty revisions'
- end
-
- context 'with allowed quarantine and :filter_quarantined_commits enabled' do
- let(:allow_quarantine) { true }
- let(:filter_quarantined_commits) { true }
-
- it_behaves_like 'returns only commits with non empty revisions'
- end
+ it_behaves_like 'returns only commits with non empty revisions'
end
end
end
diff --git a/spec/lib/gitlab/checks/single_change_access_spec.rb b/spec/lib/gitlab/checks/single_change_access_spec.rb
index 1b34e58797e..8d9f96dd2b4 100644
--- a/spec/lib/gitlab/checks/single_change_access_spec.rb
+++ b/spec/lib/gitlab/checks/single_change_access_spec.rb
@@ -96,26 +96,14 @@ RSpec.describe Gitlab::Checks::SingleChangeAccess do
let(:provided_commits) { nil }
before do
- stub_feature_flags(filter_quarantined_commits: filter_quarantined_commits)
-
expect(project.repository)
.to receive(:new_commits)
- .with(newrev, allow_quarantine: filter_quarantined_commits)
+ .with(newrev)
.once
.and_return(expected_commits)
end
- context 'with :filter_quarantined_commits disabled' do
- let(:filter_quarantined_commits) { false }
-
- it_behaves_like '#commits'
- end
-
- context 'with :filter_quarantined_commits enabled' do
- let(:filter_quarantined_commits) { true }
-
- it_behaves_like '#commits'
- end
+ it_behaves_like '#commits'
end
end
end
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
index e2e7d9c9648..6cd3a2d1c07 100644
--- a/spec/lib/gitlab/checks/tag_check_spec.rb
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -26,8 +26,18 @@ RSpec.describe Gitlab::Checks::TagCheck do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
- it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot be deleted/)
+ context 'via web interface' do
+ let(:protocol) { 'web' }
+
+ it 'is allowed' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+ context 'via SSH' do
+ it 'is prevented' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /only delete.*web interface/)
+ end
end
end
@@ -41,6 +51,21 @@ RSpec.describe Gitlab::Checks::TagCheck do
end
end
+ context 'as developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'deletion' do
+ let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+
+ it 'is prevented' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /not allowed to delete/)
+ end
+ end
+ end
+
context 'creation' do
let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb
index 630dfcd06bb..8f77a1f60ad 100644
--- a/spec/lib/gitlab/ci/build/image_spec.rb
+++ b/spec/lib/gitlab/ci/build/image_spec.rb
@@ -28,8 +28,14 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined as hash' do
let(:entrypoint) { '/bin/sh' }
+ let(:pull_policy) { %w[always if-not-present] }
- let(:job) { create(:ci_build, options: { image: { name: image_name, entrypoint: entrypoint, ports: [80] } } ) }
+ let(:job) do
+ create(:ci_build, options: { image: { name: image_name,
+ entrypoint: entrypoint,
+ ports: [80],
+ pull_policy: pull_policy } } )
+ end
it 'fabricates an object of the proper class' do
is_expected.to be_kind_of(described_class)
@@ -38,6 +44,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
it 'populates fabricated object with the proper attributes' do
expect(subject.name).to eq(image_name)
expect(subject.entrypoint).to eq(entrypoint)
+ expect(subject.pull_policy).to eq(pull_policy)
end
it 'populates the ports' do
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index e16a9a7a74a..bd1ab5d8c41 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -1,8 +1,16 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'support/helpers/stubbed_feature'
+require 'support/helpers/stub_feature_flags'
RSpec.describe Gitlab::Ci::Config::Entry::Image do
+ include StubFeatureFlags
+
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: true)
+ end
+
let(:entry) { described_class.new(config) }
context 'when configuration is a string' do
@@ -43,6 +51,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
expect(entry.ports).to be_nil
end
end
+
+ describe '#pull_policy' do
+ it "returns nil" do
+ expect(entry.pull_policy).to be_nil
+ end
+ end
end
context 'when configuration is a hash' do
@@ -109,6 +123,56 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
end
+
+ context 'when configuration has pull_policy' do
+ let(:config) { { name: 'image:1.0', pull_policy: 'if-not-present' } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ entry.compose!
+
+ expect(entry).to be_valid
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'is not valid' do
+ entry.compose!
+
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include('image config contains unknown keys: pull_policy')
+ end
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ entry.compose!
+
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ pull_policy: ['if-not-present']
+ )
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'does not include the pull policy in the value' do
+ entry.compose!
+
+ expect(entry.value).to eq(
+ name: 'image:1.0'
+ )
+ end
+ end
+ end
+ end
end
context 'when entry value is not correct' do
diff --git a/spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb b/spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb
new file mode 100644
index 00000000000..c35355b10c6
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::PullPolicy do
+ let(:entry) { described_class.new(config) }
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ context 'when config value is nil' do
+ let(:config) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when config value is an empty array' do
+ let(:config) { [] }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'when config value is a string' do
+ let(:config) { "always" }
+
+ it { is_expected.to eq(%w[always]) }
+ end
+
+ context 'when config value is an array' do
+ let(:config) { %w[always if-not-present] }
+
+ it { is_expected.to eq(%w[always if-not-present]) }
+ end
+ end
+
+ describe 'validation' do
+ subject(:valid?) { entry.valid? }
+
+ context 'when config value is nil' do
+ let(:config) { nil }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when config value is an empty array' do
+ let(:config) { [] }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when config value is a hash' do
+ let(:config) { {} }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when config value is a string' do
+ let(:config) { "always" }
+
+ it { is_expected.to eq(true) }
+
+ context 'when it is an invalid policy' do
+ let(:config) { "invalid" }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when it is an empty string' do
+ let(:config) { "" }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when config value is an array' do
+ let(:config) { %w[always if-not-present] }
+
+ it { is_expected.to eq(true) }
+
+ context 'when config contains an invalid policy' do
+ let(:config) { %w[always invalid] }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+end
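
The new entry normalizes its input: a single policy string becomes a one-element array, while empty or unknown values fail validation. A minimal, hedged usage sketch inferred only from the expectations above (the accepted policy names shown are the ones the spec exercises):

    # Sketch based on the spec expectations above.
    entry = Gitlab::Ci::Config::Entry::PullPolicy.new('if-not-present')
    entry.valid? # => true
    entry.value  # => ["if-not-present"], a single string is wrapped in an array

    entry = Gitlab::Ci::Config::Entry::PullPolicy.new(%w[always invalid])
    entry.valid? # => false, 'invalid' is not an accepted policy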
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
new file mode 100644
index 00000000000..3ed4a9f263f
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
+ let(:factory) do
+ Gitlab::Config::Entry::Factory.new(described_class)
+ .value(config)
+ end
+
+ subject(:entry) { factory.create! }
+
+ before do
+ entry.compose!
+ end
+
+ describe '.new' do
+ context 'when using a string array' do
+ let(:config) { %w[app/ lib/ spec/ other/* paths/**/*.rb] }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when using an integer array' do
+ let(:config) { [1, 2] }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(entry.errors).to include(/changes config should be an array of strings/)
+ end
+ end
+
+ context 'when using a string' do
+ let(:config) { 'a regular string' }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports an error about invalid policy' do
+ expect(entry.errors).to include(/should be an array of strings/)
+ end
+ end
+
+ context 'when using a long array' do
+ let(:config) { ['app/'] * 51 }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(entry.errors).to include(/has too many entries \(maximum 50\)/)
+ end
+ end
+
+ context 'when clause is empty' do
+ let(:config) {}
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when policy strategy does not match' do
+ let(:config) { 'string strategy' }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include(/should be an array of strings/)
+ end
+ end
+ end
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ context 'when using a string array' do
+ let(:config) { %w[app/ lib/ spec/ other/* paths/**/*.rb] }
+
+ it { is_expected.to eq(config) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 86270788431..89d349efe8f 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -18,6 +18,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
let(:entry) { factory.create! }
+ before do
+ entry.compose!
+ end
+
describe '.new' do
subject { entry }
@@ -121,7 +125,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns errors' do
- expect(subject.errors).to include(/changes should be an array of strings/)
+ expect(subject.errors).to include(/changes config should be an array of strings/)
end
end
@@ -131,7 +135,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns errors' do
- expect(subject.errors).to include(/changes is too long \(maximum is 50 characters\)/)
+ expect(subject.errors).to include(/changes config has too many entries \(maximum 50\)/)
end
end
@@ -434,6 +438,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
describe '.default' do
+ let(:config) {}
+
it 'does not have default value' do
expect(described_class.default).to be_nil
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 2d2adf09a42..7e1b31fea6a 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
include StubRequests
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { project.owner }
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
@@ -34,6 +34,19 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
describe '#process' do
subject(:process) { mapper.process }
+ shared_examples 'logging config file fetch' do |key, count|
+ it 'propagates the pipeline logger' do
+ process
+
+ fetch_content_log_count = mapper
+ .logger
+ .observations_hash
+ .dig(key, 'count')
+
+ expect(fetch_content_log_count).to eq(count)
+ end
+ end
+
context "when single 'include' keyword is defined" do
context 'when the string is a local file' do
let(:values) do
@@ -45,6 +58,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Local))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_local_content_duration_s', 1
end
context 'when the key is a local file hash' do
@@ -68,6 +83,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Remote))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_remote_content_duration_s', 1
end
context 'when the key is a remote file hash' do
@@ -92,6 +109,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Template))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_template_content_duration_s', 1
end
context 'when the key is a hash of file and remote' do
@@ -118,6 +137,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Project))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 1
end
context "when the key is project's files" do
@@ -131,6 +152,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
an_instance_of(Gitlab::Ci::Config::External::File::Project),
an_instance_of(Gitlab::Ci::Config::External::File::Project))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 2
end
end
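
The shared example above reads the pipeline logger's accumulated observations to verify that each include type records a fetch duration. A small sketch of the lookup it performs, using only names visible in the diff:

    # Sketch of the logger lookup used by the shared example.
    mapper
      .logger
      .observations_hash
      .dig('config_file_fetch_local_content_duration_s', 'count')
    # => 1 when a single local include has been processed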
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
index b0d6f5adfb1..179e2efc0c7 100644
--- a/spec/lib/gitlab/ci/jwt_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -160,20 +160,8 @@ RSpec.describe Gitlab::Ci::Jwt do
subject(:jwt) { described_class.for_build(build) }
- context 'when ci_jwt_signing_key feature flag is disabled' do
+ context 'when ci_jwt_signing_key is present' do
before do
- stub_feature_flags(ci_jwt_signing_key: false)
-
- allow(Rails.application.secrets).to receive(:openid_connect_signing_key).and_return(rsa_key_data)
- end
-
- it_behaves_like 'generating JWT for build'
- end
-
- context 'when ci_jwt_signing_key feature flag is enabled' do
- before do
- stub_feature_flags(ci_jwt_signing_key: true)
-
stub_application_setting(ci_jwt_signing_key: rsa_key_data)
end
diff --git a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
index 0580cb9922b..a9851d78f48 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
subject(:parse_report) { Nokogiri::XML::SAX::Parser.new(described_class.new(coverage_report, project_path, paths)).parse(cobertura) }
describe '#parse!' do
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReport.new }
let(:project_path) { 'foo/bar' }
let(:paths) { ['app/user.rb'] }
diff --git a/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
index 1d361e16aad..e8f1d617cb7 100644
--- a/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::SecretDetection do
end
it "generates expected metadata_version" do
- expect(report.findings.first.metadata_version).to eq('3.0')
+ expect(report.findings.first.metadata_version).to eq('14.1.2')
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb
index aa8aec2af4a..69d809aee85 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb
@@ -30,10 +30,8 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
context 'when the limit is exceeded' do
before do
- allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
- .and_return(pipelines_create: { threshold: 1, interval: 1.minute })
-
- stub_feature_flags(ci_throttle_pipelines_creation_dry_run: false)
+ stub_application_setting(pipeline_limit_per_project_user_sha: 1)
+ stub_feature_flags(ci_enforce_throttle_pipelines_creation_override: false)
end
it 'does not persist the pipeline' do
@@ -55,7 +53,9 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
class: described_class.name,
project_id: project.id,
subscription_plan: project.actual_plan_name,
- commit_sha: command.sha
+ commit_sha: command.sha,
+ throttled: true,
+ throttle_override: false
)
)
@@ -101,9 +101,9 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
end
end
- context 'when ci_throttle_pipelines_creation is disabled' do
+ context 'when ci_enforce_throttle_pipelines_creation is disabled' do
before do
- stub_feature_flags(ci_throttle_pipelines_creation: false)
+ stub_feature_flags(ci_enforce_throttle_pipelines_creation: false)
end
it 'does not break the chain' do
@@ -118,16 +118,25 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
expect(pipeline.errors).to be_empty
end
- it 'does not log anything' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ a_hash_including(
+ class: described_class.name,
+ project_id: project.id,
+ subscription_plan: project.actual_plan_name,
+ commit_sha: command.sha,
+ throttled: false,
+ throttle_override: false
+ )
+ )
perform
end
end
- context 'when ci_throttle_pipelines_creation_dry_run is enabled' do
+ context 'when ci_enforce_throttle_pipelines_creation_override is enabled' do
before do
- stub_feature_flags(ci_throttle_pipelines_creation_dry_run: true)
+ stub_feature_flags(ci_enforce_throttle_pipelines_creation_override: true)
end
it 'does not break the chain' do
@@ -148,7 +157,9 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
class: described_class.name,
project_id: project.id,
subscription_plan: project.actual_plan_name,
- commit_sha: command.sha
+ commit_sha: command.sha,
+ throttled: false,
+ throttle_override: true
)
)
diff --git a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
index 5b0917c5c6f..8f727749ee2 100644
--- a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
@@ -4,9 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Quota::Deployments do
let_it_be_with_refind(:namespace) { create(:namespace) }
- let_it_be_with_reload(:default_plan) { create(:default_plan) }
let_it_be_with_reload(:project) { create(:project, :repository, namespace: namespace) }
- let_it_be(:plan_limits) { create(:plan_limits, plan: default_plan) }
+ let_it_be(:plan_limits) { create(:plan_limits, :default_plan) }
let(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
diff --git a/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb b/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb
new file mode 100644
index 00000000000..eec218346c2
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::CoverageReportGenerator, factory_default: :keep do
+ let_it_be(:project) { create_default(:project, :repository).freeze }
+ let_it_be(:pipeline) { build(:ci_pipeline, :with_coverage_reports) }
+
+ describe '#report' do
+ subject { described_class.new(pipeline).report }
+
+ let_it_be(:pipeline) { create(:ci_pipeline, :success) }
+
+ shared_examples 'having a coverage report' do
+ it 'returns coverage reports with collected data' do
+ expected_files = [
+ "auth/token.go",
+ "auth/rpccredentials.go",
+ "app/controllers/abuse_reports_controller.rb"
+ ]
+
+ expect(subject.files.keys).to match_array(expected_files)
+ end
+ end
+
+ context 'when pipeline has multiple builds with coverage reports' do
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it_behaves_like 'having a coverage report'
+
+ context 'and it is a child pipeline' do
+ let!(:pipeline) { create(:ci_pipeline, :success, child_of: build(:ci_pipeline)) }
+
+ it 'returns empty coverage report' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ context 'when builds are retried' do
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', retried: true, pipeline: pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', retried: true, pipeline: pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it 'does not take retried builds into account' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'when pipeline does not have any builds with coverage reports' do
+ it 'returns empty coverage reports' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'when pipeline has child pipeline with builds that have coverage reports' do
+ let!(:child_pipeline) { create(:ci_pipeline, :success, child_of: pipeline) }
+
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: child_pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: child_pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it_behaves_like 'having a coverage report'
+
+ context 'when feature flag ci_child_pipeline_coverage_reports is disabled' do
+ before do
+ stub_feature_flags(ci_child_pipeline_coverage_reports: false)
+ end
+
+ it 'returns empty coverage reports' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ context 'when both parent and child pipeline have builds with coverage reports' do
+ let!(:child_pipeline) { create(:ci_pipeline, :success, child_of: pipeline) }
+
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: child_pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it_behaves_like 'having a coverage report'
+ end
+ end
+end
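
The generator collects coverage artifacts from the pipeline's own builds and, behind the ci_child_pipeline_coverage_reports flag, from child pipelines, while skipping retried builds. A minimal sketch of the call pattern the spec exercises (only #report, #empty? and #files come from the spec; the surrounding flow is illustrative):

    # Sketch: merge coverage reports for a pipeline, as in the spec above.
    report = Gitlab::Ci::Reports::CoverageReportGenerator.new(pipeline).report

    if report.empty?
      # no eligible builds with coverage artifacts (for example, only retried builds)
    else
      report.files.keys # => covered file paths across cobertura and gocov artifacts
    end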
diff --git a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb b/spec/lib/gitlab/ci/reports/coverage_report_spec.rb
index 41ebae863ee..53646f7dfc0 100644
--- a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/coverage_report_spec.rb
@@ -2,11 +2,25 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::CoverageReports do
+RSpec.describe Gitlab::Ci::Reports::CoverageReport do
let(:coverage_report) { described_class.new }
it { expect(coverage_report.files).to eq({}) }
+ describe '#empty?' do
+ context 'when no file has been added' do
+ it { expect(coverage_report.empty?).to be(true) }
+ end
+
+ context 'when file has been added' do
+ before do
+ coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
+ end
+
+ it { expect(coverage_report.empty?).to be(false) }
+ end
+ end
+
describe '#pick' do
before do
coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
index f2b4e7573c0..0353432741b 100644
--- a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
+++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
@@ -51,21 +51,22 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
end
end
- context 'with Gitlab::VERSION set to 14.0.123' do
+ context 'with Gitlab::VERSION set to 14.0.1' do
before do
- stub_version('14.0.123', 'deadbeef')
+ stub_version('14.0.1', 'deadbeef')
described_class.instance.reset!
end
context 'with valid params' do
where(:runner_version, :expected_result) do
- 'v14.1.0-rc3' | :not_available # not available since the GitLab instance is still on 14.0.x
- 'v14.1.0~beta.1574.gf6ea9389' | :not_available # suffixes are correctly handled
- 'v14.1.0/1.1.0' | :not_available # suffixes are correctly handled
- 'v14.1.0' | :not_available # not available since the GitLab instance is still on 14.0.x
+ 'v15.0.0' | :not_available # not available since the GitLab instance is still on 14.x and a major version might be incompatible
+ 'v14.1.0-rc3' | :recommended # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
+ 'v14.1.0~beta.1574.gf6ea9389' | :recommended # suffixes are correctly handled
+ 'v14.1.0/1.1.0' | :recommended # suffixes are correctly handled
+ 'v14.1.0' | :recommended # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
'v14.0.1' | :recommended # recommended upgrade since 14.0.2 is available
- 'v14.0.2' | :not_available # not available since 14.0.2 is the latest 14.0.x release available
+ 'v14.0.2' | :not_available # not available since 14.0.2 is the latest 14.0.x release available within the instance's major.minor version
'v13.10.1' | :available # available upgrade: 14.1.1
'v13.10.1~beta.1574.gf6ea9389' | :available # suffixes are correctly handled
'v13.10.1/1.1.0' | :available # suffixes are correctly handled
diff --git a/spec/lib/gitlab/ci/status/build/play_spec.rb b/spec/lib/gitlab/ci/status/build/play_spec.rb
index bb406623d2f..ade07a54877 100644
--- a/spec/lib/gitlab/ci/status/build/play_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/play_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Status::Build::Play do
- let(:user) { create(:user) }
- let(:project) { create(:project, :stubbed_repository) }
- let(:build) { create(:ci_build, :manual, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :stubbed_repository) }
+ let_it_be_with_refind(:build) { create(:ci_build, :manual, project: project) }
+
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
subject { described_class.new(status) }
diff --git a/spec/lib/gitlab/ci/status/build/scheduled_spec.rb b/spec/lib/gitlab/ci/status/build/scheduled_spec.rb
index b0cd1ac4dc5..a9f9b82767e 100644
--- a/spec/lib/gitlab/ci/status/build/scheduled_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/scheduled_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Status::Build::Scheduled do
- let(:user) { create(:user) }
- let(:project) { create(:project, :stubbed_repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :stubbed_repository) }
+
let(:build) { create(:ci_build, :scheduled, project: project) }
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
diff --git a/spec/lib/gitlab/ci/trace/archive_spec.rb b/spec/lib/gitlab/ci/trace/archive_spec.rb
index 5e965f94347..3ae0e5d1f0e 100644
--- a/spec/lib/gitlab/ci/trace/archive_spec.rb
+++ b/spec/lib/gitlab/ci/trace/archive_spec.rb
@@ -29,35 +29,59 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
let(:stream) { StringIO.new(trace, 'rb') }
let(:src_checksum) { Digest::MD5.hexdigest(trace) }
- context 'when the object store is disabled' do
- before do
- stub_artifacts_object_storage(enabled: false)
+ shared_examples 'valid' do
+ it 'does not count as invalid' do
+ subject.execute!(stream)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(error_reason: :archive_invalid_checksum)
end
+ end
- it 'skips validation' do
+ shared_examples 'local checksum only' do
+ it 'generates only local checksum' do
subject.execute!(stream)
+
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
- expect(metrics)
- .not_to have_received(:increment_error_counter)
- .with(error_reason: :archive_invalid_checksum)
end
end
- context 'with background_upload enabled' do
+ shared_examples 'skips validations' do
+ it_behaves_like 'valid'
+ it_behaves_like 'local checksum only'
+ end
+
+ shared_context 'with FIPS' do
+ context 'with FIPS enabled', :fips_mode do
+ it_behaves_like 'valid'
+
+ it 'does not generate md5 checksums' do
+ subject.execute!(stream)
+
+ expect(trace_metadata.checksum).to be_nil
+ expect(trace_metadata.remote_checksum).to be_nil
+ end
+ end
+ end
+
+ context 'when the object store is disabled' do
before do
- stub_artifacts_object_storage(background_upload: true)
+ stub_artifacts_object_storage(enabled: false)
end
- it 'skips validation' do
- subject.execute!(stream)
+ it_behaves_like 'skips validations'
+ include_context 'with FIPS'
+ end
- expect(trace_metadata.checksum).to eq(src_checksum)
- expect(trace_metadata.remote_checksum).to be_nil
- expect(metrics)
- .not_to have_received(:increment_error_counter)
- .with(error_reason: :archive_invalid_checksum)
+ context 'with background_upload enabled' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
end
+
+ it_behaves_like 'skips validations'
+ include_context 'with FIPS'
end
context 'with direct_upload enabled' do
@@ -65,27 +89,26 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
stub_artifacts_object_storage(direct_upload: true)
end
- it 'validates the archived trace' do
+ it_behaves_like 'valid'
+
+ it 'checksums match' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to eq(src_checksum)
- expect(metrics)
- .not_to have_received(:increment_error_counter)
- .with(error_reason: :archive_invalid_checksum)
end
context 'when the checksum does not match' do
let(:invalid_remote_checksum) { SecureRandom.hex }
before do
- expect(::Gitlab::Ci::Trace::RemoteChecksum)
+ allow(::Gitlab::Ci::Trace::RemoteChecksum)
.to receive(:new)
.with(an_instance_of(Ci::JobArtifact))
.and_return(double(md5_checksum: invalid_remote_checksum))
end
- it 'validates the archived trace' do
+ it 'counts as invalid' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
@@ -94,7 +117,11 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
.to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
+
+ include_context 'with FIPS'
end
+
+ include_context 'with FIPS'
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index e13a0993fa8..b0704ad7f50 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -64,6 +64,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
value: project.path },
{ key: 'CI_PROJECT_TITLE',
value: project.title },
+ { key: 'CI_PROJECT_DESCRIPTION',
+ value: project.description },
{ key: 'CI_PROJECT_PATH',
value: project.full_path },
{ key: 'CI_PROJECT_PATH_SLUG',
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 1910057622b..3dd9ca35881 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -7,7 +7,7 @@ module Gitlab
RSpec.describe YamlProcessor do
include StubRequests
- subject { described_class.new(config, user: nil).execute }
+ subject(:processor) { described_class.new(config, user: nil).execute }
shared_examples 'returns errors' do |error_message|
it 'adds a message when an error is encountered' do
@@ -965,6 +965,51 @@ module Gitlab
})
end
end
+
+ context 'when image has pull_policy' do
+ let(:config) do
+ <<~YAML
+ image:
+ name: ruby:2.7
+ pull_policy: if-not-present
+
+ test:
+ script: exit 0
+ YAML
+ end
+
+ it { is_expected.to be_valid }
+
+ it "returns image and service when defined" do
+ expect(processor.stage_builds_attributes("test")).to contain_exactly({
+ stage: "test",
+ stage_idx: 2,
+ name: "test",
+ only: { refs: %w[branches tags] },
+ options: {
+ script: ["exit 0"],
+ image: { name: "ruby:2.7", pull_policy: ["if-not-present"] }
+ },
+ allow_failure: false,
+ when: "on_success",
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ })
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it "returns no job" do
+ expect(processor.jobs).to eq({})
+ end
+ end
+ end
end
describe 'Variables' do
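
The pull_policy keyword travels from the YAML image definition through the Image entry into the job's options, and the whole configuration is rejected when the ci_docker_image_pull_policy flag is off. A minimal sketch of the round trip the spec performs (class and method names are taken from the spec; the output hash is abbreviated):

    # Sketch of the round trip exercised above.
    config = <<~YAML
      image:
        name: ruby:2.7
        pull_policy: if-not-present

      test:
        script: exit 0
    YAML

    result = Gitlab::Ci::YamlProcessor.new(config, user: nil).execute
    result.stage_builds_attributes('test').first.dig(:options, :image)
    # => { name: "ruby:2.7", pull_policy: ["if-not-present"] }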
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 2df85434f0e..109e83be294 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -178,6 +178,16 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['connect_src']).not_to include(snowplow_micro_url)
end
end
+
+ context 'when REVIEW_APPS_ENABLED is set' do
+ before do
+ stub_env('REVIEW_APPS_ENABLED', 'true')
+ end
+
+ it 'adds gitlab-org/gitlab merge requests API endpoint to CSP' do
+ expect(directives['connect_src']).to include('https://gitlab.com/api/v4/projects/278964/merge_requests/')
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/daemon_spec.rb b/spec/lib/gitlab/daemon_spec.rb
index 4d11b0bdc6c..14846e77a0c 100644
--- a/spec/lib/gitlab/daemon_spec.rb
+++ b/spec/lib/gitlab/daemon_spec.rb
@@ -34,6 +34,43 @@ RSpec.describe Gitlab::Daemon do
end
end
+ describe '.initialize_instance' do
+ before do
+ allow(Kernel).to receive(:at_exit)
+ end
+
+ after do
+ described_class.instance_variable_set(:@instance, nil)
+ end
+
+ it 'provides instance of Daemon' do
+ expect(described_class.instance).to be_instance_of(described_class)
+ end
+
+ context 'when instance has already been created' do
+ before do
+ described_class.instance
+ end
+
+ context 'and recreate flag is false' do
+ it 'raises an error' do
+ expect { described_class.initialize_instance }.to raise_error(/singleton instance already initialized/)
+ end
+ end
+
+ context 'and recreate flag is true' do
+ it 'calls stop on existing instance and returns new instance' do
+ old_instance = described_class.instance
+ expect(old_instance).to receive(:stop)
+
+ new_instance = described_class.initialize_instance(recreate: true)
+
+ expect(new_instance.object_id).not_to eq(old_instance.object_id)
+ end
+ end
+ end
+ end
+
context 'when Daemon is enabled' do
before do
allow(subject).to receive(:enabled?).and_return(true)
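
The new .initialize_instance behaviour makes the singleton explicit: a second initialization raises unless recreate: true is passed, in which case the old instance is stopped first. A short, hedged sketch using only the calls shown above:

    # Sketch of the singleton lifecycle tested above.
    instance = Gitlab::Daemon.instance                 # builds and memoizes the singleton
    Gitlab::Daemon.initialize_instance                 # raises: singleton instance already initialized
    Gitlab::Daemon.initialize_instance(recreate: true) # stops the old instance, returns a fresh one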
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index 8b57da8e60b..c2bd20798f1 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -8,11 +8,11 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
let_it_be_with_reload(:pipeline) do
create(:ci_pipeline,
- project: project,
- status: 'success',
- sha: project.commit.sha,
- ref: project.default_branch,
- user: user)
+ project: project,
+ status: 'success',
+ sha: project.commit.sha,
+ ref: project.default_branch,
+ user: user)
end
let!(:build) { create(:ci_build, pipeline: pipeline) }
@@ -48,6 +48,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
avatar_url: user.avatar_url(only_path: false),
email: user.public_email
})
+ expect(data[:source_pipeline]).to be_nil
end
context 'build with runner' do
@@ -132,6 +133,34 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end
end
+ context 'when the pipeline has an upstream' do
+ let(:source_pipeline_attrs) { data[:source_pipeline] }
+
+ shared_examples 'source pipeline attributes' do
+ it 'has source pipeline attributes', :aggregate_failures do
+ expect(source_pipeline_attrs[:pipeline_id]).to eq upstream_pipeline.id
+ expect(source_pipeline_attrs[:job_id]).to eq pipeline.source_bridge.id
+ expect(source_pipeline_attrs[:project][:id]).to eq upstream_pipeline.project.id
+ expect(source_pipeline_attrs[:project][:web_url]).to eq upstream_pipeline.project.web_url
+ expect(source_pipeline_attrs[:project][:path_with_namespace]).to eq upstream_pipeline.project.full_path
+ end
+ end
+
+ context 'in same project' do
+ let!(:upstream_pipeline) { create(:ci_pipeline, upstream_of: pipeline, project: project) }
+
+ it_behaves_like 'source pipeline attributes'
+ end
+
+ context 'in different project' do
+ let!(:upstream_pipeline) { create(:ci_pipeline, upstream_of: pipeline) }
+
+ it_behaves_like 'source pipeline attributes'
+
+ it { expect(source_pipeline_attrs[:project][:id]).not_to eq pipeline.project.id }
+ end
+ end
+
context 'avoids N+1 database queries' do
it "with multiple builds" do
# Preparing the pipeline with the minimal builds
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index f147e8204e6..97459d4a7be 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -311,6 +311,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
let(:table_name) { :_test_batched_migrations_test_table }
let(:column_name) { :some_id }
let(:job_arguments) { [:some_id, :some_id_convert_to_bigint] }
+ let(:gitlab_schemas) { Gitlab::Database.gitlab_schemas_for_connection(connection) }
let(:migration_status) { :active }
@@ -358,7 +359,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'completes the migration' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
.and_return(batched_migration)
expect(batched_migration).to receive(:finalize!).and_call_original
@@ -399,7 +400,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'is a no-op' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
.and_return(batched_migration)
configuration = {
@@ -426,7 +427,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
context 'when the migration does not exist' do
it 'is a no-op' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
.and_return(nil)
configuration = {
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index a1c979bba50..8819171cfd0 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -78,23 +78,41 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '.active_migration' do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
let!(:migration1) { create(:batched_background_migration, :finished) }
- context 'without migrations on hold' do
+ subject(:active_migration) { described_class.active_migration(connection: connection) }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ context 'when there are no migrations on hold' do
let!(:migration2) { create(:batched_background_migration, :active) }
let!(:migration3) { create(:batched_background_migration, :active) }
it 'returns the first active migration according to queue order' do
- expect(described_class.active_migration).to eq(migration2)
+ expect(active_migration).to eq(migration2)
end
end
- context 'with migrations are on hold' do
+ context 'when there are migrations on hold' do
let!(:migration2) { create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now) }
let!(:migration3) { create(:batched_background_migration, :active, on_hold_until: 2.minutes.ago) }
it 'returns the first active migration that is not on hold according to queue order' do
- expect(described_class.active_migration).to eq(migration3)
+ expect(active_migration).to eq(migration3)
+ end
+ end
+
+ context 'when there are migrations not available for the current connection' do
+ let!(:migration2) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_not_existing) }
+ let!(:migration3) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_main) }
+
+ it 'returns the first active migration that is available for the current connection' do
+ expect(active_migration).to eq(migration3)
end
end
end
@@ -553,25 +571,43 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '.for_configuration' do
- let!(:migration) do
- create(
- :batched_background_migration,
+ let!(:attributes) do
+ {
job_class_name: 'MyJobClass',
table_name: :projects,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
- )
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_main
+ }
end
+ let!(:migration) { create(:batched_background_migration, attributes) }
+
before do
- create(:batched_background_migration, job_class_name: 'OtherClass')
- create(:batched_background_migration, table_name: 'other_table')
- create(:batched_background_migration, column_name: 'other_column')
- create(:batched_background_migration, job_arguments: %w[other arguments])
+ create(:batched_background_migration, attributes.merge(job_class_name: 'OtherClass'))
+ create(:batched_background_migration, attributes.merge(table_name: 'other_table'))
+ create(:batched_background_migration, attributes.merge(column_name: 'other_column'))
+ create(:batched_background_migration, attributes.merge(job_arguments: %w[other arguments]))
end
it 'finds the migration matching the given configuration parameters' do
- actual = described_class.for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+ actual = described_class.for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+
+ expect(actual).to contain_exactly(migration)
+ end
+
+ it 'filters by gitlab schemas available for the connection' do
+ actual = described_class.for_configuration(:gitlab_ci, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+
+ expect(actual).to be_empty
+ end
+
+ it 'does not filter by gitlab schemas available for the connection if the column is not present' do
+ skip_if_multiple_databases_not_setup
+
+ expect(described_class).to receive(:gitlab_schema_column_exists?).and_return(false)
+
+ actual = described_class.for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
expect(actual).to contain_exactly(migration)
end
@@ -579,7 +615,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '.find_for_configuration' do
it 'returns nil if such a migration does not exist' do
- expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
+ expect(described_class.find_for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
end
it 'returns the migration when it exists' do
@@ -588,10 +624,25 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
job_class_name: 'MyJobClass',
table_name: :projects,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_main
)
- expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ expect(described_class.find_for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ end
+ end
+
+ describe '.for_gitlab_schema' do
+ let!(:migration) { create(:batched_background_migration, gitlab_schema: :gitlab_main) }
+
+ before do
+ create(:batched_background_migration, gitlab_schema: :gitlab_not_existing)
+ end
+
+ it 'finds the migrations matching the given gitlab schema' do
+ actual = described_class.for_gitlab_schema(:gitlab_main)
+
+ expect(actual).to contain_exactly(migration)
end
end
end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 028bdce852e..811d4fad95c 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -384,4 +384,58 @@ RSpec.describe Gitlab::Database::BatchCount do
subject { described_class.method(:batch_sum) }
end
end
+
+ describe '#batch_average' do
+ let(:model) { Issue }
+ let(:column) { :weight }
+
+ before do
+ Issue.update_all(weight: 2)
+ end
+
+ it 'returns the average of values in the given column' do
+ expect(described_class.batch_average(model, column)).to eq(2)
+ end
+
+ it 'works when given an Arel column' do
+ expect(described_class.batch_average(model, model.arel_table[column])).to eq(2)
+ end
+
+ it 'works with a batch size of 50K' do
+ expect(described_class.batch_average(model, column, batch_size: 50_000)).to eq(2)
+ end
+
+ it 'works with start and finish provided' do
+ expect(described_class.batch_average(model, column, start: model.minimum(:id), finish: model.maximum(:id))).to eq(2)
+ end
+
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE)
+
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
+
+ described_class.batch_average(model, column)
+ end
+
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_average(model, column) }
+ end
+
+ it_behaves_like 'disallowed configurations', :batch_average do
+ let(:args) { [model, column] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE }
+ let(:small_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE - 1 }
+ end
+
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :itself }
+ let(:operation) { :average }
+ let(:operation_args) { [column] }
+
+ subject { described_class.method(:batch_average) }
+ end
+ end
end
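
batch_average extends the existing batch counting helpers with an averaging operation that walks the table in id ranges. A minimal usage sketch limited to the calls the spec makes:

    # Sketch: batched averaging, as exercised by the spec above.
    Gitlab::Database::BatchCount.batch_average(Issue, :weight)
    Gitlab::Database::BatchCount.batch_average(Issue, :weight, batch_size: 50_000)
    Gitlab::Database::BatchCount.batch_average(
      Issue, :weight,
      start: Issue.minimum(:id), finish: Issue.maximum(:id)
    )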
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index 191f7017b4c..8345cdfb8fb 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -61,7 +61,11 @@ RSpec.describe Gitlab::Database::EachDatabase do
context 'when shared connections are not included' do
it 'only yields the unshared connections' do
- expect(Gitlab::Database).to receive(:db_config_share_with).twice.and_return(nil, 'main')
+ if Gitlab::Database.has_config?(:ci)
+ expect(Gitlab::Database).to receive(:db_config_share_with).exactly(3).times.and_return(nil, 'main', 'main')
+ else
+ expect(Gitlab::Database).to receive(:db_config_share_with).twice.and_return(nil, 'main')
+ end
expect { |b| described_class.each_database_connection(include_shared: false, &b) }
.to yield_successive_args([ActiveRecord::Base.connection, 'main'])
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index a5a67c2c918..611b2fbad72 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::GitlabSchema do
it 'all tables have assigned a known gitlab_schema' do
is_expected.to all(
- match([be_a(String), be_in([:gitlab_shared, :gitlab_main, :gitlab_ci])])
+ match([be_a(String), be_in([:gitlab_internal, :gitlab_shared, :gitlab_main, :gitlab_ci])])
)
end
@@ -42,12 +42,12 @@ RSpec.describe Gitlab::Database::GitlabSchema do
where(:name, :classification) do
'ci_builds' | :gitlab_ci
'my_schema.ci_builds' | :gitlab_ci
- 'information_schema.columns' | :gitlab_shared
+ 'information_schema.columns' | :gitlab_internal
'audit_events_part_5fc467ac26' | :gitlab_main
'_test_gitlab_main_table' | :gitlab_main
'_test_gitlab_ci_table' | :gitlab_ci
'_test_my_table' | :gitlab_shared
- 'pg_attribute' | :gitlab_shared
+ 'pg_attribute' | :gitlab_internal
'my_other_table' | :undefined_my_other_table
end
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
index 77284b4d128..34370c9a21f 100644
--- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -100,14 +100,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
expect(config.pool_size).to eq(4)
end
end
-
- it 'calls reuse_primary_connection!' do
- expect_next_instance_of(described_class) do |subject|
- expect(subject).to receive(:reuse_primary_connection!).and_call_original
- end
-
- described_class.for_model(model)
- end
end
describe '#load_balancing_enabled?' do
@@ -203,61 +195,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
end
end
- describe '#replica_db_config' do
+ describe '#db_config' do
let(:model) { double(:model, connection_db_config: db_config, connection_specification_name: 'Ci::ApplicationRecord') }
let(:config) { described_class.for_model(model) }
it 'returns exactly db_config' do
- expect(config.replica_db_config).to eq(db_config)
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- it 'does not change replica_db_config' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
-
- expect(config.replica_db_config).to eq(db_config)
- end
- end
- end
-
- describe 'reuse_primary_connection!' do
- let(:model) { double(:model, connection_db_config: db_config, connection_specification_name: 'Ci::ApplicationRecord') }
- let(:config) { described_class.for_model(model) }
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_* not configured' do
- it 'the primary connection uses default specification' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
-
- expect(config.primary_connection_specification_name).to eq('Ci::ApplicationRecord')
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- before do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
- end
-
- it 'the primary connection uses main connection' do
- expect(config.primary_connection_specification_name).to eq('ActiveRecord::Base')
- end
-
- context 'when force_no_sharing_primary_model feature flag is enabled' do
- before do
- stub_feature_flags(force_no_sharing_primary_model: true)
- end
-
- it 'the primary connection uses ci connection' do
- expect(config.primary_connection_specification_name).to eq('Ci::ApplicationRecord')
- end
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=unknown' do
- it 'raises exception' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'unknown')
-
- expect { config.reuse_primary_connection! }.to raise_error /Invalid value for/
- end
+ expect(config.db_config).to eq(db_config)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
index ee2718171c0..41312dbedd6 100644
--- a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
@@ -79,42 +79,55 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
end
end
- describe '.insert_all!' do
+ describe 'methods using exec_insert_all on the connection', :request_store do
+ let(:model_class) do
+ Class.new(ApplicationRecord) do
+ self.table_name = "_test_connection_proxy_insert_all"
+ end
+ end
+
+ let(:session) { Gitlab::Database::LoadBalancing::Session.new }
+
before do
ActiveRecord::Schema.define do
- create_table :_test_connection_proxy_bulk_insert, force: true do |t|
- t.string :name, null: true
+ create_table :_test_connection_proxy_insert_all, force: true do |t|
+ t.string :name, null: false
+ t.index :name, unique: true
end
end
+
+ allow(Gitlab::Database::LoadBalancing::Session).to receive(:current)
+ .and_return(session)
end
after do
ActiveRecord::Schema.define do
- drop_table :_test_connection_proxy_bulk_insert, force: true
+ drop_table :_test_connection_proxy_insert_all, force: true
end
end
- let(:model_class) do
- Class.new(ApplicationRecord) do
- self.table_name = "_test_connection_proxy_bulk_insert"
+ describe '#upsert' do
+ it 'upserts a record and marks the session to stick to the primary' do
+ expect { 2.times { model_class.upsert({ name: 'test' }, unique_by: :name) } }
+ .to change { model_class.count }.from(0).to(1)
+ .and change { session.use_primary? }.from(false).to(true)
end
end
- it 'inserts data in bulk' do
- expect(model_class).to receive(:connection)
- .at_least(:once)
- .and_return(proxy)
+ describe '#insert_all!' do
+ it 'inserts multiple records and marks the session to stick to the primary' do
+ expect { model_class.insert_all([{ name: 'one' }, { name: 'two' }]) }
+ .to change { model_class.count }.from(0).to(2)
+ .and change { session.use_primary? }.from(false).to(true)
+ end
+ end
- expect(proxy).to receive(:write_using_load_balancer)
- .at_least(:once)
- .and_call_original
-
- expect do
- model_class.insert_all! [
- { name: "item1" },
- { name: "item2" }
- ]
- end.to change { model_class.count }.by(2)
+ describe '#insert' do
+ it 'inserts a single record and marks the session to stick to the primary' do
+ expect { model_class.insert({ name: 'single' }) }
+ .to change { model_class.count }.from(0).to(1)
+ .and change { session.use_primary? }.from(false).to(true)
+ end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 3c7819c04b6..34eb64997c1 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -487,46 +487,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe 'primary connection re-use', :reestablished_active_record_base, :add_ci_connection do
- let(:model) { Ci::ApplicationRecord }
-
- describe '#read' do
- it 'returns ci replica connection' do
- expect { |b| lb.read(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('ci_replica')
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- it 'returns ci replica connection' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
-
- expect { |b| lb.read(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('ci_replica')
- end
- end
- end
- end
-
- describe '#read_write' do
- it 'returns Ci::ApplicationRecord connection' do
- expect { |b| lb.read_write(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('ci')
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- it 'returns ActiveRecord::Base connection' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
-
- expect { |b| lb.read_write(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('main')
- end
- end
- end
- end
- end
-
describe '#wal_diff' do
it 'returns the diff between two write locations' do
loc1 = lb.send(:get_write_location, lb.pool.connection)
diff --git a/spec/lib/gitlab/database/load_balancing/setup_spec.rb b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
index c44637b8d06..fa6d71bca7f 100644
--- a/spec/lib/gitlab/database/load_balancing/setup_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
@@ -122,123 +122,68 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do
context 'uses correct base models', :reestablished_active_record_base do
using RSpec::Parameterized::TableSyntax
- where do
- {
- "it picks a dedicated CI connection" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil,
- request_store_active: false,
- ff_force_no_sharing_primary_model: false,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'ci' }
- }
- },
- "with re-use of primary connection it uses CI connection for reads" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main',
- request_store_active: false,
- ff_force_no_sharing_primary_model: false,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'main' }
- }
- },
- "with re-use and FF force_no_sharing_primary_model enabled with RequestStore it sticks FF and uses CI connection for reads and writes" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main',
- request_store_active: true,
- ff_force_no_sharing_primary_model: true,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'ci' }
- }
- },
- "with re-use and FF force_no_sharing_primary_model enabled without RequestStore it doesn't use FF and uses CI connection for reads only" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main',
- request_store_active: true,
- ff_force_no_sharing_primary_model: false,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'main' }
- }
- }
- }
- end
-
- with_them do
- let(:ci_class) do
- Class.new(ActiveRecord::Base) do
- def self.name
- 'Ci::ApplicationRecordTemporary'
- end
-
- establish_connection ActiveRecord::DatabaseConfigurations::HashConfig.new(
- Rails.env,
- 'ci',
- ActiveRecord::Base.connection_db_config.configuration_hash
- )
+ let(:ci_class) do
+ Class.new(ActiveRecord::Base) do
+ def self.name
+ 'Ci::ApplicationRecordTemporary'
end
- end
- let(:models) do
- {
- main: ActiveRecord::Base,
- ci: ci_class
- }
+ establish_connection ActiveRecord::DatabaseConfigurations::HashConfig.new(
+ Rails.env,
+ 'ci',
+ ActiveRecord::Base.connection_db_config.configuration_hash
+ )
end
+ end
- around do |example|
- if request_store_active
- Gitlab::WithRequestStore.with_request_store do
- stub_feature_flags(force_no_sharing_primary_model: ff_force_no_sharing_primary_model)
- RequestStore.clear!
-
- example.run
- end
- else
- example.run
- end
- end
+ let(:models) do
+ {
+ main: ActiveRecord::Base,
+ ci: ci_class
+ }
+ end
- before do
- allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
- # Rewrite `class_attribute` to use rspec mocking and prevent modifying the objects
- allow_next_instance_of(described_class) do |setup|
- allow(setup).to receive(:configure_connection)
+ # Rewrite `class_attribute` to use rspec mocking and prevent modifying the objects
+ allow_next_instance_of(described_class) do |setup|
+ allow(setup).to receive(:configure_connection)
- allow(setup).to receive(:setup_class_attribute) do |attribute, value|
- allow(setup.model).to receive(attribute) { value }
- end
+ allow(setup).to receive(:setup_class_attribute) do |attribute, value|
+ allow(setup.model).to receive(attribute) { value }
end
+ end
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci)
-
- # Make load balancer to force init with a dedicated replicas connections
- models.each do |_, model|
- described_class.new(model).tap do |subject|
- subject.configuration.hosts = [subject.configuration.replica_db_config.host]
- subject.setup
- end
+ # Force the load balancer to initialize with dedicated replica connections
+ models.each do |_, model|
+ described_class.new(model).tap do |subject|
+ subject.configuration.hosts = [subject.configuration.db_config.host]
+ subject.setup
end
end
+ end
- it 'results match expectations' do
- result = models.transform_values do |model|
- load_balancer = model.connection.instance_variable_get(:@load_balancer)
-
- {
- read: load_balancer.read { |connection| connection.pool.db_config.name },
- write: load_balancer.read_write { |connection| connection.pool.db_config.name }
- }
- end
+ it 'results match expectations' do
+ result = models.transform_values do |model|
+ load_balancer = model.connection.instance_variable_get(:@load_balancer)
- expect(result).to eq(expectations)
+ {
+ read: load_balancer.read { |connection| connection.pool.db_config.name },
+ write: load_balancer.read_write { |connection| connection.pool.db_config.name }
+ }
end
- it 'does return load_balancer assigned to a given connection' do
- models.each do |name, model|
- expect(model.load_balancer.name).to eq(name)
- expect(model.sticking.instance_variable_get(:@load_balancer)).to eq(model.load_balancer)
- end
+ expect(result).to eq({
+ main: { read: 'main_replica', write: 'main' },
+ ci: { read: 'ci_replica', write: 'ci' }
+ })
+ end
+
+ it 'does return load_balancer assigned to a given connection' do
+ models.each do |name, model|
+ expect(model.load_balancer.name).to eq(name)
+ expect(model.sticking.instance_variable_get(:@load_balancer)).to eq(model.load_balancer)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
index 9acf80e684f..b7915e6cf69 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
@@ -57,6 +57,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to be_nil
+ expect(job['wal_location_source']).to be_nil
end
include_examples 'job data consistency'
@@ -96,6 +97,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to eq(expected_location)
+ expect(job['wal_location_source']).to eq(:replica)
end
include_examples 'job data consistency'
@@ -120,6 +122,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to eq(expected_location)
+ expect(job['wal_location_source']).to eq(:primary)
end
include_examples 'job data consistency'
@@ -162,6 +165,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to eq(wal_locations)
+ expect(job['wal_location_source']).to be_nil
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
new file mode 100644
index 00000000000..30e5fbbd803
--- /dev/null
+++ b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis, :delete do
+ let(:model) { ApplicationRecord }
+ let(:db_host) { model.connection_pool.db_config.host }
+
+ let(:test_table_name) { '_test_foo' }
+
+ before do
+ # Patch in our load balancer config, simply pointing at the test database twice
+ allow(Gitlab::Database::LoadBalancing::Configuration).to receive(:for_model) do |base_model|
+ Gitlab::Database::LoadBalancing::Configuration.new(base_model, [db_host, db_host])
+ end
+
+ Gitlab::Database::LoadBalancing::Setup.new(ApplicationRecord).setup
+
+ model.connection.execute(<<~SQL)
+ CREATE TABLE IF NOT EXISTS #{test_table_name} (id SERIAL PRIMARY KEY, value INTEGER)
+ SQL
+ end
+
+ after do
+ model.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{test_table_name}
+ SQL
+ end
+
+ def execute(conn)
+ conn.execute("INSERT INTO #{test_table_name} (value) VALUES (1)")
+ backend_pid = conn.execute("SELECT pg_backend_pid() AS pid").to_a.first['pid']
+
+ # This will result in a PG error, which is not raised.
+ # Instead, we retry the statement on a fresh connection (where the pid is different and it does nothing)
+ # and the load balancer continues with a fresh connection and no transaction if a transaction was open previously
+ conn.execute(<<~SQL)
+ SELECT CASE
+ WHEN pg_backend_pid() = #{backend_pid} THEN
+ pg_terminate_backend(#{backend_pid})
+ END
+ SQL
+
+ # This statement will execute on a new connection, and violate transaction semantics
+ # if we were in a transaction before
+ conn.execute("INSERT INTO #{test_table_name} (value) VALUES (2)")
+ end
+
+ it 'logs a warning when violating transaction semantics with writes' do
+ conn = model.connection
+
+ expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
+
+ conn.transaction do
+ expect(conn).to be_transaction_open
+
+ execute(conn)
+
+ expect(conn).not_to be_transaction_open
+ end
+
+ values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
+ expect(values).to contain_exactly(2) # Does not include 1 because the transaction was aborted and leaked
+ end
+
+ it 'does not log a warning when no transaction is open to be leaked' do
+ conn = model.connection
+
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .not_to receive(:warn).with(hash_including(event: :transaction_leak))
+
+ expect(conn).not_to be_transaction_open
+
+ execute(conn)
+
+ expect(conn).not_to be_transaction_open
+
+ values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
+ expect(values).to contain_exactly(1, 2) # Includes both rows because there was no transaction to roll back
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb b/spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb
new file mode 100644
index 00000000000..57c51c9d9c2
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::AnnounceDatabase do
+ let(:migration) do
+ ActiveRecord::Migration.new('MyMigration', 1111).extend(described_class)
+ end
+
+ describe '#announce' do
+ it 'prefixes message with database name' do
+ expect { migration.announce('migrating') }.to output(/^main: == 1111 MyMigration: migrating/).to_stdout
+ end
+ end
+
+ describe '#say' do
+ it 'prefixes message with database name' do
+ expect { migration.say('transaction_open?()') }.to output(/^main: -- transaction_open\?\(\)/).to_stdout
+ end
+
+ it 'prefixes subitem message with database name' do
+ expect { migration.say('0.0000s', true) }.to output(/^main: -> 0.0000s/).to_stdout
+ end
+ end
+
+ describe '#write' do
+ it 'does not prefix empty write' do
+ expect { migration.write }.to output(/^$/).to_stdout
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 04fe1fad10e..e09016b2b2b 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -2079,6 +2079,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
t.integer :other_id
t.timestamps
end
+
+ allow(model).to receive(:transaction_open?).and_return(false)
end
context 'when the target table does not exist' do
@@ -2191,6 +2193,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
t.timestamps
end
+ allow(model).to receive(:transaction_open?).and_return(false)
+
model.initialize_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
model.backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
end
@@ -2242,10 +2246,20 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
}
end
+ let(:migration_attributes) do
+ configuration.merge(gitlab_schema: Gitlab::Database.gitlab_schemas_for_connection(model.connection).first)
+ end
+
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
subject(:ensure_batched_background_migration_is_finished) { model.ensure_batched_background_migration_is_finished(**configuration) }
it 'raises an error when migration exists and is not marked as finished' do
- create(:batched_background_migration, :active, configuration)
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
+ create(:batched_background_migration, :active, migration_attributes)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
allow(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(false)
@@ -2255,7 +2269,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':" \
"\t#{configuration}" \
"\n\n" \
- "Finalize it manually by running" \
+ "Finalize it manually by running the following command in a `bash` or `sh` shell:" \
"\n\n" \
"\tsudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"id\"]\\,[\"id_convert_to_bigint\"]\\,null]']" \
"\n\n" \
@@ -2265,13 +2279,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'does not raise error when migration exists and is marked as finished' do
- create(:batched_background_migration, :finished, configuration)
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :finished, migration_attributes)
expect { ensure_batched_background_migration_is_finished }
.not_to raise_error
end
it 'logs a warning when migration does not exist' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else))
+
expect(Gitlab::AppLogger).to receive(:warn)
.with("Could not find batched background migration for the given configuration: #{configuration}")
@@ -2280,6 +2300,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'finalizes the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
migration = create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
@@ -2291,6 +2313,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when the flag finalize is false' do
it 'does not finalize the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
@@ -3257,4 +3281,20 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
end
end
+
+ describe '#drop_sequence' do
+ it "executes the statement to drop the sequence" do
+ expect(model).to receive(:execute).with /ALTER TABLE "test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "test_table_id_seq"/
+
+ model.drop_sequence(:test_table, :test_column, :test_table_id_seq)
+ end
+ end
+
+ describe '#add_sequence' do
+ it "executes the statement to add the sequence" do
+ expect(model).to receive(:execute).with "CREATE SEQUENCE \"test_table_id_seq\" START 1;\nALTER TABLE \"test_table\" ALTER COLUMN \"test_column\" SET DEFAULT nextval(\'test_table_id_seq\')\n"
+
+ model.add_sequence(:test_table, :test_column, :test_table_id_seq, 1)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index b0caa21e01a..c423340a572 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -444,7 +444,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does restore connection hierarchy' do
expect_next_instances_of(job_class, 1..) do |job|
expect(job).to receive(:perform) do
- validate_connections!
+ validate_connections_stack!
end
end
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index d1a66036149..f3414727245 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -3,8 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers do
+ let(:migration_class) do
+ Class.new(ActiveRecord::Migration[6.1])
+ .include(described_class)
+ .include(Gitlab::Database::Migrations::ReestablishedConnectionStack)
+ end
+
let(:migration) do
- ActiveRecord::Migration.new.extend(described_class)
+ migration_class.new
end
describe '#queue_batched_background_migration' do
@@ -12,6 +18,9 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
before do
allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ allow(migration).to receive(:transaction_open?).and_return(false)
end
context 'when such migration already exists' do
@@ -27,7 +36,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_class_name: 'MyBatchClass',
batch_size: 200,
sub_batch_size: 20,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_ci
)
expect do
@@ -41,7 +51,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_max_value: 1000,
batch_class_name: 'MyBatchClass',
batch_size: 100,
- sub_batch_size: 10)
+ sub_batch_size: 10,
+ gitlab_schema: :gitlab_ci)
end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
end
end
@@ -60,7 +71,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_class_name: 'MyBatchClass',
batch_size: 100,
max_batch_size: 10000,
- sub_batch_size: 10)
+ sub_batch_size: 10,
+ gitlab_schema: :gitlab_ci)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
@@ -76,7 +88,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
sub_batch_size: 10,
job_arguments: %w[],
status_name: :active,
- total_tuple_count: pgclass_info.cardinality_estimate)
+ total_tuple_count: pgclass_info.cardinality_estimate,
+ gitlab_schema: 'gitlab_ci')
end
context 'when the job interval is lower than the minimum' do
@@ -160,6 +173,31 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished
end
+
+ context 'when within transaction' do
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(true)
+ end
+
+ it 'does raise an exception' do
+ expect { migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)}
+ .to raise_error /`queue_batched_background_migration` cannot be run inside a transaction./
+ end
+ end
+ end
+ end
+
+ context 'when gitlab_schema is not given' do
+ it 'fetches gitlab_schema from the migration context' do
+ expect(migration).to receive(:gitlab_schema_from_context).and_return(:gitlab_ci)
+
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.gitlab_schema).to eq('gitlab_ci')
end
end
end
@@ -167,6 +205,12 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
describe '#finalize_batched_background_migration' do
let!(:batched_migration) { create(:batched_background_migration, job_class_name: 'MyClass', table_name: :projects, column_name: :id, job_arguments: []) }
+ before do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ end
+
it 'finalizes the migration' do
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
expect(runner).to receive(:finalize).with('MyClass', :projects, :id, [])
@@ -183,24 +227,162 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
end
- context 'when uses a CI connection', :reestablished_active_record_base do
+ context 'when within transaction' do
before do
- skip_if_multiple_databases_not_setup
+ allow(migration).to receive(:transaction_open?).and_return(true)
+ end
- ActiveRecord::Base.establish_connection(:ci) # rubocop:disable Database/EstablishConnection
+ it 'does raise an exception' do
+ expect { migration.finalize_batched_background_migration(job_class_name: 'MyJobClass', table_name: :projects, column_name: :id, job_arguments: []) }
+ .to raise_error /`finalize_batched_background_migration` cannot be run inside a transaction./
end
+ end
- it 'raises an exception' do
- ci_migration = create(:batched_background_migration, :active)
+ context 'when running migration in reconfigured ActiveRecord::Base context' do
+ it_behaves_like 'reconfigures connection stack', 'ci' do
+ before do
+ create(:batched_background_migration,
+ job_class_name: 'Ci::MyClass',
+ table_name: :ci_builds,
+ column_name: :id,
+ job_arguments: [],
+ gitlab_schema: :gitlab_ci)
+ end
+
+ context 'when restrict_gitlab_migration is set to gitlab_ci' do
+ it 'finalizes the migration' do
+ migration_class.include(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema)
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).to receive(:finalize).with('Ci::MyClass', :ci_builds, :id, []) do
+ validate_connections_stack!
+ end
+ end
+
+ migration.finalize_batched_background_migration(
+ job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
+ end
+ end
+
+ context 'when restrict_gitlab_migration is set to gitlab_main' do
+ it 'does not find any migrations' do
+ migration_class.include(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema)
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ expect do
+ migration.finalize_batched_background_migration(
+ job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
+ end.to raise_error /Could not find batched background migration/
+ end
+ end
+
+ context 'when no restrict is set' do
+ it 'does not find any migrations' do
+ expect do
+ migration.finalize_batched_background_migration(
+ job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
+ end.to raise_error /Could not find batched background migration/
+ end
+ end
+ end
+ end
+
+ context 'when within transaction' do
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(true)
+ end
+
+ it 'does raise an exception' do
+ expect { migration.finalize_batched_background_migration(job_class_name: 'MyJobClass', table_name: :projects, column_name: :id, job_arguments: []) }
+ .to raise_error /`finalize_batched_background_migration` cannot be run inside a transaction./
+ end
+ end
+ end
+
+ describe '#delete_batched_background_migration' do
+ let(:transaction_open) { false }
+
+ before do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ allow(migration).to receive(:transaction_open?).and_return(transaction_open)
+ end
+
+ context 'when migration exists' do
+ it 'deletes it' do
+ create(
+ :batched_background_migration,
+ job_class_name: 'MyJobClass',
+ table_name: :projects,
+ column_name: :id,
+ interval: 10.minutes,
+ min_value: 5,
+ max_value: 1005,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 200,
+ sub_batch_size: 20,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
expect do
- migration.finalize_batched_background_migration(
- job_class_name: ci_migration.job_class_name,
- table_name: ci_migration.table_name,
- column_name: ci_migration.column_name,
- job_arguments: ci_migration.job_arguments
- )
- end.to raise_error /is currently not supported when running in decomposed/
+ migration.delete_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ [[:id], [:id_convert_to_bigint]])
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.from(1).to(0)
+ end
+ end
+
+ context 'when migration does not exist' do
+ it 'does nothing' do
+ create(
+ :batched_background_migration,
+ job_class_name: 'SomeOtherJobClass',
+ table_name: :projects,
+ column_name: :id,
+ interval: 10.minutes,
+ min_value: 5,
+ max_value: 1005,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 200,
+ sub_batch_size: 20,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
+
+ expect do
+ migration.delete_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ [[:id], [:id_convert_to_bigint]])
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
+ end
+ end
+
+ context 'when within transaction' do
+ let(:transaction_open) { true }
+
+ it 'raises an exception' do
+ expect { migration.delete_batched_background_migration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]]) }
+ .to raise_error /`#delete_batched_background_migration` cannot be run inside a transaction./
+ end
+ end
+ end
+
+ describe '#gitlab_schema_from_context' do
+ context 'when allowed_gitlab_schemas is not available' do
+ it 'defaults to :gitlab_main' do
+ expect(migration.gitlab_schema_from_context).to eq(:gitlab_main)
+ end
+ end
+
+ context 'when allowed_gitlab_schemas is available' do
+ it 'uses schema from allowed_gitlab_schema' do
+ expect(migration).to receive(:allowed_gitlab_schemas).and_return([:gitlab_ci])
+
+ expect(migration.gitlab_schema_from_context).to eq(:gitlab_ci)
end
end
end
diff --git a/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb b/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
index cfb308c63e4..d197f39be40 100644
--- a/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
+++ b/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::Migrations::ReestablishedConnectionStack do
it_behaves_like "reconfigures connection stack", db_config_name do
it 'does restore connection hierarchy' do
model.with_restored_connection_stack do
- validate_connections!
+ validate_connections_stack!
end
end
diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
index fbfff1268cc..2f3d44f6f8f 100644
--- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freeze_time do
include Gitlab::Database::MigrationHelpers
- include Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers
include Database::MigrationTestingHelpers
let(:result_dir) { Dir.mktmpdir }
@@ -13,6 +12,10 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
FileUtils.rm_rf(result_dir)
end
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers)
+ end
+
let(:connection) { ApplicationRecord.connection }
let(:table_name) { "_test_column_copying"}
@@ -26,11 +29,13 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
insert into #{table_name} (id) select i from generate_series(1, 1000) g(i);
SQL
+
+ allow(migration).to receive(:transaction_open?).and_return(false)
end
context 'running a real background migration' do
it 'runs sampled jobs from the batched background migration' do
- queue_batched_background_migration('CopyColumnUsingBackgroundMigrationJob',
+ migration.queue_batched_background_migration('CopyColumnUsingBackgroundMigrationJob',
table_name, :id,
:id, :data,
batch_size: 100,
@@ -46,7 +51,9 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
let(:migration_name) { 'TestBackgroundMigration' }
before do
- queue_batched_background_migration(migration_name, table_name, :id, job_interval: 5.minutes, batch_size: 100)
+ migration.queue_batched_background_migration(
+ migration_name, table_name, :id, job_interval: 5.minutes, batch_size: 100
+ )
end
it 'samples jobs' do
@@ -67,13 +74,13 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
travel 3.days
new_migration = define_background_migration('NewMigration') { travel 1.second }
- queue_batched_background_migration('NewMigration', table_name, :id,
+ migration.queue_batched_background_migration('NewMigration', table_name, :id,
job_interval: 5.minutes,
batch_size: 10,
sub_batch_size: 5)
other_new_migration = define_background_migration('NewMigration2') { travel 2.seconds }
- queue_batched_background_migration('NewMigration2', table_name, :id,
+ migration.queue_batched_background_migration('NewMigration2', table_name, :id,
job_interval: 5.minutes,
batch_size: 10,
sub_batch_size: 5)
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index 64dcdb9628a..dca4548a0a3 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -8,7 +8,11 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
def has_partition(model, month)
Gitlab::Database::PostgresPartition.for_parent_table(model.table_name).any? do |partition|
- Gitlab::Database::Partitioning::TimePartition.from_sql(model.table_name, partition.name, partition.condition).from == month
+ Gitlab::Database::Partitioning::TimePartition.from_sql(
+ model.table_name,
+ partition.name,
+ partition.condition
+ ).from == month
end
end
@@ -16,14 +20,17 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
subject(:sync_partitions) { described_class.new(model).sync_partitions }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
- let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "issues" }
+ let(:partitioning_strategy) do
+ double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil)
+ end
before do
allow(connection).to receive(:table_exists?).and_call_original
allow(connection).to receive(:table_exists?).with(table).and_return(true)
allow(connection).to receive(:execute).and_call_original
+ expect(partitioning_strategy).to receive(:validate_and_fix)
stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
end
@@ -84,13 +91,16 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:manager) { described_class.new(model) }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
- let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "foo" }
+ let(:partitioning_strategy) do
+ double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil)
+ end
before do
allow(connection).to receive(:table_exists?).and_call_original
allow(connection).to receive(:table_exists?).with(table).and_return(true)
+ expect(partitioning_strategy).to receive(:validate_and_fix)
stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
end
@@ -107,6 +117,24 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
sync_partitions
end
+
+ it 'logs an error if the partitions are not detachable' do
+ allow(Gitlab::Database::PostgresForeignKey).to receive(:by_referenced_table_identifier).with("public.foo")
+ .and_return([double(name: "fk_1", constrained_table_identifier: "public.constrainted_table_1")])
+
+ expect(Gitlab::AppLogger).to receive(:error).with(
+ {
+ message: "Failed to create / detach partition(s)",
+ connection_name: "main",
+ exception_class: Gitlab::Database::Partitioning::PartitionManager::UnsafeToDetachPartitionError,
+ exception_message:
+ "Cannot detach foo1, it would block while checking foreign key fk_1 on public.constrainted_table_1",
+ table_name: "foo"
+ }
+ )
+
+ sync_partitions
+ end
end
describe '#detach_partitions' do
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 1cec0463055..d8b06ee1a5d 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -37,12 +37,75 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
describe '#current_partitions' do
it 'detects both partitions' do
expect(strategy.current_partitions).to eq([
- Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 1, partition_name: '_test_partitioned_test_1'),
- Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 2, partition_name: '_test_partitioned_test_2')
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(
+ table_name, 1, partition_name: '_test_partitioned_test_1'
+ ),
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(
+ table_name, 2, partition_name: '_test_partitioned_test_2'
+ )
])
end
end
+ describe '#validate_and_fix' do
+ context 'feature flag is disabled' do
+ before do
+ stub_feature_flags(fix_sliding_list_partitioning: false)
+ end
+
+ it 'does not try to fix the default partition value' do
+ connection.change_column_default(model.table_name, strategy.partitioning_key, 3)
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+ strategy.validate_and_fix
+ end
+ end
+
+ context 'feature flag is enabled' do
+ before do
+ stub_feature_flags(fix_sliding_list_partitioning: true)
+ end
+
+ it 'does not call change_column_default if the partitioning is in a valid state' do
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+
+ strategy.validate_and_fix
+ end
+
+ it 'calls change_column_default on partition_key with the most default partition number' do
+ connection.change_column_default(model.table_name, strategy.partitioning_key, 1)
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Fixed default value of sliding_list_strategy partitioning_key',
+ connection_name: 'main',
+ old_value: 1,
+ new_value: 2,
+ table_name: table_name,
+ column: strategy.partitioning_key
+ )
+
+ expect(strategy.model.connection).to receive(:change_column_default).with(
+ model.table_name, strategy.partitioning_key, 2
+ ).and_call_original
+
+ strategy.validate_and_fix
+ end
+
+ it 'does not change the default column if it has been changed in the meanwhile by another process' do
+ expect(strategy).to receive(:current_default_value).and_return(1, 2)
+
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Table partitions or partition key default value have been changed by another process',
+ table_name: table_name,
+ default_value: 2
+ )
+
+ strategy.validate_and_fix
+ end
+ end
+ end
+
describe '#active_partition' do
it 'is the partition with the largest value' do
expect(strategy.active_partition.value).to eq(2)
@@ -157,6 +220,7 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
end.not_to raise_error
end
end
+
context 'redirecting inserts as the active partition changes' do
let(:model) do
Class.new(ApplicationRecord) do
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index 0d687db0f96..62c5ead855a 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
process_sql(ActiveRecord::Base, "SELECT 1 FROM projects")
end
- context 'properly observes all queries', :add_ci_connection, :request_store do
+ context 'properly observes all queries', :add_ci_connection do
using RSpec::Parameterized::TableSyntax
where do
@@ -28,8 +28,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
expectations: {
gitlab_schemas: "gitlab_main",
db_config_name: "main"
- },
- setup: nil
+ }
},
"for query accessing gitlab_ci and gitlab_main" => {
model: ApplicationRecord,
@@ -37,8 +36,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
expectations: {
gitlab_schemas: "gitlab_ci,gitlab_main",
db_config_name: "main"
- },
- setup: nil
+ }
},
"for query accessing gitlab_ci and gitlab_main the gitlab_schemas is always ordered" => {
model: ApplicationRecord,
@@ -46,8 +44,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
expectations: {
gitlab_schemas: "gitlab_ci,gitlab_main",
db_config_name: "main"
- },
- setup: nil
+ }
},
"for query accessing CI database" => {
model: Ci::ApplicationRecord,
@@ -56,62 +53,6 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
gitlab_schemas: "gitlab_ci",
db_config_name: "ci"
}
- },
- "for query accessing CI database with re-use and disabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: true
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
- stub_feature_flags(force_no_sharing_primary_model: true)
- end
- },
- "for query accessing CI database with re-use and enabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: false
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
- stub_feature_flags(force_no_sharing_primary_model: false)
- end
- },
- "for query accessing CI database without re-use and disabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: true
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
- stub_feature_flags(force_no_sharing_primary_model: true)
- end
- },
- "for query accessing CI database without re-use and enabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: true
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
- stub_feature_flags(force_no_sharing_primary_model: false)
- end
}
}
end
@@ -122,15 +63,11 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
end
it do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
-
- instance_eval(&setup) if setup
-
allow(::Ci::ApplicationRecord.load_balancer).to receive(:configuration)
.and_return(Gitlab::Database::LoadBalancing::Configuration.for_model(::Ci::ApplicationRecord))
expect(described_class.schemas_metrics).to receive(:increment)
- .with({ ci_dedicated_primary_connection: anything }.merge(expectations)).and_call_original
+ .with(expectations).and_call_original
process_sql(model, sql)
end
diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb
index 574111f4c01..c88edc17817 100644
--- a/spec/lib/gitlab/database/shared_model_spec.rb
+++ b/spec/lib/gitlab/database/shared_model_spec.rb
@@ -27,6 +27,19 @@ RSpec.describe Gitlab::Database::SharedModel do
end
end
+ it 'raises an error if the connection does not include `:gitlab_shared` schema' do
+ allow(Gitlab::Database)
+ .to receive(:gitlab_schemas_for_connection)
+ .with(new_connection)
+ .and_return([:gitlab_main])
+
+ expect_original_connection_around do
+ expect do
+ described_class.using_connection(new_connection) {}
+ end.to raise_error(/Cannot set `SharedModel` to connection/)
+ end
+ end
+
context 'when multiple connection overrides are nested', :aggregate_failures do
let(:second_connection) { double('connection') }
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
index 2740664d200..e676e5fe034 100644
--- a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -77,6 +77,7 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
create(:user)
expect(result[:status]).to eq(:success)
+ group.reset
expect(group.members.collect(&:user)).to contain_exactly(user, admin1, admin2)
expect(group.members.collect(&:access_level)).to contain_exactly(
Gitlab::Access::OWNER,
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 23f4f0e7089..064613074cd 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -160,13 +160,15 @@ RSpec.describe Gitlab::Database do
end
end
- context 'when the connection is LoadBalancing::ConnectionProxy' do
- it 'returns primary_db_config' do
- lb_config = ::Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
- lb = ::Gitlab::Database::LoadBalancing::LoadBalancer.new(lb_config)
- proxy = ::Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
-
- expect(described_class.db_config_for_connection(proxy)).to eq(lb_config.primary_db_config)
+ context 'when the connection is LoadBalancing::ConnectionProxy', :database_replica do
+ it 'returns primary db config even if ambiguous queries default to replica' do
+ Gitlab::Database::LoadBalancing::Session.current.use_primary!
+ primary_config = described_class.db_config_for_connection(ActiveRecord::Base.connection)
+
+ Gitlab::Database::LoadBalancing::Session.clear_session
+ Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do
+ expect(described_class.db_config_for_connection(ActiveRecord::Base.connection)).to eq(primary_config)
+ end
end
end
@@ -222,14 +224,7 @@ RSpec.describe Gitlab::Database do
end
describe '.gitlab_schemas_for_connection' do
- it 'does raise exception for invalid connection' do
- expect { described_class.gitlab_schemas_for_connection(:invalid) }.to raise_error /key not found: "unknown"/
- end
-
it 'does return a valid schema depending on a base model used', :request_store do
- # FF due to lib/gitlab/database/load_balancing/configuration.rb:92
- stub_feature_flags(force_no_sharing_primary_model: true)
-
expect(described_class.gitlab_schemas_for_connection(Project.connection)).to include(:gitlab_main, :gitlab_shared)
expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).to include(:gitlab_ci, :gitlab_shared)
end
@@ -282,6 +277,15 @@ RSpec.describe Gitlab::Database do
end
end
end
+
+ it 'does return empty for non-adopted connections' do
+ new_connection = ActiveRecord::Base.postgresql_connection(
+ ActiveRecord::Base.connection_db_config.configuration_hash)
+
+ expect(described_class.gitlab_schemas_for_connection(new_connection)).to be_nil
+ ensure
+ new_connection&.disconnect!
+ end
end
describe '#true_value' do
diff --git a/spec/lib/gitlab/diff/custom_diff_spec.rb b/spec/lib/gitlab/diff/custom_diff_spec.rb
deleted file mode 100644
index 77d2a6cbcd6..00000000000
--- a/spec/lib/gitlab/diff/custom_diff_spec.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Diff::CustomDiff do
- include RepoHelpers
-
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
- let(:ipynb_blob) { repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') }
- let(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') }
-
- describe '#preprocess_before_diff' do
- context 'for ipynb files' do
- it 'transforms the diff' do
- expect(described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)).not_to include('cells')
- end
-
- it 'adds the blob to the list of transformed blobs' do
- described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
-
- expect(described_class.transformed_for_diff?(ipynb_blob)).to be_truthy
- end
- end
-
- context 'for other files' do
- it 'returns nil' do
- expect(described_class.preprocess_before_diff(blob.path, nil, blob)).to be_nil
- end
-
- it 'does not add the blob to the list of transformed blobs' do
- described_class.preprocess_before_diff(blob.path, nil, blob)
-
- expect(described_class.transformed_for_diff?(blob)).to be_falsey
- end
- end
-
- context 'timeout' do
- subject { described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob) }
-
- it 'falls back to nil on timeout' do
- allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
-
- expect(subject).to be_nil
- end
-
- context 'when in foreground' do
- it 'utilizes timeout for web' do
- expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_FOREGROUND).and_call_original
-
- expect(subject).not_to include('cells')
- end
-
- it 'increments metrics' do
- counter = Gitlab::Metrics.counter(:ipynb_semantic_diff_timeouts_total, 'desc')
-
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
- expect { subject }.to change { counter.get(source: described_class::FOREGROUND_EXECUTION) }.by(1)
- end
- end
-
- context 'when in background' do
- before do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- end
-
- it 'utilizes longer timeout for sidekiq' do
- expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_BACKGROUND).and_call_original
-
- expect(subject).not_to include('cells')
- end
-
- it 'increments metrics' do
- counter = Gitlab::Metrics.counter(:ipynb_semantic_diff_timeouts_total, 'desc')
-
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
- expect { subject }.to change { counter.get(source: described_class::BACKGROUND_EXECUTION) }.by(1)
- end
- end
- end
-
- context 'when invalid ipynb' do
- it 'returns nil' do
- expect(ipynb_blob).to receive(:data).and_return('invalid ipynb')
-
- expect(described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)).to be_nil
- end
- end
- end
-
- describe '#transformed_blob_data' do
- it 'transforms blob data if file was processed' do
- described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
-
- expect(described_class.transformed_blob_data(ipynb_blob)).not_to include('cells')
- end
-
- it 'does not transform blob data if file was not processed' do
- expect(described_class.transformed_blob_data(ipynb_blob)).to be_nil
- end
- end
-
- describe '#transformed_blob_language' do
- it 'is md when file was preprocessed' do
- described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
-
- expect(described_class.transformed_blob_language(ipynb_blob)).to eq('md')
- end
-
- it 'is nil for a .ipynb blob that was not preprocessed' do
- expect(described_class.transformed_blob_language(ipynb_blob)).to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index b7262629e0a..34f4bdde3b5 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -56,45 +56,21 @@ RSpec.describe Gitlab::Diff::File do
context 'when file is ipynb' do
let(:ipynb_semantic_diff) { false }
- let(:rendered_diffs_viewer) { false }
before do
- stub_feature_flags(ipynb_semantic_diff: ipynb_semantic_diff, rendered_diffs_viewer: rendered_diffs_viewer)
+ stub_feature_flags(ipynb_semantic_diff: ipynb_semantic_diff)
end
- context 'when ipynb_semantic_diff is off, and rendered_viewer is off' do
- it 'does not generate notebook diffs' do
- expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff)
- expect(diff_file.rendered).to be_nil
- end
- end
-
- context 'when ipynb_semantic_diff is off, and rendered_viewer is on' do
- let(:rendered_diffs_viewer) { true }
-
- it 'does not generate rendered diff' do
- expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff)
- expect(diff_file.rendered).to be_nil
- end
- end
-
- context 'when ipynb_semantic_diff is on, and rendered_viewer is off' do
- let(:ipynb_semantic_diff) { true }
+ subject { diff_file.rendered }
- it 'transforms using custom diff CustomDiff' do
- expect(Gitlab::Diff::CustomDiff).to receive(:preprocess_before_diff).and_call_original
- expect(diff_file.rendered).to be_nil
- end
+ context 'when ipynb_semantic_diff is off' do
+ it { is_expected.to be_nil }
end
- context 'when ipynb_semantic_diff is on, and rendered_viewer is on' do
+ context 'and rendered_viewer is on' do
let(:ipynb_semantic_diff) { true }
- let(:rendered_diffs_viewer) { true }
- it 'transforms diff using NotebookDiffFile' do
- expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff)
- expect(diff_file.rendered).not_to be_nil
- end
+ it { is_expected.not_to be_nil }
end
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
new file mode 100644
index 00000000000..cb046548880
--- /dev/null
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+require 'set'
+
+MOCK_LINE = Struct.new(:text, :type, :index, :old_pos, :new_pos)
+
+def make_lines(old_lines, new_lines, texts = nil, types = nil)
+ old_lines.each_with_index.map do |old, i|
+ MOCK_LINE.new(texts ? texts[i] : '', types ? types[i] : nil, i, old, new_lines[i])
+ end
+end
+
+RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFileHelper do
+ let(:dummy) { Class.new { include Gitlab::Diff::Rendered::Notebook::DiffFileHelper }.new }
+
+ describe '#strip_diff_frontmatter' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { dummy.strip_diff_frontmatter(diff) }
+
+ where(:diff, :result) do
+ "FileLine1\nFileLine2\n@@ -1,76 +1,74 @@\nhello\n" | "@@ -1,76 +1,74 @@\nhello\n"
+ "" | nil
+ nil | nil
+ end
+
+ with_them do
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '#map_transformed_line_to_source' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { dummy.source_line_from_block(1, transformed_blocks) }
+
+ where(:case, :transformed_blocks, :result) do
+ 'if transformed diff is empty' | [] | 0
+ 'if the transformed line does not map to any in the original file' | [{ source_line: nil }] | 0
+ 'if the transformed line maps to a line in the source file' | [{ source_line: 2 }] | 3
+ end
+
+ with_them do
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '#image_as_rich_text' do
+ let(:img) { 'data:image/png;base64,some_image_here' }
+ let(:line_text) { " ![](#{img})"}
+
+ subject { dummy.image_as_rich_text(line_text) }
+
+ context 'text does not contain image' do
+ let(:img) { "not an image" }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'text contains image' do
+ it { is_expected.to eq("<img src=\"#{img}\">") }
+ end
+
+ context 'text contains image that has malicious html' do
+ let(:img) { 'data:image/png;base64,some_image_here"<div>Hello</div>' }
+
+ it 'sanitizes the html' do
+ expect(subject).not_to include('<div>Hello')
+ end
+
+ it 'adds image to src' do
+ expect(subject).to end_with('/div&gt;">')
+ end
+ end
+ end
+
+ describe '#line_positions_at_source_diff' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:blocks) do
+ {
+ from: [0, 2, 1, nil, nil, 3].map { |i| { source_line: i } },
+ to: [0, 1, nil, 2, nil, 3].map { |i| { source_line: i } }
+ }
+ end
+
+ let(:lines) do
+ make_lines(
+ [1, 2, 3, 4, 5, 5, 5, 5, 6],
+ [1, 2, 2, 2, 2, 3, 4, 5, 6],
+ 'ACBLDJEKF'.split(""),
+ [nil, 'old', 'old', 'old', 'new', 'new', 'new', nil, nil]
+ )
+ end
+
+ subject { dummy.line_positions_at_source_diff(lines, blocks)[index] }
+
+ where(:case, :index, :transformed_positions, :mapped_positions) do
+ " A A" | 0 | [1, 1] | [1, 1] # No change, old_pos and new_pos have mappings
+ "- C " | 1 | [2, 2] | [3, 2] # A removal, both old_pos and new_pos have valid mappings
+ "- B " | 2 | [3, 2] | [2, 2] # A removal, both old_pos and new_pos have valid mappings
+ "- L " | 3 | [4, 2] | [0, 0] # A removal, but old_pos has no mapping
+ "+ D" | 4 | [5, 2] | [4, 2] # An addition, new_pos has mapping but old_pos does not, so old_pos is remapped
+ "+ J" | 5 | [5, 3] | [0, 0] # An addition, but new_pos has no mapping, so neither are remapped
+ "+ E" | 6 | [5, 4] | [4, 3] # An addition, new_pos has mapping but old_pos does not, so old_pos is remapped
+ " K K" | 7 | [5, 5] | [0, 0] # This has no mapping
+ " F F" | 8 | [6, 6] | [4, 4] # No change, old_pos and new_pos have mappings
+ end
+
+ with_them do
+ it { is_expected.to eq(mapped_positions) }
+ end
+ end
+
+ describe '#lines_in_source_diff' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:lines) { make_lines(old_lines, new_lines) }
+
+ subject { dummy.lines_in_source_diff(lines, is_deleted, is_new) }
+
+ where(:old_lines, :new_lines, :is_deleted, :is_new, :existing_lines) do
+ [1, 2, 2] | [1, 1, 4] | false | false | { from: Set[1, 2], to: Set[1, 4] }
+ [1, 2, 2] | [1, 1, 4] | true | false | { from: Set[1, 2], to: Set[] }
+ [1, 2, 2] | [1, 1, 4] | false | true | { from: Set[], to: Set[1, 4] }
+ end
+
+ with_them do
+ it { is_expected.to eq(existing_lines) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
index 1b74e24bf81..c38684a6dc3 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
context 'timeout' do
it 'utilizes timeout for web' do
- expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_FOREGROUND).and_call_original
+ expect(Timeout).to receive(:timeout).with(Gitlab::RenderTimeout::FOREGROUND).and_call_original
nb_file.diff
end
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
context 'assigns the correct position' do
- it 'computes de first line where the remove would appear' do
+ it 'computes the first line where the remove would appear' do
expect(nb_file.highlighted_diff_lines[0].old_pos).to eq(3)
expect(nb_file.highlighted_diff_lines[0].new_pos).to eq(3)
@@ -142,8 +142,29 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
end
- it 'computes de first line where the remove would appear' do
- expect(nb_file.highlighted_diff_lines.map(&:text).join('')).to include('[Hidden Image Output]')
+ context 'has image' do
+ it 'replaces rich text with img to the embedded image' do
+ expect(nb_file.highlighted_diff_lines[58].rich_text).to include('<img')
+ end
+
+ it 'adds image to src' do
+ img = 'data:image/png;base64,some_image_here'
+ allow(diff).to receive(:diff).and_return("@@ -1,76 +1,74 @@\n ![](#{img})")
+
+ expect(nb_file.highlighted_diff_lines[0].rich_text).to include("<img src=\"#{img}\"")
+ end
+ end
+
+ context 'when embedded image has injected html' do
+ let(:commit) { project.commit("4963fefc990451a8ad34289ce266b757456fc88c") }
+
+ it 'prevents injected html to be rendered as html' do
+ expect(nb_file.highlighted_diff_lines[45].rich_text).not_to include('<div>Hello')
+ end
+
+ it 'keeps the injected html as part of the string' do
+ expect(nb_file.highlighted_diff_lines[45].rich_text).to end_with('/div&gt;">')
+ end
end
end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 8d008986464..6e7806c5d53 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
let(:author_email) { 'jake@adventuretime.ooo' }
let(:message_id) { 'CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com' }
- let_it_be(:group) { create(:group, :private, name: "email") }
+ let_it_be(:group) { create(:group, :private, :crm_enabled, name: "email") }
let(:expected_description) do
"Service desk stuff!\n\n```\na = b\n```\n\n`/label ~label1`\n`/assign @user1`\n`/close`\n![image](uploads/image.png)"
@@ -52,6 +52,14 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect(new_issue.issue_email_participants.first.email).to eq(author_email)
end
+ it 'attaches existing CRM contact' do
+ contact = create(:contact, group: group, email: author_email)
+ receiver.execute
+ new_issue = Issue.last
+
+ expect(new_issue.issue_customer_relations_contacts.last.contact).to eq(contact)
+ end
+
it 'sends thank you email' do
expect { receiver.execute }.to have_enqueued_job.on_queue('mailers')
end
diff --git a/spec/lib/gitlab/email/message/repository_push_spec.rb b/spec/lib/gitlab/email/message/repository_push_spec.rb
index 6b1f03e0385..f13d98ec9b9 100644
--- a/spec/lib/gitlab/email/message/repository_push_spec.rb
+++ b/spec/lib/gitlab/email/message/repository_push_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Email::Message::RepositoryPush do
describe '#commits' do
subject { message.commits }
- it { is_expected.to be_kind_of Array }
+ it { is_expected.to be_kind_of CommitCollection }
it { is_expected.to all(be_instance_of Commit) }
end
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 9040731d8fd..79476c63e66 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::Email::Receiver do
let_it_be(:project) { create(:project) }
let(:handler) { double(:handler, project: project, execute: true, metrics_event: nil, metrics_params: nil) }
+ let(:client_id) { 'email/jake@example.com' }
it 'correctly finds the mail key' do
expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
@@ -33,7 +34,7 @@ RSpec.describe Gitlab::Email::Receiver do
metadata = receiver.mail_metadata
expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients))
- expect(metadata[:meta]).to include(client_id: 'email/jake@example.com', project: project.full_path)
+ expect(metadata[:meta]).to include(client_id: client_id, project: project.full_path)
expect(metadata[meta_key]).to eq(meta_value)
end
end
@@ -89,19 +90,9 @@ RSpec.describe Gitlab::Email::Receiver do
let(:meta_key) { :received_recipients }
let(:meta_value) { ['incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com', 'incoming+gitlabhq/gitlabhq@example.com'] }
- context 'when use_received_header_for_incoming_emails is enabled' do
+ describe 'it uses received headers to find the key' do
it_behaves_like 'successful receive'
end
-
- context 'when use_received_header_for_incoming_emails is disabled' do
- let(:expected_error) { Gitlab::Email::UnknownIncomingEmail }
-
- before do
- stub_feature_flags(use_received_header_for_incoming_emails: false)
- end
-
- it_behaves_like 'failed receive'
- end
end
end
@@ -126,6 +117,49 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'failed receive'
end
+ context "when the email's To field is blank" do
+ before do
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
+ end
+
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
+ From: "jake@example.com" <jake@example.com>
+ Bcc: "support@example.com" <support@example.com>
+
+ Email content
+ EMAIL
+ end
+
+ let(:meta_key) { :delivered_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
+
+ it_behaves_like 'successful receive'
+ end
+
+ context "when the email's From field is blank" do
+ before do
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
+ end
+
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
+ To: "support@example.com" <support@example.com>
+
+ Email content
+ EMAIL
+ end
+
+ let(:meta_key) { :delivered_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
+
+ it_behaves_like 'successful receive' do
+ let(:client_id) { 'email/' }
+ end
+ end
+
context 'when the email was auto generated with X-Autoreply header' do
let(:email_raw) { fixture_file('emails/auto_reply.eml') }
let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb
index c0d177aff4d..c61d941406b 100644
--- a/spec/lib/gitlab/email/reply_parser_spec.rb
+++ b/spec/lib/gitlab/email/reply_parser_spec.rb
@@ -268,5 +268,72 @@ RSpec.describe Gitlab::Email::ReplyParser do
expect(test_parse_body(fixture_file("emails/valid_new_issue_with_quote.eml"), { append_reply: true }))
.to contain_exactly(body, reply)
end
+
+ context 'non-UTF-8 content' do
+ let(:charset) { '; charset=Shift_JIS' }
+ let(:raw_content) do
+ <<-BODY.strip_heredoc.chomp
+ From: Jake the Dog <alan@adventuretime.ooo>
+ To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+ Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+ Subject: The message subject! @all
+ Content-Type: text/plain#{charset}
+ Content-Transfer-Encoding: 8bit
+
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+
+ # Strip the encoding to simulate the case where Ruby falls back to ASCII-8BIT
+ # when it encounters an unknown encoding
+ let(:encoded_content) { raw_content.encode("Shift_JIS").bytes.pack("c*") }
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(encoded_content))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+
+ # This test would raise an exception if encoding is not handled properly
+ # Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/364329
+ context 'charset is absent and reply trimming is disabled' do
+ let(:charset) { '' }
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(encoded_content, { trim_reply: false }))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+ end
+
+ context 'multipart email' do
+ let(:raw_content) do
+ <<-BODY.strip_heredoc.chomp
+ From: Jake the Dog <alan@adventuretime.ooo>
+ To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+ Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+ Subject: The message subject! @all
+ Content-Type: multipart/alternative;
+ boundary=Apple-Mail-B41C7F8E-3639-49B0-A5D5-440E125A7105
+ Content-Transfer-Encoding: 7bit
+
+ --Apple-Mail-B41C7F8E-3639-49B0-A5D5-440E125A7105
+ Content-Type: text/plain
+ Content-Transfer-Encoding: 7bit
+
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(encoded_content, { trim_reply: false }))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+ end
+ end
end
end
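The Shift_JIS fixtures above exercise mail whose charset label has been lost, so the raw bytes arrive as ASCII-8BIT. A minimal sketch of that round trip in plain Ruby follows; the force_encoding/encode calls illustrate how such bytes are normally recovered and are not a claim about ReplyParser's internals.

    # Encode Japanese text as Shift_JIS and drop the encoding label,
    # mirroring the spec's `raw_content.encode("Shift_JIS").bytes.pack("c*")`.
    raw = "こんにちは。".encode("Shift_JIS").bytes.pack("c*")
    raw.encoding                                   # => #<Encoding:ASCII-8BIT>
    # Recovery requires declaring the real encoding before converting:
    utf8 = raw.force_encoding("Shift_JIS").encode("UTF-8")
    utf8                                           # => "こんにちは。"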
diff --git a/spec/lib/gitlab/email/service_desk_receiver_spec.rb b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
index 49cbec6fffc..c249a5422ff 100644
--- a/spec/lib/gitlab/email/service_desk_receiver_spec.rb
+++ b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
@@ -53,6 +53,18 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
end
end
+ context 'when the email contains no key in the To header and contains reference header with no key' do
+ let(:email) { fixture_file('emails/service_desk_reference_headers.eml') }
+
+ before do
+ stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
+ end
+
+ it 'sends a rejection email' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail)
+ end
+ end
+
context 'when the email does not contain a valid email address' do
before do
stub_service_desk_email_setting(enabled: true, address: 'other_support+%{key}@example.com')
diff --git a/spec/lib/gitlab/error_tracking/logger_spec.rb b/spec/lib/gitlab/error_tracking/logger_spec.rb
new file mode 100644
index 00000000000..751ec10a1f0
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/logger_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ErrorTracking::Logger do
+ describe '.capture_exception' do
+ let(:exception) { RuntimeError.new('boom') }
+ let(:payload) { { foo: '123' } }
+ let(:log_entry) { { message: 'boom', context: payload }}
+
+ it 'calls Gitlab::ErrorTracking::Logger.error with formatted log entry' do
+ expect_next_instance_of(Gitlab::ErrorTracking::LogFormatter) do |log_formatter|
+ expect(log_formatter).to receive(:generate_log).with(exception, payload).and_return(log_entry)
+ end
+
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(log_entry)
+
+ described_class.capture_exception(exception, **payload)
+ end
+ end
+end
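For orientation, the call flow this new spec pins down, written out as plain Ruby. The class and method names come straight from the spec; the no-argument LogFormatter constructor and the shape of the log entry are assumptions taken from the spec's stubs.

    exception = RuntimeError.new('boom')
    payload   = { foo: '123' }
    # capture_exception is expected to delegate formatting, then log the result:
    log_entry = Gitlab::ErrorTracking::LogFormatter.new.generate_log(exception, payload)
    Gitlab::ErrorTracking::Logger.error(log_entry)  # per the spec's stub: { message: 'boom', context: payload }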
diff --git a/spec/lib/gitlab/event_store/store_spec.rb b/spec/lib/gitlab/event_store/store_spec.rb
index 94e8f0ff2ff..bbdfecc897a 100644
--- a/spec/lib/gitlab/event_store/store_spec.rb
+++ b/spec/lib/gitlab/event_store/store_spec.rb
@@ -134,6 +134,7 @@ RSpec.describe Gitlab::EventStore::Store do
describe '#publish' do
let(:data) { { name: 'Bob', id: 123 } }
+ let(:serialized_data) { data.deep_stringify_keys }
context 'when event has a subscribed worker' do
let(:store) do
@@ -144,12 +145,21 @@ RSpec.describe Gitlab::EventStore::Store do
end
it 'dispatches the event to the subscribed worker' do
- expect(worker).to receive(:perform_async).with('TestEvent', data)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data)
expect(another_worker).not_to receive(:perform_async)
store.publish(event)
end
+ it 'does not raise any Sidekiq warning' do
+ logger = double(:logger, info: nil)
+ allow(Sidekiq).to receive(:logger).and_return(logger)
+ expect(logger).not_to receive(:warn).with(/do not serialize to JSON safely/)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data).and_call_original
+
+ store.publish(event)
+ end
+
context 'when other workers subscribe to the same event' do
let(:store) do
described_class.new do |store|
@@ -160,8 +170,8 @@ RSpec.describe Gitlab::EventStore::Store do
end
it 'dispatches the event to each subscribed worker' do
- expect(worker).to receive(:perform_async).with('TestEvent', data)
- expect(another_worker).to receive(:perform_async).with('TestEvent', data)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data)
+ expect(another_worker).to receive(:perform_async).with('TestEvent', serialized_data)
expect(unrelated_worker).not_to receive(:perform_async)
store.publish(event)
@@ -215,7 +225,7 @@ RSpec.describe Gitlab::EventStore::Store do
let(:event) { event_klass.new(data: data) }
it 'dispatches the event to the workers satisfying the condition' do
- expect(worker).to receive(:perform_async).with('TestEvent', data)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data)
expect(another_worker).not_to receive(:perform_async)
store.publish(event)
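The move from `data` to `serialized_data` in these expectations reflects Sidekiq's strict-args checking: job arguments must survive a JSON round trip, so symbol keys have to be stringified before `perform_async`. A small standalone illustration using ActiveSupport's `deep_stringify_keys` and the standard `json` library, with no GitLab internals involved:

    require 'json'
    require 'active_support/core_ext/hash/keys'

    data       = { name: 'Bob', id: 123 }
    serialized = data.deep_stringify_keys        # => { "name" => "Bob", "id" => 123 }
    # Symbol keys would come back as strings after JSON, tripping the warning;
    # stringified data round-trips unchanged and is safe to enqueue.
    JSON.parse(serialized.to_json) == serialized # => true
    JSON.parse(data.to_json) == data             # => false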
diff --git a/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb b/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb
index 6b8bb2229a9..8be9f55dbb6 100644
--- a/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::FogbugzImport::ProjectCreator do
let(:user) { create(:user) }
-
let(:repo) do
instance_double(Gitlab::FogbugzImport::Repository,
name: 'Vim',
@@ -13,10 +12,11 @@ RSpec.describe Gitlab::FogbugzImport::ProjectCreator do
raw_data: '')
end
+ let(:repo_name) { 'new_name' }
let(:uri) { 'https://testing.fogbugz.com' }
let(:token) { 'token' }
let(:fb_session) { { uri: uri, token: token } }
- let(:project_creator) { described_class.new(repo, fb_session, user.namespace, user) }
+ let(:project_creator) { described_class.new(repo, repo_name, user.namespace, user, fb_session) }
subject do
project_creator.execute
@@ -26,4 +26,9 @@ RSpec.describe Gitlab::FogbugzImport::ProjectCreator do
expect(subject.persisted?).to eq(true)
expect(subject.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
+
+ it 'creates project with provided name and path' do
+ expect(subject.name).to eq(repo_name)
+ expect(subject.path).to eq(repo_name)
+ end
end
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index a5f26a212ab..2b1fcac9257 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -3,163 +3,194 @@
require 'spec_helper'
RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
- let_it_be(:user) { build(:user) }
- let_it_be(:fake_template) do
- Object.new.tap do |template|
- template.extend ActionView::Helpers::FormHelper
- template.extend ActionView::Helpers::FormOptionsHelper
- template.extend ActionView::Helpers::TagHelper
- template.extend ActionView::Context
- end
- end
+ include FormBuilderHelpers
- let_it_be(:form_builder) { described_class.new(:user, user, fake_template, {}) }
-
- describe '#gitlab_ui_checkbox_component' do
- let(:optional_args) { {} }
+ let_it_be(:user) { build(:user, :admin) }
- subject(:checkbox_html) { form_builder.gitlab_ui_checkbox_component(:view_diffs_file_by_file, "Show one file at a time on merge request's Changes tab", **optional_args) }
-
- context 'without optional arguments' do
- it 'renders correct html' do
- expected_html = <<~EOS
- <div class="gl-form-checkbox custom-control custom-checkbox">
- <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
- <input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
- <label class="custom-control-label" for="user_view_diffs_file_by_file">
- Show one file at a time on merge request&#39;s Changes tab
- </label>
- </div>
- EOS
+ let_it_be(:form_builder) { described_class.new(:user, user, fake_action_view_base, {}) }
- expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ describe '#gitlab_ui_checkbox_component' do
+ context 'when not using slots' do
+ let(:optional_args) { {} }
+
+ subject(:checkbox_html) do
+ form_builder.gitlab_ui_checkbox_component(
+ :view_diffs_file_by_file,
+ "Show one file at a time on merge request's Changes tab",
+ **optional_args
+ )
end
- end
- context 'with optional arguments' do
- let(:optional_args) do
- {
- help_text: 'Instead of all the files changed, show only one file at a time.',
- checkbox_options: { class: 'checkbox-foo-bar' },
- label_options: { class: 'label-foo-bar' },
- checked_value: '3',
- unchecked_value: '1'
- }
+ context 'without optional arguments' do
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
+ <input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
- it 'renders help text' do
- expected_html = <<~EOS
- <div class="gl-form-checkbox custom-control custom-checkbox">
- <input name="user[view_diffs_file_by_file]" type="hidden" value="1" />
- <input class="custom-control-input checkbox-foo-bar" type="checkbox" value="3" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
- <label class="custom-control-label label-foo-bar" for="user_view_diffs_file_by_file">
- <span>Show one file at a time on merge request&#39;s Changes tab</span>
- <p class="help-text">Instead of all the files changed, show only one file at a time.</p>
- </label>
- </div>
- EOS
-
- expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
- end
-
- it 'passes arguments to `check_box` method' do
- allow(fake_template).to receive(:check_box).and_return('')
-
- checkbox_html
-
- expect(fake_template).to have_received(:check_box).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-input checkbox-foo-bar), object: user }, '3', '1')
+ context 'with optional arguments' do
+ let(:optional_args) do
+ {
+ help_text: 'Instead of all the files changed, show only one file at a time.',
+ checkbox_options: { class: 'checkbox-foo-bar' },
+ label_options: { class: 'label-foo-bar' },
+ checked_value: '3',
+ unchecked_value: '1'
+ }
+ end
+
+ it 'renders help text' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="1" />
+ <input class="custom-control-input checkbox-foo-bar" type="checkbox" value="3" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label label-foo-bar" for="user_view_diffs_file_by_file">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Instead of all the files changed, show only one file at a time.</p>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
- it 'passes arguments to `label` method' do
- allow(fake_template).to receive(:label).and_return('')
-
- checkbox_html
-
- expect(fake_template).to have_received(:label).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-label label-foo-bar), object: user, value: nil })
+ context 'with checkbox_options: { multiple: true }' do
+ let(:optional_args) do
+ {
+ checkbox_options: { multiple: true },
+ checked_value: 'one',
+ unchecked_value: false
+ }
+ end
+
+ it 'renders labels with correct for attributes' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input class="custom-control-input" type="checkbox" value="one" name="user[view_diffs_file_by_file][]" id="user_view_diffs_file_by_file_one" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file_one">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
end
- context 'with checkbox_options: { multiple: true }' do
- let(:optional_args) do
- {
- checkbox_options: { multiple: true },
- checked_value: 'one',
- unchecked_value: false
- }
+ context 'when using slots' do
+ subject(:checkbox_html) do
+ form_builder.gitlab_ui_checkbox_component(
+ :view_diffs_file_by_file
+ ) do |c|
+ c.label { "Show one file at a time on merge request's Changes tab" }
+ c.help_text { 'Instead of all the files changed, show only one file at a time.' }
+ end
end
- it 'renders labels with correct for attributes' do
+ it 'renders correct html' do
expected_html = <<~EOS
<div class="gl-form-checkbox custom-control custom-checkbox">
- <input class="custom-control-input" type="checkbox" value="one" name="user[view_diffs_file_by_file][]" id="user_view_diffs_file_by_file_one" />
- <label class="custom-control-label" for="user_view_diffs_file_by_file_one">
- Show one file at a time on merge request&#39;s Changes tab
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
+ <input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Instead of all the files changed, show only one file at a time.</p>
</label>
</div>
EOS
- expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
end
end
end
describe '#gitlab_ui_radio_component' do
- let(:optional_args) { {} }
-
- subject(:radio_html) { form_builder.gitlab_ui_radio_component(:access_level, :admin, "Access Level", **optional_args) }
+ context 'when not using slots' do
+ let(:optional_args) { {} }
+
+ subject(:radio_html) do
+ form_builder.gitlab_ui_radio_component(
+ :access_level,
+ :admin,
+ "Admin",
+ **optional_args
+ )
+ end
- context 'without optional arguments' do
- it 'renders correct html' do
- expected_html = <<~EOS
- <div class="gl-form-radio custom-control custom-radio">
- <input class="custom-control-input" type="radio" value="admin" name="user[access_level]" id="user_access_level_admin" />
- <label class="custom-control-label" for="user_access_level_admin">
- Access Level
- </label>
- </div>
- EOS
+ context 'without optional arguments' do
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <div class="gl-form-radio custom-control custom-radio">
+ <input class="custom-control-input" type="radio" value="admin" checked="checked" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label" for="user_access_level_admin">
+ <span>Admin</span>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(radio_html)).to eq(html_strip_whitespace(expected_html))
+ end
+ end
- expect(radio_html).to eq(html_strip_whitespace(expected_html))
+ context 'with optional arguments' do
+ let(:optional_args) do
+ {
+ help_text: 'Administrators have access to all groups, projects, and users and can manage all features in this installation',
+ radio_options: { class: 'radio-foo-bar' },
+ label_options: { class: 'label-foo-bar' }
+ }
+ end
+
+ it 'renders help text' do
+ expected_html = <<~EOS
+ <div class="gl-form-radio custom-control custom-radio">
+ <input class="custom-control-input radio-foo-bar" type="radio" value="admin" checked="checked" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label label-foo-bar" for="user_access_level_admin">
+ <span>Admin</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Administrators have access to all groups, projects, and users and can manage all features in this installation</p>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(radio_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
end
- context 'with optional arguments' do
- let(:optional_args) do
- {
- help_text: 'Administrators have access to all groups, projects, and users and can manage all features in this installation',
- radio_options: { class: 'radio-foo-bar' },
- label_options: { class: 'label-foo-bar' }
- }
+ context 'when using slots' do
+ subject(:radio_html) do
+ form_builder.gitlab_ui_radio_component(
+ :access_level,
+ :admin
+ ) do |c|
+ c.label { "Admin" }
+ c.help_text { 'Administrators have access to all groups, projects, and users and can manage all features in this installation' }
+ end
end
- it 'renders help text' do
+ it 'renders correct html' do
expected_html = <<~EOS
<div class="gl-form-radio custom-control custom-radio">
- <input class="custom-control-input radio-foo-bar" type="radio" value="admin" name="user[access_level]" id="user_access_level_admin" />
- <label class="custom-control-label label-foo-bar" for="user_access_level_admin">
- <span>Access Level</span>
- <p class="help-text">Administrators have access to all groups, projects, and users and can manage all features in this installation</p>
+ <input class="custom-control-input" type="radio" value="admin" checked="checked" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label" for="user_access_level_admin">
+ <span>Admin</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Administrators have access to all groups, projects, and users and can manage all features in this installation</p>
</label>
</div>
EOS
- expect(radio_html).to eq(html_strip_whitespace(expected_html))
- end
-
- it 'passes arguments to `radio_button` method' do
- allow(fake_template).to receive(:radio_button).and_return('')
-
- radio_html
-
- expect(fake_template).to have_received(:radio_button).with(:user, :access_level, :admin, { class: %w(custom-control-input radio-foo-bar), object: user })
- end
-
- it 'passes arguments to `label` method' do
- allow(fake_template).to receive(:label).and_return('')
-
- radio_html
-
- expect(fake_template).to have_received(:label).with(:user, :access_level, { class: %w(custom-control-label label-foo-bar), object: user, value: :admin })
+ expect(html_strip_whitespace(radio_html)).to eq(html_strip_whitespace(expected_html))
end
end
end
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index 7d4a3655be6..8bb649e78e0 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
let(:old_project) { create(:project, name: 'old-project', group: group) }
let(:old_project_ref) { old_project.to_reference_base(new_project) }
let(:text) { 'some text' }
+ let(:note) { create(:note, note: text, project: old_project) }
before do
old_project.add_reporter(user)
@@ -17,7 +18,7 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
describe '#rewrite' do
subject do
- described_class.new(text, old_project, user).rewrite(new_project)
+ described_class.new(note.note, note.note_html, old_project, user).rewrite(new_project)
end
context 'multiple issues and merge requests referenced' do
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index f878f02f410..763e6f1b5f4 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
let(:user) { create(:user) }
let(:old_project) { create(:project) }
let(:new_project) { create(:project) }
- let(:rewriter) { described_class.new(text, old_project, user) }
+ let(:rewriter) { described_class.new(+text, nil, old_project, user) }
context 'text contains links to uploads' do
let(:image_uploader) do
@@ -22,13 +22,21 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
"Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
end
+ def referenced_files(text, project)
+ referenced_files = text.scan(FileUploader::MARKDOWN_PATTERN).map do
+ UploaderFinder.new(project, $~[:secret], $~[:file]).execute
+ end
+
+ referenced_files.compact.select(&:exists?)
+ end
+
shared_examples "files are accessible" do
describe '#rewrite' do
let!(:new_text) { rewriter.rewrite(new_project) }
let(:old_files) { [image_uploader, zip_uploader] }
let(:new_files) do
- described_class.new(new_text, new_project, user).files
+ referenced_files(new_text, new_project)
end
let(:old_paths) { old_files.map(&:path) }
@@ -68,9 +76,9 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
it 'does not rewrite plain links as embedded' do
embedded_link = image_uploader.markdown_link
plain_image_link = embedded_link.delete_prefix('!')
- text = "#{plain_image_link} and #{embedded_link}"
+ text = +"#{plain_image_link} and #{embedded_link}"
- moved_text = described_class.new(text, old_project, user).rewrite(new_project)
+ moved_text = described_class.new(text, nil, old_project, user).rewrite(new_project)
expect(moved_text.scan(/!\[.*?\]/).count).to eq(1)
expect(moved_text.scan(/\A\[.*?\]/).count).to eq(1)
@@ -97,11 +105,5 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
it { is_expected.to eq true }
end
-
- describe '#files' do
- subject { rewriter.files }
-
- it { is_expected.to be_an(Array) }
- end
end
end
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index b6a61de87a6..a7036a4f20a 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -121,13 +121,19 @@ RSpec.describe Gitlab::GitAccessSnippet do
if Ability.allowed?(user, :update_snippet, snippet)
expect { push_access_check }.not_to raise_error
else
- expect { push_access_check }.to raise_error(described_class::ForbiddenError)
+ expect { push_access_check }.to raise_error(
+ described_class::ForbiddenError,
+ described_class::ERROR_MESSAGES[:update_snippet]
+ )
end
if Ability.allowed?(user, :read_snippet, snippet)
expect { pull_access_check }.not_to raise_error
else
- expect { pull_access_check }.to raise_error(described_class::ForbiddenError)
+ expect { pull_access_check }.to raise_error(
+ described_class::ForbiddenError,
+ described_class::ERROR_MESSAGES[:read_snippet]
+ )
end
end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index e628a06a542..5ee9cf05b3e 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::GitAccess do
describe '#check with single protocols allowed' do
def disable_protocol(protocol)
- allow(Gitlab::ProtocolAccess).to receive(:allowed?).with(protocol).and_return(false)
+ allow(Gitlab::ProtocolAccess).to receive(:allowed?).with(protocol, project: project).and_return(false)
end
context 'ssh disabled' do
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index de3e674c3a7..11c19c7d3f0 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::GitAccessWiki do
redirected_path: redirected_path)
end
- RSpec.shared_examples 'wiki access by level' do
+ RSpec.shared_examples 'download wiki access by level' do
where(:project_visibility, :project_member?, :wiki_access_level, :wiki_repo?, :expected_behavior) do
[
# Private project - is a project member
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::GitAccessWiki do
subject { access.check('git-upload-pack', Gitlab::GitAccess::ANY) }
context 'when actor is a user' do
- it_behaves_like 'wiki access by level'
+ it_behaves_like 'download wiki access by level'
end
context 'when the actor is a deploy token' do
@@ -116,6 +116,36 @@ RSpec.describe Gitlab::GitAccessWiki do
subject { access.check('git-upload-pack', changes) }
+ context 'when the wiki feature is enabled' do
+ let(:wiki_access_level) { ProjectFeature::ENABLED }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'when the wiki feature is disabled' do
+ let(:wiki_access_level) { ProjectFeature::DISABLED }
+
+ it { expect { subject }.to raise_wiki_forbidden }
+ end
+
+ context 'when the wiki feature is private' do
+ let(:wiki_access_level) { ProjectFeature::PRIVATE }
+
+ it { expect { subject }.to raise_wiki_forbidden }
+ end
+ end
+
+ context 'when the actor is a deploy key' do
+ let_it_be(:actor) { create(:deploy_key) }
+ let_it_be(:deploy_key_project) { create(:deploy_keys_project, project: project, deploy_key: actor) }
+ let_it_be(:user) { actor }
+
+ before do
+ project.project_feature.update_attribute(:wiki_access_level, wiki_access_level)
+ end
+
+ subject { access.check('git-upload-pack', changes) }
+
context 'when the wiki is enabled' do
let(:wiki_access_level) { ProjectFeature::ENABLED }
@@ -140,7 +170,7 @@ RSpec.describe Gitlab::GitAccessWiki do
subject { access.check('git-upload-pack', changes) }
- it_behaves_like 'wiki access by level'
+ it_behaves_like 'download wiki access by level'
end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 92860c9232f..3a34d39c722 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -340,17 +340,12 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
let(:revisions) { [revision] }
let(:gitaly_commits) { create_list(:gitaly_commit, 3) }
let(:expected_commits) { gitaly_commits.map { |c| Gitlab::Git::Commit.new(repository, c) }}
- let(:filter_quarantined_commits) { false }
subject do
- client.list_new_commits(revisions, allow_quarantine: allow_quarantine)
+ client.list_new_commits(revisions)
end
shared_examples 'a #list_all_commits message' do
- before do
- stub_feature_flags(filter_quarantined_commits: filter_quarantined_commits)
- end
-
it 'sends a list_all_commits message' do
expected_repository = repository.gitaly_repository.dup
expected_repository.git_alternate_object_directories = Google::Protobuf::RepeatedField.new(:string)
@@ -360,29 +355,25 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
.with(gitaly_request_with_params(repository: expected_repository), kind_of(Hash))
.and_return([Gitaly::ListAllCommitsResponse.new(commits: gitaly_commits)])
- if filter_quarantined_commits
- # The object directory of the repository must not be set so that we
- # don't use the quarantine directory.
- objects_exist_repo = repository.gitaly_repository.dup
- objects_exist_repo.git_object_directory = ""
-
- # The first request contains the repository, the second request the
- # commit IDs we want to check for existence.
- objects_exist_request = [
- gitaly_request_with_params(repository: objects_exist_repo),
- gitaly_request_with_params(revisions: gitaly_commits.map(&:id))
- ]
-
- objects_exist_response = Gitaly::CheckObjectsExistResponse.new(revisions: revision_existence.map do
- |rev, exists| Gitaly::CheckObjectsExistResponse::RevisionExistence.new(name: rev, exists: exists)
- end)
-
- expect(service).to receive(:check_objects_exist)
- .with(objects_exist_request, kind_of(Hash))
- .and_return([objects_exist_response])
- else
- expect(service).not_to receive(:check_objects_exist)
- end
+ # The object directory of the repository must not be set so that we
+ # don't use the quarantine directory.
+ objects_exist_repo = repository.gitaly_repository.dup
+ objects_exist_repo.git_object_directory = ""
+
+ # The first request contains the repository, the second request the
+ # commit IDs we want to check for existence.
+ objects_exist_request = [
+ gitaly_request_with_params(repository: objects_exist_repo),
+ gitaly_request_with_params(revisions: gitaly_commits.map(&:id))
+ ]
+
+ objects_exist_response = Gitaly::CheckObjectsExistResponse.new(revisions: revision_existence.map do
+ |rev, exists| Gitaly::CheckObjectsExistResponse::RevisionExistence.new(name: rev, exists: exists)
+ end)
+
+ expect(service).to receive(:check_objects_exist)
+ .with(objects_exist_request, kind_of(Hash))
+ .and_return([objects_exist_response])
end
expect(subject).to eq(expected_commits)
@@ -418,49 +409,31 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
}
end
- context 'with allowed quarantine' do
- let(:allow_quarantine) { true }
-
- context 'without commit filtering' do
- it_behaves_like 'a #list_all_commits message'
- end
-
- context 'with commit filtering' do
- let(:filter_quarantined_commits) { true }
-
- context 'reject commits which exist in target repository' do
- let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, true] } }
- let(:expected_commits) { [] }
-
- it_behaves_like 'a #list_all_commits message'
- end
-
- context 'keep commits which do not exist in target repository' do
- let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, false] } }
+ context 'reject commits which exist in target repository' do
+ let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, true] } }
+ let(:expected_commits) { [] }
- it_behaves_like 'a #list_all_commits message'
- end
+ it_behaves_like 'a #list_all_commits message'
+ end
- context 'mixed existing and nonexisting commits' do
- let(:revision_existence) do
- {
- gitaly_commits[0].id => true,
- gitaly_commits[1].id => false,
- gitaly_commits[2].id => true
- }
- end
+ context 'keep commits which do not exist in target repository' do
+ let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, false] } }
- let(:expected_commits) { [Gitlab::Git::Commit.new(repository, gitaly_commits[1])] }
+ it_behaves_like 'a #list_all_commits message'
+ end
- it_behaves_like 'a #list_all_commits message'
- end
+ context 'mixed existing and nonexisting commits' do
+ let(:revision_existence) do
+ {
+ gitaly_commits[0].id => true,
+ gitaly_commits[1].id => false,
+ gitaly_commits[2].id => true
+ }
end
- end
- context 'with disallowed quarantine' do
- let(:allow_quarantine) { false }
+ let(:expected_commits) { [Gitlab::Git::Commit.new(repository, gitaly_commits[1])] }
- it_behaves_like 'a #list_commits message'
+ it_behaves_like 'a #list_all_commits message'
end
end
@@ -472,17 +445,7 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
}
end
- context 'with allowed quarantine' do
- let(:allow_quarantine) { true }
-
- it_behaves_like 'a #list_commits message'
- end
-
- context 'with disallowed quarantine' do
- let(:allow_quarantine) { false }
-
- it_behaves_like 'a #list_commits message'
- end
+ it_behaves_like 'a #list_commits message'
end
end
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 0c04863f466..4320c5460da 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -170,6 +170,65 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
Gitlab::Git::PreReceiveError, "something failed")
end
end
+
+ context 'with a custom hook error' do
+ let(:stdout) { nil }
+ let(:stderr) { nil }
+ let(:error_message) { "error_message" }
+ let(:custom_hook_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ error_message,
+ Gitaly::UserDeleteBranchError.new(
+ custom_hook: Gitaly::CustomHookError.new(
+ stdout: stdout,
+ stderr: stderr,
+ hook_type: Gitaly::CustomHookError::HookType::HOOK_TYPE_PRERECEIVE
+ )))
+ end
+
+ shared_examples 'a failed branch deletion' do
+ it 'raises a PreReceiveError' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_delete_branch).with(request, kind_of(Hash))
+ .and_raise(custom_hook_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq(expected_message)
+ expect(error.raw_message).to eq(expected_raw_message)
+ end
+ end
+ end
+
+ context 'when details contain stderr' do
+ let(:stderr) { "something" }
+ let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stderr }
+
+ it_behaves_like 'a failed branch deletion'
+ end
+
+ context 'when details contain stdout' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "something" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stdout }
+
+ it_behaves_like 'a failed branch deletion'
+ end
+ end
+
+ context 'with a non-detailed error' do
+ it 'raises a GRPC error' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_delete_branch).with(request, kind_of(Hash))
+ .and_raise(GRPC::Internal.new('non-detailed error'))
+
+ expect { subject }.to raise_error(GRPC::Internal)
+ end
+ end
end
describe '#user_merge_branch' do
@@ -212,6 +271,82 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
+ context 'with a custom hook error' do
+ let(:stdout) { nil }
+ let(:stderr) { nil }
+ let(:error_message) { "error_message" }
+ let(:custom_hook_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ error_message,
+ Gitaly::UserMergeBranchError.new(
+ custom_hook: Gitaly::CustomHookError.new(
+ stdout: stdout,
+ stderr: stderr,
+ hook_type: Gitaly::CustomHookError::HookType::HOOK_TYPE_PRERECEIVE
+ )))
+ end
+
+ shared_examples 'a failed merge' do
+ it 'raises a PreReceiveError' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_merge_branch).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(custom_hook_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq(expected_message)
+ expect(error.raw_message).to eq(expected_raw_message)
+ end
+ end
+ end
+
+ context 'when details contain stderr without prefix' do
+ let(:stderr) { "something" }
+ let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stderr }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain stderr with prefix' do
+ let(:stderr) { "GL-HOOK-ERR: something" }
+ let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
+ let(:expected_message) { "something" }
+ let(:expected_raw_message) { stderr }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain stdout without prefix' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "something" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stdout }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain stdout with prefix' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "GL-HOOK-ERR: something" }
+ let(:expected_message) { "something" }
+ let(:expected_raw_message) { stdout }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain no stderr or stdout' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "\n \n" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { "\n \n" }
+
+ it_behaves_like 'a failed merge'
+ end
+ end
+
context 'with an exception without the detailed error' do
let(:permission_error) do
GRPC::PermissionDenied.new
@@ -340,6 +475,15 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
+ shared_examples '#user_cherry_pick with a gRPC error' do
+ it 'raises an exception' do
+ expect_any_instance_of(Gitaly::OperationService::Stub).to receive(:user_cherry_pick)
+ .and_raise(raised_error)
+
+ expect { subject }.to raise_error(expected_error, expected_error_message)
+ end
+ end
+
describe '#user_cherry_pick' do
let(:response_class) { Gitaly::UserCherryPickResponse }
@@ -354,13 +498,74 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
)
end
- before do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_cherry_pick).with(kind_of(Gitaly::UserCherryPickRequest), kind_of(Hash))
- .and_return(response)
+ context 'when errors are not raised but returned in the response' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_cherry_pick).with(kind_of(Gitaly::UserCherryPickRequest), kind_of(Hash))
+ .and_return(response)
+ end
+
+ it_behaves_like 'cherry pick and revert errors'
end
- it_behaves_like 'cherry pick and revert errors'
+ context 'when AccessCheckError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserCherryPickError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: 'something went wrong'
+ )))
+ end
+
+ let(:expected_error) { Gitlab::Git::PreReceiveError }
+ let(:expected_error_message) { "something went wrong" }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
+
+ context 'when NotAncestorError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ 'Branch diverged',
+ Gitaly::UserCherryPickError.new(
+ target_branch_diverged: Gitaly::NotAncestorError.new
+ )
+ )
+ end
+
+ let(:expected_error) { Gitlab::Git::CommitError }
+ let(:expected_error_message) { 'branch diverged' }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
+
+ context 'when MergeConflictError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ 'Conflict',
+ Gitaly::UserCherryPickError.new(
+ cherry_pick_conflict: Gitaly::MergeConflictError.new
+ )
+ )
+ end
+
+ let(:expected_error) { Gitlab::Git::Repository::CreateTreeError }
+ let(:expected_error_message) { }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
+
+ context 'when a non-detailed gRPC error is raised' do
+ let(:raised_error) { GRPC::Internal.new('non-detailed error') }
+ let(:expected_error) { GRPC::Internal }
+ let(:expected_error_message) { }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
end
describe '#user_revert' do
@@ -489,21 +694,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
expect(subject).to eq(squash_sha)
end
- context "when git_error is present" do
- let(:response) do
- Gitaly::UserSquashResponse.new(git_error: "something failed")
- end
-
- it "raises a GitError exception" do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_squash).with(request, kind_of(Hash))
- .and_return(response)
-
- expect { subject }.to raise_error(
- Gitlab::Git::Repository::GitError, "something failed")
- end
- end
-
shared_examples '#user_squash with an error' do
it 'raises a GitError exception' do
expect_any_instance_of(Gitaly::OperationService::Stub)
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index 4287c32b947..2a06983417d 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -55,20 +55,54 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
describe '#execute' do
let(:importer) { described_class.new(issue, project, client) }
- it 'creates the issue and assignees' do
- expect(importer)
- .to receive(:create_issue)
- .and_return(10)
+ context 'when :issues_full_text_search is disabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: false)
+ end
+
+ it 'creates the issue and assignees but does not update search data' do
+ expect(importer)
+ .to receive(:create_issue)
+ .and_return(10)
+
+ expect(importer)
+ .to receive(:create_assignees)
+ .with(10)
+
+ expect(importer.issuable_finder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ expect(importer).not_to receive(:update_search_data)
+
+ importer.execute
+ end
+ end
- expect(importer)
- .to receive(:create_assignees)
- .with(10)
+ context 'when :issues_full_text_search feature is enabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: true)
+ end
- expect(importer.issuable_finder)
- .to receive(:cache_database_id)
- .with(10)
+ it 'creates the issue and assignees and updates search data' do
+ expect(importer)
+ .to receive(:create_issue)
+ .and_return(10)
+
+ expect(importer)
+ .to receive(:create_assignees)
+ .with(10)
- importer.execute
+ expect(importer.issuable_finder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ expect(importer)
+ .to receive(:update_search_data)
+ .with(10)
+
+ importer.execute
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
index 6b3d18f20e9..b0f553dbef7 100644
--- a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -9,6 +9,12 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
let(:github_release_name) { 'Initial Release' }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:released_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:author) do
+ double(
+ login: 'User A',
+ id: 1
+ )
+ end
let(:github_release) do
double(
@@ -17,11 +23,23 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
name: github_release_name,
body: 'This is my release',
created_at: created_at,
- published_at: released_at
+ published_at: released_at,
+ author: author
)
end
+ def stub_email_for_github_username(user_name = 'User A', user_email = 'user@example.com')
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |instance|
+ allow(instance).to receive(:email_for_github_username)
+ .with(user_name).and_return(user_email)
+ end
+ end
+
describe '#execute' do
+ before do
+ stub_email_for_github_username
+ end
+
it 'imports the releases in bulk' do
release_hash = {
tag_name: '1.0',
@@ -45,7 +63,8 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
description: 'This is my release',
created_at: created_at,
updated_at: created_at,
- published_at: nil
+ published_at: nil,
+ author: author
)
expect(importer).to receive(:each_release).and_return([release_double])
@@ -61,6 +80,10 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
end
describe '#build_releases' do
+ before do
+ stub_email_for_github_username
+ end
+
it 'returns an Array containing release rows' do
expect(importer).to receive(:each_release).and_return([github_release])
@@ -108,11 +131,15 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
describe '#build' do
let(:release_hash) { importer.build(github_release) }
- it 'returns the attributes of the release as a Hash' do
- expect(release_hash).to be_an_instance_of(Hash)
- end
-
context 'the returned Hash' do
+ before do
+ stub_email_for_github_username
+ end
+
+ it 'returns the attributes of the release as a Hash' do
+ expect(release_hash).to be_an_instance_of(Hash)
+ end
+
it 'includes the tag name' do
expect(release_hash[:tag]).to eq('1.0')
end
@@ -137,6 +164,39 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
expect(release_hash[:name]).to eq(github_release_name)
end
end
+
+ context 'author_id attribute' do
+ it 'returns the Gitlab user_id when Github release author is found' do
+ # Stub user email which matches a Gitlab user.
+ stub_email_for_github_username('User A', project.users.first.email)
+
+ # Disable cache reads because the Redis cache key can be set by other specs.
+ # https://gitlab.com/gitlab-org/gitlab/-/blob/88bffda004e0aca9c4b9f2de86bdbcc0b49f2bc7/lib/gitlab/github_import/user_finder.rb#L75
+ # The line above can return a different user when it is read from the cache.
+ allow(Gitlab::Cache::Import::Caching).to receive(:read).and_return(nil)
+
+ expect(release_hash[:author_id]).to eq(project.users.first.id)
+ end
+
+ it 'returns ghost user when author is empty in Github release' do
+ allow(github_release).to receive(:author).and_return(nil)
+
+ expect(release_hash[:author_id]).to eq(Gitlab::GithubImport.ghost_user_id)
+ end
+
+ context 'when Github author is not found in Gitlab' do
+ let(:author) { double(login: 'octocat', id: 1 ) }
+
+ before do
+ # Stub user email which does not match a Gitlab user.
+ stub_email_for_github_username('octocat', 'octocat@example.com')
+ end
+
+ it 'returns project creator as author' do
+ expect(release_hash[:author_id]).to eq(project.creator_id)
+ end
+ end
+ end
end
describe '#each_release' do
diff --git a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
index 4cd9f9dfad0..1924cd687e4 100644
--- a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
describe ".parameters" do
let(:start_time) { Time.new(2018, 01, 01) }
- describe 'when no proxy time is available' do
+ describe 'when no proxy duration is available' do
let(:mock_request) { double('env', env: {}) }
it 'returns an empty hash' do
@@ -16,20 +16,18 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
end
end
- describe 'when a proxy time is available' do
+ describe 'when a proxy duration is available' do
let(:mock_request) do
double('env',
env: {
- 'HTTP_GITLAB_WORKHORSE_PROXY_START' => (start_time - 1.hour).to_i * (10**9)
+ 'GITLAB_RAILS_QUEUE_DURATION' => 2.seconds
}
)
end
- it 'returns the correct duration in seconds' do
+ it 'adds the duration to log parameters' do
travel_to(start_time) do
- subject.before
-
- expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 1.hour.to_f })
+ expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 2.seconds.to_f })
end
end
end
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
index 0c548e1ce32..ac512e28e7b 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
@@ -103,4 +103,36 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
.to contain_exactly(:base_authorization, :sub_authorization)
end
end
+
+ describe 'authorizes_object?' do
+ it 'is false by default' do
+ a_class = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+ end
+
+ expect(a_class).not_to be_authorizes_object
+ end
+
+ it 'is true after calling authorizes_object!' do
+ a_class = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+
+ expect(a_class).to be_authorizes_object
+ end
+
+ it 'is true if a parent authorizes_object' do
+ parent = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+
+ child = Class.new(parent)
+
+ expect(child).to be_authorizes_object
+ end
+ end
end
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index ed3f19d8cf2..974951ab30c 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::MarkdownField do
include Gitlab::Routing
+ include GraphqlHelpers
describe '.markdown_field' do
it 'creates the field with some default attributes' do
@@ -21,21 +22,12 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
expect { class_with_markdown_field(:test_html, null: true, resolver: 'not really') }
.to raise_error(expected_error)
end
-
- # TODO: remove as part of https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27536
- # so that until that time, the developer check is there
- it 'raises when passing a resolve block' do
- expect { class_with_markdown_field(:test_html, null: true, resolve: -> (_, _, _) { 'not really' } ) }
- .to raise_error(expected_error)
- end
end
context 'resolving markdown' do
let_it_be(:note) { build(:note, note: '# Markdown!') }
let_it_be(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
- let_it_be(:query_type) { GraphQL::ObjectType.new }
- let_it_be(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
+ let_it_be(:query) { GraphQL::Query.new(empty_schema, document: nil, context: {}, variables: {}) }
let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: {}, object: nil) }
let(:type_class) { class_with_markdown_field(:note_html, null: false) }
@@ -55,6 +47,20 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
end
+ context 'when a block is passed for the resolved object' do
+ let(:type_class) do
+ class_with_markdown_field(:note_html, null: false) do |resolved_object|
+ resolved_object.object
+ end
+ end
+
+ let(:type_instance) { type_class.authorized_new(class_wrapped_object(note), context) }
+
+ it 'renders markdown from the same property as the field name without the `_html` suffix' do
+ expect(field.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ end
+ end
+
describe 'basic verification that references work' do
let_it_be(:project) { create(:project, :public) }
@@ -83,12 +89,22 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
end
- def class_with_markdown_field(name, **args)
+ def class_with_markdown_field(name, **args, &blk)
Class.new(Types::BaseObject) do
prepend Gitlab::Graphql::MarkdownField
graphql_name 'MarkdownFieldTest'
- markdown_field name, **args
+ markdown_field name, **args, &blk
end
end
+
+ def class_wrapped_object(object)
+ Class.new do
+ def initialize(object)
+ @object = object
+ end
+
+ attr_accessor :object
+ end.new(object)
+ end
end
diff --git a/spec/lib/gitlab/graphql/negatable_arguments_spec.rb b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
index 71ef75836c0..04ee1c1b820 100644
--- a/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
+++ b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
@@ -25,7 +25,9 @@ RSpec.describe Gitlab::Graphql::NegatableArguments do
expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(['foo'])
end
- it 'defines all arguments passed as block even if called multiple times' do
+ # TODO: suffers from `DuplicateNamesError`. Skip until we upgrade
+ # to the graphql 2.0 gem: https://gitlab.com/gitlab-org/gitlab/-/issues/363131
+ xit 'defines all arguments passed as block even if called multiple times' do
test_resolver.negated do
argument :foo, GraphQL::Types::String, required: false
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
index b6c3cb4e04a..97613edee5e 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
@@ -9,9 +9,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
# The spec will be merged with connection_spec.rb in the future.
let(:nodes) { Project.all.order(id: :asc) }
let(:arguments) { {} }
- let(:query_type) { GraphQL::ObjectType.new }
- let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: query_double(schema: schema), values: nil, object: nil) }
+ let(:context) { GraphQL::Query::Context.new(query: query_double, values: nil, object: nil) }
let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) }
let_it_be(:column_order_id_desc) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index a4ba288b7f1..61a79d90546 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -7,9 +7,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.all.order(id: :asc) }
let(:arguments) { {} }
- let(:query_type) { GraphQL::ObjectType.new }
- let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: query_double(schema: schema), values: nil, object: nil) }
+ let(:context) { GraphQL::Query::Context.new(query: query_double, values: nil, object: nil) }
subject(:connection) do
described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
diff --git a/spec/lib/gitlab/graphql/present/field_extension_spec.rb b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
index 5f0f444e0bb..49992d7b71f 100644
--- a/spec/lib/gitlab/graphql/present/field_extension_spec.rb
+++ b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
@@ -143,23 +143,6 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
it_behaves_like 'calling the presenter method'
end
- # This is exercised here using an explicit `resolve:` proc, but
- # @resolver_proc values are used in field instrumentation as well.
- context 'when the field uses a resolve proc' do
- let(:presenter) { base_presenter }
- let(:field) do
- ::Types::BaseField.new(
- name: field_name,
- type: GraphQL::Types::String,
- null: true,
- owner: owner,
- resolve: ->(obj, args, ctx) { 'Hello from a proc' }
- )
- end
-
- specify { expect(resolve_value).to eq 'Hello from a proc' }
- end
-
context 'when the presenter provides a new method' do
def presenter
Class.new(base_presenter) do
diff --git a/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb
new file mode 100644
index 00000000000..a5274d49fdb
--- /dev/null
+++ b/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer do
+ let(:query) { GraphQL::Query.new(GitlabSchema, document: document, context: {}, variables: { body: 'some note' }) }
+ let(:document) do
+ GraphQL.parse <<-GRAPHQL
+ mutation createNote($body: String!) {
+ createNote(input: {noteableId: "gid://gitlab/Noteable/1", body: $body}) {
+ note {
+ id
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ describe '#result' do
+ let(:monotonic_time_before) { 42 }
+ let(:monotonic_time_after) { 500 }
+ let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
+
+ before do
+ RequestStore.store[:graphql_logs] = nil
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time)
+ .and_return(monotonic_time_before, monotonic_time_before,
+ monotonic_time_before, monotonic_time_before,
+ monotonic_time_after)
+ end
+
+ it 'returns the complexity, depth, duration, etc' do
+ results = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])
+ result = results.first
+
+ expect(result[:duration_s]).to eq monotonic_time_duration
+ expect(result[:depth]).to eq 3
+ expect(result[:complexity]).to eq 3
+ expect(result[:used_fields]).to eq ['Note.id', 'CreateNotePayload.note', 'Mutation.createNote']
+ expect(result[:used_deprecated_fields]).to eq []
+
+ request = result.except(:duration_s).merge({
+ operation_name: 'createNote',
+ variables: { body: "[FILTERED]" }.to_s
+ })
+
+ expect(RequestStore.store[:graphql_logs]).to match([request])
+ end
+ end
+end
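The new spec drives the analyzer through graphql-ruby's AST analysis entry point. For readers unfamiliar with that API, a minimal sketch of the interface being exercised follows; the class and the field counter are illustrative only, not GitLab's analyzer:

require 'graphql'

# Hypothetical analyzer showing the hooks graphql-ruby's AST analysis calls.
class FieldCountAnalyzer < GraphQL::Analysis::AST::Analyzer
  def initialize(query_or_multiplex)
    super
    @field_count = 0
  end

  # Invoked once per field node visited in the query document.
  def on_enter_field(_node, _parent, _visitor)
    @field_count += 1
  end

  # Whatever #result returns becomes this analyzer's entry in the array
  # returned by GraphQL::Analysis::AST.analyze_query(query, [FieldCountAnalyzer]).
  def result
    { field_count: @field_count }
  end
end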
diff --git a/spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb
new file mode 100644
index 00000000000..997fb129f42
--- /dev/null
+++ b/spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::QueryAnalyzers::AST::RecursionAnalyzer do
+ let(:query) { GraphQL::Query.new(GitlabSchema, document: document, context: {}, variables: { body: 'some note' }) }
+
+ context 'when recursion threshold not exceeded' do
+ let(:document) do
+ GraphQL.parse <<-GRAPHQL
+ query recurse {
+ group(fullPath: "h5bp") {
+ projects {
+ nodes {
+ name
+ group {
+ projects {
+ nodes {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'does not return an error' do
+ result = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])
+
+ expect(result.first).to be_nil
+ end
+ end
+
+ context 'when recursion threshold exceeded' do
+ let(:document) do
+ GraphQL.parse <<-GRAPHQL
+ query recurse {
+ group(fullPath: "h5bp") {
+ projects {
+ nodes {
+ name
+ group {
+ projects {
+ nodes {
+ name
+ group {
+ projects {
+ nodes {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns an error' do
+ result = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])
+
+ expect(result.first.is_a?(GraphQL::AnalysisError)).to be_truthy
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
deleted file mode 100644
index dee8f9e3c64..00000000000
--- a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do
- let(:initial_value) { analyzer.initial_value(query) }
- let(:analyzer) { described_class.new }
- let(:query) { GraphQL::Query.new(GitlabSchema, document: document, context: {}, variables: { body: "some note" }) }
- let(:document) do
- GraphQL.parse <<-GRAPHQL
- mutation createNote($body: String!) {
- createNote(input: {noteableId: "1", body: $body}) {
- note {
- id
- }
- }
- }
- GRAPHQL
- end
-
- describe '#final_value' do
- let(:monotonic_time_before) { 42 }
- let(:monotonic_time_after) { 500 }
- let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
- let(:memo) { initial_value }
-
- subject(:final_value) { analyzer.final_value(memo) }
-
- before do
- RequestStore.store[:graphql_logs] = nil
-
- allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2, [[], []]])
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
- allow(Gitlab::GraphqlLogger).to receive(:info)
- end
-
- it 'inserts duration in seconds to memo and sets request store' do
- expect { final_value }.to change { memo[:duration_s] }.to(monotonic_time_duration)
- .and change { RequestStore.store[:graphql_logs] }.to([{
- complexity: 4,
- depth: 2,
- operation_name: query.operation_name,
- used_deprecated_fields: [],
- used_fields: [],
- variables: { body: "[FILTERED]" }.to_s
- }])
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
index 5bc077a963e..20792fb4554 100644
--- a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
+++ b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do
use Gitlab::Graphql::Tracers::LoggerTracer
use Gitlab::Graphql::Tracers::TimerTracer
- query_analyzer Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer.new
+ query_analyzer Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer
query Graphql::FakeQueryType
end
@@ -20,11 +20,11 @@ RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do
end
end
- it "logs every query", :aggregate_failures do
+ it "logs every query", :aggregate_failures, :unlimited_max_formatted_output_length do
variables = { name: "Ada Lovelace" }
query_string = 'query fooOperation($name: String) { helloWorld(message: $name) }'
- # Build an actual query so we don't have to hardocde the "fingerprint" calculations
+ # Build an actual query so we don't have to hardcode the "fingerprint" calculations
query = GraphQL::Query.new(dummy_schema, query_string, variables: variables)
expect(::Gitlab::GraphqlLogger).to receive(:info).with({
diff --git a/spec/lib/gitlab/hash_digest/facade_spec.rb b/spec/lib/gitlab/hash_digest/facade_spec.rb
new file mode 100644
index 00000000000..b352744513e
--- /dev/null
+++ b/spec/lib/gitlab/hash_digest/facade_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HashDigest::Facade do
+ describe '.hexdigest' do
+ let(:plaintext) { 'something that is plaintext' }
+
+ let(:sha256_hash) { OpenSSL::Digest::SHA256.hexdigest(plaintext) }
+ let(:md5_hash) { Digest::MD5.hexdigest(plaintext) } # rubocop:disable Fips/MD5
+
+ it 'uses SHA256' do
+ expect(described_class.hexdigest(plaintext)).to eq(sha256_hash)
+ end
+
+ context 'when feature flags are not available' do
+ before do
+ allow(Feature).to receive(:feature_flags_available?).and_return(false)
+ end
+
+ it 'uses MD5' do
+ expect(described_class.hexdigest(plaintext)).to eq(md5_hash)
+ end
+ end
+
+ context 'when active_support_hash_digest_sha256 FF is disabled' do
+ before do
+ stub_feature_flags(active_support_hash_digest_sha256: false)
+ end
+
+ it 'uses MD5' do
+ expect(described_class.hexdigest(plaintext)).to eq(md5_hash)
+ end
+ end
+ end
+end
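Taken together, the three examples pin down a fallback order: SHA256 is the default, and MD5 is only used when feature flags cannot be evaluated at all or when the rollout flag is explicitly disabled. A rough sketch of a facade with that behaviour (an assumed shape, not the implementation under test):

require 'openssl'
require 'digest'

# Illustrative facade: digest choice gated by the rollout feature flag.
module HashDigestFacadeSketch
  def self.hexdigest(plaintext)
    return Digest::MD5.hexdigest(plaintext) unless sha256_enabled? # legacy fallback

    OpenSSL::Digest::SHA256.hexdigest(plaintext)
  end

  def self.sha256_enabled?
    # Fall back to MD5 when feature flags are unavailable (assumed: e.g. early boot)
    # or when :active_support_hash_digest_sha256 is disabled.
    Feature.feature_flags_available? &&
      Feature.enabled?(:active_support_hash_digest_sha256)
  end
end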
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 65d8c59fea7..537e59d91c3 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -124,27 +124,14 @@ RSpec.describe Gitlab::Highlight do
context 'timeout' do
subject(:highlight) { described_class.new('file.rb', 'begin', language: 'ruby').highlight('Content') }
- it 'utilizes timeout for web' do
- expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_FOREGROUND).and_call_original
-
- highlight
- end
-
it 'falls back to plaintext on timeout' do
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
+ expect(Gitlab::RenderTimeout).to receive(:timeout).and_raise(Timeout::Error)
expect(Rouge::Lexers::PlainText).to receive(:lex).and_call_original
highlight
end
-
- it 'utilizes longer timeout for sidekiq' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_BACKGROUND).and_call_original
-
- highlight
- end
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 1546b6a26c8..9d516c8d7ac 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -65,6 +65,7 @@ issues:
- customer_relations_contacts
- issue_customer_relations_contacts
- email
+- issuable_resource_links
work_item_type:
- issues
events:
@@ -274,6 +275,7 @@ ci_pipelines:
- security_findings
- daily_build_group_report_results
- latest_builds
+- latest_successful_builds
- daily_report_results
- latest_builds_report_results
- messages
@@ -336,6 +338,8 @@ integrations:
- jira_tracker_data
- zentao_tracker_data
- issue_tracker_data
+# dingtalk_tracker_data JiHu-specific, see https://jihulab.com/gitlab-cn/gitlab/-/merge_requests/417
+- dingtalk_tracker_data
hooks:
- project
- web_hook_logs
@@ -414,6 +418,8 @@ project:
- pushover_integration
- jira_integration
- zentao_integration
+# dingtalk_integration JiHu-specific, see https://jihulab.com/gitlab-cn/gitlab/-/merge_requests/417
+- dingtalk_integration
- redmine_integration
- youtrack_integration
- custom_issue_tracker_integration
@@ -613,6 +619,7 @@ project:
- secure_files
- security_trainings
- vulnerability_reads
+- build_artifacts_size_refresh
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/lfs_saver_spec.rb b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
index 84bd782c467..aa456736f78 100644
--- a/spec/lib/gitlab/import_export/lfs_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
@@ -45,6 +45,18 @@ RSpec.describe Gitlab::ImportExport::LfsSaver do
expect(File).to exist("#{shared.export_path}/lfs-objects/#{lfs_object.oid}")
end
+ context 'when the lfs object file is missing on disk' do
+ it 'does not attempt to copy the non-existent file' do
+ FileUtils.rm(lfs_object.file.path)
+ expect(saver).not_to receive(:copy_files)
+
+ saver.save # rubocop:disable Rails/SaveBang
+
+ expect(shared.errors).to be_empty
+ expect(File).not_to exist("#{shared.export_path}/lfs-objects/#{lfs_object.oid}")
+ end
+ end
+
describe 'saving a json file' do
before do
# Create two more LfsObjectProject records with different `repository_type`s
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index e06fcb0cd3f..d7f07a1eadf 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -585,6 +585,7 @@ ProjectFeature:
- operations_access_level
- security_and_compliance_access_level
- container_registry_access_level
+- package_registry_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -858,6 +859,10 @@ Epic:
- external_key
- confidential
- color
+ - total_opened_issue_weight
+ - total_closed_issue_weight
+ - total_opened_issue_count
+ - total_closed_issue_count
EpicIssue:
- id
- relative_position
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index 416d651b0de..f42a109aa3a 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -7,17 +7,17 @@ RSpec.describe Gitlab::ImportSources do
it 'returns a hash' do
expected =
{
- 'GitHub' => 'github',
- 'Bitbucket Cloud' => 'bitbucket',
- 'Bitbucket Server' => 'bitbucket_server',
- 'GitLab.com' => 'gitlab',
- 'Google Code' => 'google_code',
- 'FogBugz' => 'fogbugz',
- 'Repo by URL' => 'git',
- 'GitLab export' => 'gitlab_project',
- 'Gitea' => 'gitea',
- 'Manifest file' => 'manifest',
- 'Phabricator' => 'phabricator'
+ 'GitHub' => 'github',
+ 'Bitbucket Cloud' => 'bitbucket',
+ 'Bitbucket Server' => 'bitbucket_server',
+ 'GitLab.com' => 'gitlab',
+ 'Google Code' => 'google_code',
+ 'FogBugz' => 'fogbugz',
+ 'Repository by URL' => 'git',
+ 'GitLab export' => 'gitlab_project',
+ 'Gitea' => 'gitea',
+ 'Manifest file' => 'manifest',
+ 'Phabricator' => 'phabricator'
}
expect(described_class.options).to eq(expected)
@@ -93,7 +93,7 @@ RSpec.describe Gitlab::ImportSources do
'gitlab' => 'GitLab.com',
'google_code' => 'Google Code',
'fogbugz' => 'FogBugz',
- 'git' => 'Repo by URL',
+ 'git' => 'Repository by URL',
'gitlab_project' => 'GitLab export',
'gitea' => 'Gitea',
'manifest' => 'Manifest file',
diff --git a/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb b/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
index 4eb2388f3f7..7afb80488d8 100644
--- a/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
+++ b/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
@@ -2,14 +2,12 @@
require "spec_helper"
-RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker do
+RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker, :freeze_time do
let_it_be(:project_id) { 1 }
describe '.notified_projects', :clean_gitlab_redis_shared_state do
before do
- freeze_time do
- Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
- end
+ Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
end
it 'returns the list of projects for which deletion warning email has been sent' do
@@ -53,6 +51,46 @@ RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker do
end
end
+ describe '#notification_date', :clean_gitlab_redis_shared_state do
+ before do
+ Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
+ end
+
+ it 'returns the date if a deletion warning email has been sent for a given project' do
+ expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).notification_date).to eq(Date.current.to_s)
+ end
+
+ it 'returns nil if a deletion warning email has not been sent for a given project' do
+ expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(2).notification_date).to eq(nil)
+ end
+ end
+
+ describe '#scheduled_deletion_date', :clean_gitlab_redis_shared_state do
+ shared_examples 'returns the expected deletion date' do
+ it do
+ expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).scheduled_deletion_date)
+ .to eq(1.month.from_now.to_date.to_s)
+ end
+ end
+
+ before do
+ stub_application_setting(inactive_projects_delete_after_months: 2)
+ stub_application_setting(inactive_projects_send_warning_email_after_months: 1)
+ end
+
+ context 'without a stored deletion email date' do
+ it_behaves_like 'returns the expected deletion date'
+ end
+
+ context 'with a stored deletion email date' do
+ before do
+ Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
+ end
+
+ it_behaves_like 'returns the expected deletion date'
+ end
+ end
+
describe '#reset' do
before do
Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
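The expected deletion date in the new '#scheduled_deletion_date' examples can be derived from the two stubbed settings: warnings go out `inactive_projects_send_warning_email_after_months` into inactivity and deletion happens at `inactive_projects_delete_after_months`, so the remaining grace period is the difference between the two. A sketch of that arithmetic (the helper name and settings reader are assumptions, not the tracker's internals):

# Hypothetical helper reproducing the date the examples expect.
def expected_scheduled_deletion_date(notification_date = nil)
  settings = Gitlab::CurrentSettings
  grace_months = settings.inactive_projects_delete_after_months -
    settings.inactive_projects_send_warning_email_after_months

  start_date = notification_date || Date.current
  (start_date + grace_months.months).to_s
end

# With delete_after_months: 2 and send_warning_email_after_months: 1 (as stubbed),
# this returns 1.month.from_now.to_date.to_s whether or not a warning was recorded today.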
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 5fea355ab4f..79d626386d4 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -110,6 +110,14 @@ RSpec.describe Gitlab::InstrumentationHelper do
expect(payload).to include(:pid)
end
+ it 'logs the worker ID' do
+ expect(Prometheus::PidProvider).to receive(:worker_id).and_return('puma_1')
+
+ subject
+
+ expect(payload).to include(worker_id: 'puma_1')
+ end
+
context 'when logging memory allocations' do
include MemoryInstrumentationHelper
diff --git a/spec/lib/gitlab/jira_import/base_importer_spec.rb b/spec/lib/gitlab/jira_import/base_importer_spec.rb
index 479551095de..70a594b09af 100644
--- a/spec/lib/gitlab/jira_import/base_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/base_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::BaseImporter do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let(:project) { create(:project) }
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index aead5405bd1..565a9ad17e1 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::IssuesImporter do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:current_user) { create(:user) }
diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
index 71440590815..4fb5e363475 100644
--- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::LabelsImporter do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index a7c73e79641..972b0ab6ed1 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::JiraImport do
let(:project_id) { 321 }
describe '.validate_project_settings!' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:project, reload: true) { create(:project) }
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index e69edbe6dc0..1800b42160d 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -260,28 +260,6 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
)
end
- context 'when importing a GitHub project' do
- let(:api_root) { 'https://api.github.com' }
- let(:repo_root) { 'https://github.com' }
-
- subject { described_class.new(project) }
-
- it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute'
- it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute an error occurs'
- it_behaves_like 'Gitlab::LegacyGithubImport unit-testing'
-
- describe '#client' do
- it 'instantiates a Client' do
- allow(project).to receive(:import_data).and_return(double(credentials: credentials))
- expect(Gitlab::LegacyGithubImport::Client).to receive(:new).with(
- credentials[:user]
- )
-
- subject.client
- end
- end
- end
-
context 'when importing a Gitea project' do
let(:api_root) { 'https://try.gitea.io/api/v1' }
let(:repo_root) { 'https://try.gitea.io' }
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index 12fb12ebd87..06a25be757e 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -303,6 +303,7 @@ RSpec.describe Gitlab::MailRoom do
delivery_method: 'postback',
delivery_options: {
delivery_url: "http://gitlab.example/api/v4/internal/mail_room/incoming_email",
+ content_type: "text/plain",
jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
jwt_algorithm: 'HS256',
@@ -316,6 +317,7 @@ RSpec.describe Gitlab::MailRoom do
delivery_method: 'postback',
delivery_options: {
delivery_url: "http://gitlab.example/api/v4/internal/mail_room/service_desk_email",
+ content_type: "text/plain",
jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
jwt_algorithm: 'HS256',
diff --git a/spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb b/spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb
new file mode 100644
index 00000000000..a2286415e96
--- /dev/null
+++ b/spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Mailgun::WebhookProcessors::FailureLogger do
+ describe '#execute', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ let(:base_payload) do
+ {
+ 'id' => 'U2kZkAiuScqcMTq-8Atz-Q',
+ 'event' => 'failed',
+ 'recipient' => 'recipient@gitlab.com',
+ 'reason' => 'bounce',
+ 'delivery-status' => {
+ 'code' => '421',
+ 'message' => '4.4.2 mxfront9g.mail.example.com Error: timeout exceeded'
+ }
+ }
+ end
+
+ context 'on permanent failure' do
+ let(:processor) { described_class.new(base_payload.merge({ 'severity' => 'permanent' })) }
+
+ it 'logs the failure immediately' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ event: 'email_delivery_failure',
+ mailgun_event_id: base_payload['id'],
+ recipient: base_payload['recipient'],
+ failure_type: 'permanent',
+ failure_reason: base_payload['reason'],
+ failure_code: base_payload['delivery-status']['code'],
+ failure_message: base_payload['delivery-status']['message']
+ )
+
+ processor.execute
+ end
+ end
+
+ context 'on temporary failure' do
+ let(:processor) { described_class.new(base_payload.merge({ 'severity' => 'temporary' })) }
+
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
+ .and_return(temporary_email_failure: { threshold: 1, interval: 1.minute })
+ end
+
+ context 'when threshold is not exceeded' do
+ it 'increments counter but does not log the failure' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(
+ :temporary_email_failure, scope: 'recipient@gitlab.com'
+ ).and_call_original
+ expect(Gitlab::ErrorTracking::Logger).not_to receive(:error)
+
+ processor.execute
+ end
+ end
+
+ context 'when threshold is exceeded' do
+ before do
+ processor.execute
+ end
+
+ it 'increments counter and logs the failure' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(
+ :temporary_email_failure, scope: 'recipient@gitlab.com'
+ ).and_call_original
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ event: 'email_delivery_failure',
+ mailgun_event_id: base_payload['id'],
+ recipient: base_payload['recipient'],
+ failure_type: 'temporary',
+ failure_reason: base_payload['reason'],
+ failure_code: base_payload['delivery-status']['code'],
+ failure_message: base_payload['delivery-status']['message']
+ )
+
+ processor.execute
+ end
+ end
+ end
+
+ context 'on other events' do
+ let(:processor) { described_class.new(base_payload.merge({ 'event' => 'delivered' })) }
+
+ it 'does nothing' do
+ expect(Gitlab::ErrorTracking::Logger).not_to receive(:error)
+ expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+
+ processor.execute
+ end
+ end
+ end
+end
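The examples encode a simple branching rule: permanent delivery failures are logged immediately, temporary ones only after the per-recipient rate limit threshold is crossed, and non-failure events are ignored. A sketch of that flow (the `log_failure` helper is hypothetical; only the `Gitlab::ApplicationRateLimiter.throttled?` call mirrors the spec):

# Illustrative control flow, not the processor's actual code.
def process(payload)
  return unless payload['event'] == 'failed'

  case payload['severity']
  when 'permanent'
    log_failure(payload) # hypothetical helper writing to Gitlab::ErrorTracking::Logger
  when 'temporary'
    # throttled? flips to true once the threshold for this recipient is exceeded,
    # so the first few temporary bounces are counted but not logged.
    if Gitlab::ApplicationRateLimiter.throttled?(:temporary_email_failure, scope: payload['recipient'])
      log_failure(payload)
    end
  end
end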
diff --git a/spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb b/spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb
new file mode 100644
index 00000000000..3bd364b0d15
--- /dev/null
+++ b/spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Mailgun::WebhookProcessors::MemberInvites do
+ describe '#execute', :aggregate_failures do
+ let_it_be(:member) { create(:project_member, :invited) }
+
+ let(:raw_invite_token) { member.raw_invite_token }
+ let(:payload) do
+ {
+ 'event' => 'failed',
+ 'severity' => 'permanent',
+ 'tags' => [Members::Mailgun::INVITE_EMAIL_TAG],
+ 'user-variables' => { ::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY => raw_invite_token }
+ }
+ end
+
+ subject(:service) { described_class.new(payload).execute }
+
+ it 'marks the member invite email success as false' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: /^UPDATED MEMBER INVITE_EMAIL_SUCCESS/,
+ event: 'updated_member_invite_email_success'
+ ).and_call_original
+
+ expect { service }.to change { member.reload.invite_email_success }.from(true).to(false)
+ end
+
+ context 'when invite token is not found in payload' do
+ before do
+ payload.delete('user-variables')
+ end
+
+ it 'does not change member status and logs an error' do
+ expect(Gitlab::AppLogger).not_to receive(:info)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(described_class::ProcessWebhookServiceError))
+
+ expect { service }.not_to change { member.reload.invite_email_success }
+ end
+ end
+
+ shared_examples 'does nothing' do
+ it 'does not change member status' do
+ expect(Gitlab::AppLogger).not_to receive(:info)
+
+ expect { service }.not_to change { member.reload.invite_email_success }
+ end
+ end
+
+ context 'when member can not be found' do
+ let(:raw_invite_token) { '_foobar_' }
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'when failure is temporary' do
+ before do
+ payload['severity'] = 'temporary'
+ end
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'when email is not a member invite' do
+ before do
+ payload.delete('tags')
+ end
+
+ it_behaves_like 'does nothing'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/jemalloc_spec.rb b/spec/lib/gitlab/memory/jemalloc_spec.rb
new file mode 100644
index 00000000000..8847516b52c
--- /dev/null
+++ b/spec/lib/gitlab/memory/jemalloc_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Memory::Jemalloc do
+ let(:outdir) { Dir.mktmpdir }
+
+ after do
+ FileUtils.rm_f(outdir)
+ end
+
+ context 'when jemalloc is loaded' do
+ let(:fiddle_func) { instance_double(::Fiddle::Function) }
+
+ context 'with JSON format' do
+ let(:format) { :json }
+ let(:output) { '{"a": 24}' }
+
+ before do
+ stub_stats_call(output, 'J')
+ end
+
+ describe '.stats' do
+ it 'returns stats JSON' do
+ expect(described_class.stats(format: format)).to eq(output)
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'writes stats JSON file' do
+ described_class.dump_stats(path: outdir, format: format)
+
+ file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.json$/) }
+ expect(file).not_to be_nil
+ expect(File.read(File.join(outdir, file))).to eq(output)
+ end
+ end
+ end
+
+ context 'with text format' do
+ let(:format) { :text }
+ let(:output) { 'stats' }
+
+ before do
+ stub_stats_call(output)
+ end
+
+ describe '.stats' do
+ it 'returns a text report' do
+ expect(described_class.stats(format: format)).to eq(output)
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'writes stats text file' do
+ described_class.dump_stats(path: outdir, format: format)
+
+ file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.txt$/) }
+ expect(file).not_to be_nil
+ expect(File.read(File.join(outdir, file))).to eq(output)
+ end
+ end
+ end
+
+ context 'with unsupported format' do
+ let(:format) { 'unsupported' }
+
+ describe '.stats' do
+ it 'raises an error' do
+ expect do
+ described_class.stats(format: format)
+ end.to raise_error(/format must be one of/)
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'raises an error' do
+ expect do
+ described_class.dump_stats(path: outdir, format: format)
+ end.to raise_error(/format must be one of/)
+ end
+ end
+ end
+ end
+
+ context 'when jemalloc is not loaded' do
+ before do
+ expect(::Fiddle::Handle).to receive(:sym).and_raise(Fiddle::DLError)
+ end
+
+ describe '.stats' do
+ it 'returns nil' do
+ expect(described_class.stats).to be_nil
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'does nothing' do
+ stub_env('LD_PRELOAD', nil)
+
+ described_class.dump_stats(path: outdir)
+
+ expect(Dir.empty?(outdir)).to be(true)
+ end
+ end
+ end
+
+ def stub_stats_call(output, expected_options = '')
+ # Stub function pointer to stats call.
+ func_pointer = Fiddle::Pointer.new(0xd34db33f)
+ expect(::Fiddle::Handle).to receive(:sym).with('malloc_stats_print').and_return(func_pointer)
+
+ # Stub actual function call.
+ expect(::Fiddle::Function).to receive(:new)
+ .with(func_pointer, anything, anything)
+ .and_return(fiddle_func)
+ expect(fiddle_func).to receive(:call).with(anything, nil, expected_options) do |callback, _, options|
+ callback.call(nil, output)
+ end
+ end
+end
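The stubs above mimic the Fiddle plumbing needed to reach jemalloc's `malloc_stats_print(write_cb, cbopaque, opts)` C function: resolve the symbol, wrap it in a `Fiddle::Function`, and collect output through a write callback. A self-contained sketch of such a call, offered as an assumption about how a dump could work rather than the code under test:

require 'fiddle'

# Returns jemalloc stats as a string ('J' requests JSON), or nil if jemalloc is not loaded.
def jemalloc_stats(options = 'J')
  func = Fiddle::Function.new(
    Fiddle::Handle.sym('malloc_stats_print'),
    [Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP],
    Fiddle::TYPE_VOID
  )

  buffer = +''
  write_cb = Fiddle::Closure::BlockCaller.new(
    Fiddle::TYPE_VOID, [Fiddle::TYPE_VOIDP, Fiddle::TYPE_VOIDP]
  ) { |_opaque, message| buffer << message.to_s }

  func.call(write_cb, nil, options)
  buffer
rescue Fiddle::DLError
  nil
end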
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index 14a4c01fce3..52908a0b339 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
Gitlab::Metrics::Dashboard::Stages::CustomMetricsDetailsInserter,
Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
- Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
Gitlab::Metrics::Dashboard::Stages::UrlValidator
]
@@ -118,43 +117,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
end
end
- context 'when the dashboard references persisted metrics with alerts' do
- let!(:alert) do
- create(
- :prometheus_alert,
- environment: environment,
- project: project,
- prometheus_metric: persisted_metric
- )
- end
-
- shared_examples_for 'has saved alerts' do
- it 'includes an alert path' do
- target_metric = all_metrics.find { |metric| metric[:metric_id] == persisted_metric.id }
-
- expect(target_metric).to be_a Hash
- expect(target_metric).to include(:alert_path)
- expect(target_metric[:alert_path]).to include(
- project.path,
- persisted_metric.id.to_s,
- environment.id.to_s
- )
- end
- end
-
- context 'that are shared across projects' do
- let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
-
- it_behaves_like 'has saved alerts'
- end
-
- context 'when the project has associated metrics' do
- let!(:persisted_metric) { create(:prometheus_metric, project: project, group: :business) }
-
- it_behaves_like 'has saved alerts'
- end
- end
-
context 'when there are no alerts' do
let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index c88d8c17eac..57790ad78a8 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
let(:main_replica_host) { main_load_balancer.host }
let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
- let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') }
+ let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') }
let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
let(:ci_replica_host) { double(:host, connection: ci_connection) }
let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
@@ -121,7 +121,7 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
let(:main_replica_host) { main_load_balancer.host }
let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
- let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') }
+ let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') }
let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
let(:ci_replica_host) { double(:host, connection: ci_connection) }
let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
diff --git a/spec/lib/gitlab/metrics/sli_spec.rb b/spec/lib/gitlab/metrics/sli_spec.rb
index 9b776d6738d..102ea442b3a 100644
--- a/spec/lib/gitlab/metrics/sli_spec.rb
+++ b/spec/lib/gitlab/metrics/sli_spec.rb
@@ -17,13 +17,13 @@ RSpec.describe Gitlab::Metrics::Sli do
it 'allows different SLIs to be defined on each subclass' do
apdex_counters = [
- fake_total_counter('foo', 'apdex'),
- fake_numerator_counter('foo', 'apdex', 'success')
+ fake_total_counter('foo_apdex'),
+ fake_numerator_counter('foo_apdex', 'success')
]
error_rate_counters = [
- fake_total_counter('foo', 'error_rate'),
- fake_numerator_counter('foo', 'error_rate', 'error')
+ fake_total_counter('foo'),
+ fake_numerator_counter('foo', 'error')
]
apdex = described_class::Apdex.initialize_sli(:foo, [{ hello: :world }])
@@ -40,13 +40,17 @@ RSpec.describe Gitlab::Metrics::Sli do
end
subclasses = {
- Gitlab::Metrics::Sli::Apdex => :success,
- Gitlab::Metrics::Sli::ErrorRate => :error
+ Gitlab::Metrics::Sli::Apdex => {
+ suffix: '_apdex',
+ numerator: :success
+ },
+ Gitlab::Metrics::Sli::ErrorRate => {
+ suffix: '',
+ numerator: :error
+ }
}
- subclasses.each do |subclass, numerator_type|
- subclass_type = subclass.to_s.demodulize.underscore
-
+ subclasses.each do |subclass, subclass_info|
describe subclass do
describe 'Class methods' do
before do
@@ -73,8 +77,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '.initialize_sli' do
it 'returns and stores a new initialized SLI' do
counters = [
- fake_total_counter(:bar, subclass_type),
- fake_numerator_counter(:bar, subclass_type, numerator_type)
+ fake_total_counter("bar#{subclass_info[:suffix]}"),
+ fake_numerator_counter("bar#{subclass_info[:suffix]}", subclass_info[:numerator])
]
sli = described_class.initialize_sli(:bar, [{ hello: :world }])
@@ -86,8 +90,8 @@ RSpec.describe Gitlab::Metrics::Sli do
it 'does not change labels for an already-initialized SLI' do
counters = [
- fake_total_counter(:bar, subclass_type),
- fake_numerator_counter(:bar, subclass_type, numerator_type)
+ fake_total_counter("bar#{subclass_info[:suffix]}"),
+ fake_numerator_counter("bar#{subclass_info[:suffix]}", subclass_info[:numerator])
]
sli = described_class.initialize_sli(:bar, [{ hello: :world }])
@@ -106,8 +110,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '.initialized?' do
before do
- fake_total_counter(:boom, subclass_type)
- fake_numerator_counter(:boom, subclass_type, numerator_type)
+ fake_total_counter("boom#{subclass_info[:suffix]}")
+ fake_numerator_counter("boom#{subclass_info[:suffix]}", subclass_info[:numerator])
end
it 'is true when an SLI was initialized with labels' do
@@ -125,8 +129,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '#initialize_counters' do
it 'initializes counters for the passed label combinations' do
counters = [
- fake_total_counter(:hey, subclass_type),
- fake_numerator_counter(:hey, subclass_type, numerator_type)
+ fake_total_counter("hey#{subclass_info[:suffix]}"),
+ fake_numerator_counter("hey#{subclass_info[:suffix]}", subclass_info[:numerator])
]
described_class.new(:hey).initialize_counters([{ foo: 'bar' }, { foo: 'baz' }])
@@ -138,18 +142,18 @@ RSpec.describe Gitlab::Metrics::Sli do
describe "#increment" do
let!(:sli) { described_class.new(:heyo) }
- let!(:total_counter) { fake_total_counter(:heyo, subclass_type) }
- let!(:numerator_counter) { fake_numerator_counter(:heyo, subclass_type, numerator_type) }
+ let!(:total_counter) { fake_total_counter("heyo#{subclass_info[:suffix]}") }
+ let!(:numerator_counter) { fake_numerator_counter("heyo#{subclass_info[:suffix]}", subclass_info[:numerator]) }
- it "increments both counters for labels when #{numerator_type} is true" do
- sli.increment(labels: { hello: "world" }, numerator_type => true)
+ it "increments both counters for labels when #{subclass_info[:numerator]} is true" do
+ sli.increment(labels: { hello: "world" }, subclass_info[:numerator] => true)
expect(total_counter).to have_received(:increment).with({ hello: 'world' })
expect(numerator_counter).to have_received(:increment).with({ hello: 'world' })
end
- it "only increments the total counters for labels when #{numerator_type} is false" do
- sli.increment(labels: { hello: "world" }, numerator_type => false)
+ it "only increments the total counters for labels when #{subclass_info[:numerator]} is false" do
+ sli.increment(labels: { hello: "world" }, subclass_info[:numerator] => false)
expect(total_counter).to have_received(:increment).with({ hello: 'world' })
expect(numerator_counter).not_to have_received(:increment).with({ hello: 'world' })
@@ -168,11 +172,11 @@ RSpec.describe Gitlab::Metrics::Sli do
fake_counter
end
- def fake_total_counter(name, type)
- fake_prometheus_counter("gitlab_sli:#{name}_#{type}:total")
+ def fake_total_counter(name)
+ fake_prometheus_counter("gitlab_sli:#{name}:total")
end
- def fake_numerator_counter(name, type, numerator_name)
- fake_prometheus_counter("gitlab_sli:#{name}_#{type}:#{numerator_name}_total")
+ def fake_numerator_counter(name, numerator_name)
+ fake_prometheus_counter("gitlab_sli:#{name}:#{numerator_name}_total")
end
end
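The renamed helpers reflect the naming change this diff makes: the metric "type" suffix is now part of the SLI name itself (apdex SLIs are registered as `foo_apdex`, error-rate SLIs keep the bare name), so the counter helpers only interpolate the name. Illustrative helpers showing the resulting Prometheus counter names:

# Mirrors fake_total_counter / fake_numerator_counter above; names are for illustration.
def total_counter_name(sli_name)
  "gitlab_sli:#{sli_name}:total"
end

def numerator_counter_name(sli_name, numerator)
  "gitlab_sli:#{sli_name}:#{numerator}_total"
end

total_counter_name('foo_apdex')               # => "gitlab_sli:foo_apdex:total"
numerator_counter_name('foo_apdex', :success) # => "gitlab_sli:foo_apdex:success_total"
total_counter_name('foo')                     # => "gitlab_sli:foo:total"
numerator_counter_name('foo', :error)         # => "gitlab_sli:foo:error_total"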
diff --git a/spec/lib/gitlab/middleware/compressed_json_spec.rb b/spec/lib/gitlab/middleware/compressed_json_spec.rb
index c5efc568971..a07cd49c572 100644
--- a/spec/lib/gitlab/middleware/compressed_json_spec.rb
+++ b/spec/lib/gitlab/middleware/compressed_json_spec.rb
@@ -8,11 +8,12 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
+ let(:content_type) { 'application/json' }
let(:env) do
{
'HTTP_CONTENT_ENCODING' => 'gzip',
'REQUEST_METHOD' => 'POST',
- 'CONTENT_TYPE' => 'application/json',
+ 'CONTENT_TYPE' => content_type,
'PATH_INFO' => path,
'rack.input' => StringIO.new(input)
}
@@ -35,6 +36,12 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
let(:path) { '/api/v4/error_tracking/collector/1/store'}
it_behaves_like 'decompress middleware'
+
+ context 'with no Content-Type' do
+ let(:content_type) { nil }
+
+ it_behaves_like 'decompress middleware'
+ end
end
context 'with collector route under relative url' do
diff --git a/spec/lib/gitlab/pages_transfer_spec.rb b/spec/lib/gitlab/pages_transfer_spec.rb
deleted file mode 100644
index 021d9cb7318..00000000000
--- a/spec/lib/gitlab/pages_transfer_spec.rb
+++ /dev/null
@@ -1,157 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::PagesTransfer do
- describe '#async' do
- let(:async) { subject.async }
-
- context 'when receiving an allowed method' do
- it 'schedules a PagesTransferWorker', :aggregate_failures do
- described_class::METHODS.each do |meth|
- expect(PagesTransferWorker)
- .to receive(:perform_async).with(meth, %w[foo bar])
-
- async.public_send(meth, 'foo', 'bar')
- end
- end
-
- it 'does nothing if legacy storage is disabled' do
- allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
-
- described_class::METHODS.each do |meth|
- expect(PagesTransferWorker)
- .not_to receive(:perform_async)
-
- async.public_send(meth, 'foo', 'bar')
- end
- end
- end
-
- context 'when receiving a private method' do
- it 'raises NoMethodError' do
- expect { async.move('foo', 'bar') }.to raise_error(NoMethodError)
- end
- end
-
- context 'when receiving a non-existent method' do
- it 'raises NoMethodError' do
- expect { async.foo('bar') }.to raise_error(NoMethodError)
- end
- end
- end
-
- RSpec.shared_examples 'moving a pages directory' do |parameter|
- let!(:pages_path_before) { project.pages_path }
- let(:config_path_before) { File.join(pages_path_before, 'config.json') }
- let(:pages_path_after) { project.reload.pages_path }
- let(:config_path_after) { File.join(pages_path_after, 'config.json') }
-
- before do
- FileUtils.mkdir_p(pages_path_before)
- FileUtils.touch(config_path_before)
- end
-
- after do
- FileUtils.remove_entry(pages_path_before, true)
- FileUtils.remove_entry(pages_path_after, true)
- end
-
- it 'moves the directory' do
- subject.public_send(meth, *args)
-
- expect(File.exist?(config_path_before)).to be(false)
- expect(File.exist?(config_path_after)).to be(true)
- end
-
- it 'returns false if it fails to move the directory' do
- # Move the directory once, so it can't be moved again
- subject.public_send(meth, *args)
-
- expect(subject.public_send(meth, *args)).to be(false)
- end
-
- it 'does nothing if legacy storage is disabled' do
- allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
-
- subject.public_send(meth, *args)
-
- expect(File.exist?(config_path_before)).to be(true)
- expect(File.exist?(config_path_after)).to be(false)
- end
- end
-
- describe '#move_namespace' do
- # Can't use let_it_be because we change the path
- let(:group_1) { create(:group) }
- let(:group_2) { create(:group) }
- let(:subgroup) { create(:group, parent: group_1) }
- let(:project) { create(:project, group: subgroup) }
- let(:new_path) { "#{group_2.path}/#{subgroup.path}" }
- let(:meth) { 'move_namespace' }
-
- # Store the path before we change it
- let!(:args) { [project.path, subgroup.full_path, new_path] }
-
- before do
- # We need to skip hooks, otherwise the directory will be moved
- # via an ActiveRecord callback
- subgroup.update_columns(parent_id: group_2.id)
- subgroup.route.update!(path: new_path)
- end
-
- include_examples 'moving a pages directory'
- end
-
- describe '#move_project' do
- # Can't use let_it_be because we change the path
- let(:group_1) { create(:group) }
- let(:group_2) { create(:group) }
- let(:project) { create(:project, group: group_1) }
- let(:new_path) { group_2.path }
- let(:meth) { 'move_project' }
- let(:args) { [project.path, group_1.full_path, group_2.full_path] }
-
- include_examples 'moving a pages directory' do
- before do
- project.update!(group: group_2)
- end
- end
- end
-
- describe '#rename_project' do
- # Can't use let_it_be because we change the path
- let(:project) { create(:project) }
- let(:new_path) { project.path.succ }
- let(:meth) { 'rename_project' }
-
- # Store the path before we change it
- let!(:args) { [project.path, new_path, project.namespace.full_path] }
-
- include_examples 'moving a pages directory' do
- before do
- project.update!(path: new_path)
- end
- end
- end
-
- describe '#rename_namespace' do
- # Can't use let_it_be because we change the path
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
- let(:new_path) { project.namespace.full_path.succ }
- let(:meth) { 'rename_namespace' }
-
- # Store the path before we change it
- let!(:args) { [project.namespace.full_path, new_path] }
-
- before do
- # We need to skip hooks, otherwise the directory will be moved
- # via an ActiveRecord callback
- group.update_columns(path: new_path)
- group.route.update!(path: new_path)
- end
-
- include_examples 'moving a pages directory'
- end
-end
diff --git a/spec/lib/gitlab/patch/database_config_spec.rb b/spec/lib/gitlab/patch/database_config_spec.rb
index 73dc84bb2ef..b06d28dbcd5 100644
--- a/spec/lib/gitlab/patch/database_config_spec.rb
+++ b/spec/lib/gitlab/patch/database_config_spec.rb
@@ -11,9 +11,6 @@ RSpec.describe Gitlab::Patch::DatabaseConfig do
let(:configuration) { Rails::Application::Configuration.new(Rails.root) }
before do
- allow(File).to receive(:exist?).and_call_original
- allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false)
-
# The `AS::ConfigurationFile` calls `read` in `def initialize`
# thus we cannot use `expect_next_instance_of`
# rubocop:disable RSpec/AnyInstanceOf
diff --git a/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb b/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb
new file mode 100644
index 00000000000..ce05d5b11c7
--- /dev/null
+++ b/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ProjectStatsRefreshConflictsLogger do
+ before do
+ Gitlab::ApplicationContext.push(feature_category: 'test', caller_id: 'caller')
+ end
+
+ describe '.warn_artifact_deletion_during_stats_refresh' do
+ it 'logs a warning about artifacts being deleted while the project is undergoing stats refresh' do
+ project_id = 123
+ method = 'Foo#action'
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ hash_including(
+ message: 'Deleted artifacts undergoing refresh',
+ method: method,
+ project_id: project_id,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ described_class.warn_artifact_deletion_during_stats_refresh(project_id: project_id, method: method)
+ end
+ end
+
+ describe '.warn_request_rejected_during_stats_refresh' do
+ it 'logs a warning about a request being rejected while the project is undergoing stats refresh' do
+ project_id = 123
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ hash_including(
+ message: 'Rejected request due to project undergoing stats refresh',
+ project_id: project_id,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ described_class.warn_request_rejected_during_stats_refresh(project_id)
+ end
+ end
+
+ describe '.warn_skipped_artifact_deletion_during_stats_refresh' do
+ it 'logs a warning about artifacts being excluded from deletion while the project is undergoing stats refresh' do
+ project_ids = [12, 34]
+ method = 'Foo#action'
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ hash_including(
+ message: 'Skipped deleting artifacts undergoing refresh',
+ method: method,
+ project_ids: match_array(project_ids),
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ described_class.warn_skipped_artifact_deletion_during_stats_refresh(project_ids: project_ids, method: method)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index 0ef52b63bc6..630369977ff 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -3,19 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::ProjectTemplate do
+ include ProjectTemplateTestHelper
+
describe '.all' do
it 'returns all templates' do
- expected = %w[
- rails spring express iosswift dotnetcore android
- gomicro gatsby hugo jekyll plainhtml gitbook
- hexo middleman gitpod_spring_petclinic nfhugo
- nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
- serverless_framework tencent_serverless_framework
- jsonnet cluster_management kotlin_native_linux
- ]
-
expect(described_class.all).to be_an(Array)
- expect(described_class.all.map(&:name)).to match_array(expected)
+ expect(described_class.all.map(&:name)).to match_array(all_templates)
end
end
diff --git a/spec/lib/gitlab/protocol_access_spec.rb b/spec/lib/gitlab/protocol_access_spec.rb
new file mode 100644
index 00000000000..4722ea99608
--- /dev/null
+++ b/spec/lib/gitlab/protocol_access_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::ProtocolAccess do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:p1) { create(:project, :repository, namespace: group) }
+
+ describe ".allowed?" do
+ where(:protocol, :project, :admin_setting, :namespace_setting, :expected_result) do
+ "web" | nil | nil | nil | true
+ "ssh" | nil | nil | nil | true
+ "http" | nil | nil | nil | true
+ "ssh" | nil | "" | nil | true
+ "http" | nil | "" | nil | true
+ "ssh" | nil | "ssh" | nil | true
+ "http" | nil | "http" | nil | true
+ "ssh" | nil | "http" | nil | false
+ "http" | nil | "ssh" | nil | false
+ "ssh" | ref(:p1) | nil | "all" | true
+ "http" | ref(:p1) | nil | "all" | true
+ "ssh" | ref(:p1) | nil | "ssh" | true
+ "http" | ref(:p1) | nil | "http" | true
+ "ssh" | ref(:p1) | nil | "http" | false
+ "http" | ref(:p1) | nil | "ssh" | false
+ "ssh" | ref(:p1) | "" | "all" | true
+ "http" | ref(:p1) | "" | "all" | true
+ "ssh" | ref(:p1) | "ssh" | "ssh" | true
+ "http" | ref(:p1) | "http" | "http" | true
+ end
+
+ with_them do
+ subject { described_class.allowed?(protocol, project: project) }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:enabled_git_access_protocol).and_return(admin_setting)
+
+ if project.present?
+ project.root_namespace.namespace_settings.update!(enabled_git_access_protocol: namespace_setting)
+ end
+ end
+
+ it do
+ is_expected.to be(expected_result)
+ end
+ end
+ end
+end
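One way to read the truth table: "web" access is always allowed, a blank or "all" setting permits every protocol, and when a project is given its root namespace's setting is consulted, falling back to the instance-wide admin setting when the namespace has none. A sketch of that rule, stated as an interpretation of the table rather than the production method:

# Hypothetical predicate reproducing the expected_result column above.
def allowed_protocol?(protocol, project: nil)
  return true if protocol == 'web'

  setting = project&.root_namespace&.namespace_settings&.enabled_git_access_protocol
  setting = Gitlab::CurrentSettings.enabled_git_access_protocol if setting.blank?

  setting.blank? || setting == 'all' || setting == protocol
end

Note the table never pits a namespace setting against a conflicting admin setting, so the precedence assumed here between the two is an interpretation.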
diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb
index 39ea02bad8b..7ba4eab50c7 100644
--- a/spec/lib/gitlab/rack_attack_spec.rb
+++ b/spec/lib/gitlab/rack_attack_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
end
before do
- allow(fake_rack_attack).to receive(:throttled_response=)
+ allow(fake_rack_attack).to receive(:throttled_responder=)
allow(fake_rack_attack).to receive(:throttle)
allow(fake_rack_attack).to receive(:track)
allow(fake_rack_attack).to receive(:safelist)
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
it 'configures the throttle response' do
described_class.configure(fake_rack_attack)
- expect(fake_rack_attack).to have_received(:throttled_response=).with(an_instance_of(Proc))
+ expect(fake_rack_attack).to have_received(:throttled_responder=).with(an_instance_of(Proc))
end
it 'configures the safelist' do
diff --git a/spec/lib/gitlab/redis/duplicate_jobs_spec.rb b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
new file mode 100644
index 00000000000..53e3d73d17e
--- /dev/null
+++ b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::DuplicateJobs do
+ # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
+ # to move away from `Sidekiq.redis` for duplicate job data. Thus, we use the
+ # same store configuration as the former.
+ let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
+ let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" }
+
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ subject { described_class.pool }
+
+ before do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+ end
+
+ after do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ context 'store connection settings' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.primary_store.connection[:namespace]).to be_nil
+ expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
+ expect(redis_instance.secondary_store.connection[:namespace]).to eq("resque:gitlab")
+
+ expect(redis_instance.instance_name).to eq('DuplicateJobs')
+ end
+ end
+ end
+
+ # Make sure the current namespace is respected for the secondary store but omitted from the primary
+ context 'key namespaces' do
+ let(:key) { 'key' }
+ let(:value) { '123' }
+
+ it 'writes keys to SharedState with no prefix, and to Queues with the "resque:gitlab:" prefix' do
+ subject.with do |redis_instance|
+ redis_instance.set(key, value)
+ end
+
+ Gitlab::Redis::SharedState.with do |redis_instance|
+ expect(redis_instance.get(key)).to eq(value)
+ end
+
+ Gitlab::Redis::Queues.with do |redis_instance|
+ expect(redis_instance.get("resque:gitlab:#{key}")).to eq(value)
+ end
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_duplicate_jobs,
+ :use_primary_store_as_default_for_duplicate_jobs
+ end
+
+ describe '#raw_config_hash' do
+ it 'has a legacy default URL' do
+ expect(subject).to receive(:fetch_config) { false }
+
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
+ end
+ end
+
+ describe '#store_name' do
+ it 'returns the name of the SharedState store' do
+ expect(described_class.store_name).to eq('SharedState')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
new file mode 100644
index 00000000000..e127c89c303
--- /dev/null
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -0,0 +1,924 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::MultiStore do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:redis_store_class) do
+ Class.new(Gitlab::Redis::Wrapper) do
+ def config_file_name
+ config_file_name = "spec/fixtures/config/redis_new_format_host.yml"
+ Rails.root.join(config_file_name).to_s
+ end
+
+ def self.name
+ 'Sessions'
+ end
+ end
+ end
+
+ let_it_be(:primary_db) { 1 }
+ let_it_be(:secondary_db) { 2 }
+ let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
+ let_it_be(:instance_name) { 'TestStore' }
+ let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ subject { multi_store.send(name, *args) }
+
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ after(:all) do
+ primary_store.flushdb
+ secondary_store.flushdb
+ end
+
+ context 'when primary_store is nil' do
+ let(:multi_store) { described_class.new(nil, secondary_store, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
+ end
+ end
+
+ context 'when secondary_store is nil' do
+ let(:multi_store) { described_class.new(primary_store, nil, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
+ end
+ end
+
+ context 'when instance_name is nil' do
+ let(:instance_name) { nil }
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
+ end
+ end
+
+ context 'when primary_store is not a ::Redis instance' do
+ before do
+ allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(primary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(false)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }
+ .to raise_error(ArgumentError, /invalid primary_store/)
+ end
+ end
+
+ context 'when primary_store is a ::Redis::Namespace instance' do
+ before do
+ allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(primary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(true)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }.not_to raise_error
+ end
+ end
+
+ context 'when secondary_store is not a ::Redis instance' do
+ before do
+ allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(secondary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(false)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }
+ .to raise_error(ArgumentError, /invalid secondary_store/)
+ end
+ end
+
+ context 'when secondary_store is a ::Redis::Namespace instance' do
+ before do
+ allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(secondary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(true)
+ end
+
+    it 'does not raise an exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }.not_to raise_error
+ end
+ end
+
+ context 'with READ redis commands' do
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:key2) { "redis:{1}:key_b" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:skey) { "redis:set:key" }
+ let_it_be(:keys) { [key1, key2] }
+ let_it_be(:values) { [value1, value2] }
+ let_it_be(:svalues) { [value2, value1] }
+
+ where(:case_name, :name, :args, :value, :block) do
+ 'execute :get command' | :get | ref(:key1) | ref(:value1) | nil
+ 'execute :mget command' | :mget | ref(:keys) | ref(:values) | nil
+ 'execute :mget with block' | :mget | ref(:keys) | ref(:values) | ->(value) { value }
+ 'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | nil
+ 'execute :scard command' | :scard | ref(:skey) | 2 | nil
+ end
+
+ before(:all) do
+ primary_store.multi do |multi|
+ multi.set(key1, value1)
+ multi.set(key2, value2)
+ multi.sadd(skey, value1)
+ multi.sadd(skey, value2)
+ end
+
+ secondary_store.multi do |multi|
+ multi.set(key1, value1)
+ multi.set(key2, value2)
+ multi.sadd(skey, value1)
+ multi.sadd(skey, value2)
+ end
+ end
+
+ RSpec.shared_examples_for 'reads correct value' do
+ it 'returns the correct value' do
+ if value.is_a?(Array)
+ # :smembers does not guarantee the order it will return the values (unsorted set)
+ is_expected.to match_array(value)
+ else
+ is_expected.to eq(value)
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'fallback read from the secondary store' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
+      it 'falls back and executes on the secondary instance' do
+ expect(secondary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ it 'logs the ReadFromPrimaryError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::ReadFromPrimaryError),
+ hash_including(command_name: name, instance_name: instance_name)
+ )
+
+ subject
+ end
+
+      it 'increments the read fallback count metric' do
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ context 'when fallback read from the secondary instance raises an exception' do
+ before do
+ allow(secondary_store).to receive(name).with(*args).and_raise(StandardError)
+ end
+
+ it 'fails with exception' do
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'secondary store' do
+      it 'executes on the secondary instance' do
+ expect(secondary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ it 'does not execute on the primary store' do
+ expect(primary_store).not_to receive(name)
+
+ subject
+ end
+ end
+
+ with_them do
+ describe "#{name}" do
+ before do
+ allow(primary_store).to receive(name).and_call_original
+ allow(secondary_store).to receive(name).and_call_original
+ end
+
+        context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when reading from the primary is successful' do
+            it 'executes on the primary instance' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+ end
+
+          context 'when reading from the primary instance raises an exception' do
+ before do
+ allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+ it 'logs the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, instance_name: instance_name, command_name: name))
+
+ subject
+ end
+
+ include_examples 'fallback read from the secondary store'
+ end
+
+          context 'when reading from the primary instance returns no value' do
+ before do
+ allow(primary_store).to receive(name).and_return(nil)
+ end
+
+ include_examples 'fallback read from the secondary store'
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.send(name, *args)
+ end
+ end
+
+            it 'is executed only once on the primary instance' do
+ expect(primary_store).to receive(name).with(*args).once
+
+ subject
+ end
+ end
+
+ if params[:block]
+ subject do
+ multi_store.send(name, *args, &block)
+ end
+
+ context 'when block is provided' do
+ it 'yields to the block' do
+ expect(primary_store).to receive(name).and_yield(value)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+ end
+ end
+ end
+
+        context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it_behaves_like 'secondary store'
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+            it 'executes on the primary instance' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+ end
+ end
+
+ context 'with both primary and secondary store using same redis instance' do
+ let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ it_behaves_like 'secondary store'
+ end
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'verify that store contains values' do |store|
+ it "#{store} redis store contains correct values", :aggregate_errors do
+ subject
+
+ redis_store = multi_store.send(store)
+
+ if expected_value.is_a?(Array)
+ # :smembers does not guarantee the order it will return the values
+ expect(redis_store.send(verification_name, *verification_args)).to match_array(expected_value)
+ else
+ expect(redis_store.send(verification_name, *verification_args)).to eq(expected_value)
+ end
+ end
+ end
+
+ context 'with WRITE redis commands' do
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:key2) { "redis:{1}:key_b" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:key1_value1) { [key1, value1] }
+ let_it_be(:key1_value2) { [key1, value2] }
+ let_it_be(:ttl) { 10 }
+ let_it_be(:key1_ttl_value1) { [key1, ttl, value1] }
+ let_it_be(:skey) { "redis:set:key" }
+ let_it_be(:svalues1) { [value2, value1] }
+ let_it_be(:svalues2) { [value1] }
+ let_it_be(:skey_value1) { [skey, value1] }
+ let_it_be(:skey_value2) { [skey, value2] }
+
+ where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do
+ 'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
+ 'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
+ 'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
+ 'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
+ 'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
+ 'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
+ 'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
+ end
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+
+ primary_store.multi do |multi|
+ multi.set(key2, value1)
+ multi.sadd(skey, value1)
+ end
+
+ secondary_store.multi do |multi|
+ multi.set(key2, value1)
+ multi.sadd(skey, value1)
+ end
+ end
+
+ with_them do
+ describe "#{name}" do
+        let(:expected_args) { args || no_args }
+
+ before do
+ allow(primary_store).to receive(name).and_call_original
+ allow(secondary_store).to receive(name).and_call_original
+ end
+
+        context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+          context 'when executing on the primary instance raises an exception' do
+ before do
+ allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+            it 'logs the exception and executes on the secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, command_name: name, instance_name: instance_name))
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.send(name, *args)
+ end
+ end
+
+            it 'is executed only once on each instance', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).once
+ expect(secondary_store).to receive(name).with(*expected_args).once
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on the secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name).with(*expected_args)
+ expect(primary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+            it 'executes only on the primary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args)
+ expect(secondary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ end
+ end
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'pipelined command' do |name|
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:expected_value) { value1 }
+ let_it_be(:verification_name) { :get }
+ let_it_be(:verification_args) { key1 }
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+ end
+
+ describe "command execution in a transaction" do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).with(
+ :gitlab_redis_multi_store_pipelined_diff_error_total,
+ 'Redis MultiStore pipelined command diff between stores'
+ ).and_return(counter)
+ end
+
+ subject do
+ multi_store.send(name) do |redis|
+ redis.set(key1, value1)
+ end
+ end
+
+      context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).and_call_original
+ expect(secondary_store).to receive(name).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+        context 'when executing on the primary instance raises an exception' do
+ before do
+ allow(primary_store).to receive(name).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+          it 'logs the exception and executes on the secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, command_name: name))
+ expect(secondary_store).to receive(name).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ describe 'return values from a transaction' do
+ subject do
+ multi_store.send(name) do |redis|
+ redis.get(key1)
+ end
+ end
+
+          context 'when the value exists on both stores and is equal' do
+ before do
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value1)
+ end
+
+ it 'returns the value' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect(subject).to eq([value1])
+ end
+ end
+
+          context 'when the value exists on both stores but differs' do
+ before do
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value2)
+ end
+
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ hash_including(command_name: name, instance_name: instance_name)
+ ).and_call_original
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
+ end
+ end
+
+ context 'when the value does not exist on the primary but it does on the secondary' do
+ before do
+ secondary_store.set(key1, value2)
+ end
+
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ hash_including(command_name: name, instance_name: instance_name)
+ )
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
+ end
+ end
+
+ context 'when the value does not exist in either' do
+ it 'returns nil without logging an error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(counter).not_to receive(:increment)
+
+ expect(subject).to eq([nil])
+ end
+ end
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on the secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name)
+ expect(primary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+          it 'executes only on the primary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name)
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ end
+ end
+ end
+ end
+
+ describe '#multi' do
+ include_examples 'pipelined command', :multi
+ end
+
+ describe '#pipelined' do
+ include_examples 'pipelined command', :pipelined
+ end
+
+ context 'with unsupported command' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
+ let_it_be(:key) { "redis:counter" }
+
+ subject { multi_store.incr(key) }
+
+ it 'responds to missing method' do
+ expect(multi_store).to receive(:respond_to_missing?).and_call_original
+
+ expect(multi_store.respond_to?(:incr)).to be(true)
+ end
+
+ it 'executes method missing' do
+ expect(multi_store).to receive(:method_missing)
+
+ subject
+ end
+
+ context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ it 'logs MethodMissingError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
+ hash_including(command_name: :incr, instance_name: instance_name)
+ )
+
+ subject
+ end
+
+ it 'increments method missing counter' do
+ expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)
+
+ subject
+ end
+ end
+
+ context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ subject { multi_store.info }
+
+ it 'does not log MethodMissingError' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ subject
+ end
+
+ it 'does not increment method missing counter' do
+ expect(counter).not_to receive(:increment)
+
+ subject
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+      it 'falls back and executes only on the primary store', :aggregate_errors do
+ expect(primary_store).to receive(:incr).with(key).and_call_original
+ expect(secondary_store).not_to receive(:incr)
+
+ subject
+ end
+
+      it 'stores the correct value only on the primary store', :aggregate_errors do
+ subject
+
+ expect(secondary_store.get(key)).to be_nil
+ expect(primary_store.get(key)).to eq('1')
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+      it 'falls back and executes only on the secondary store', :aggregate_errors do
+ expect(secondary_store).to receive(:incr).with(key).and_call_original
+ expect(primary_store).not_to receive(:incr)
+
+ subject
+ end
+
+      it 'stores the correct value only on the secondary store', :aggregate_errors do
+ subject
+
+ expect(primary_store.get(key)).to be_nil
+ expect(secondary_store.get(key)).to eq('1')
+ end
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.incr(key)
+ end
+ end
+
+      it 'is executed only once on each instance', :aggregate_errors do
+ expect(primary_store).to receive(:incr).with(key).once
+ expect(secondary_store).to receive(:incr).with(key).once
+
+ subject
+ end
+
+      it "both redis stores contain the correct values", :aggregate_errors do
+ subject
+
+ expect(primary_store.get(key)).to eq('1')
+ expect(secondary_store.get(key)).to eq('1')
+ end
+ end
+ end
+
+ describe '#to_s' do
+ subject { multi_store.to_s }
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+        it 'returns the same value as secondary_store' do
+ is_expected.to eq(secondary_store.to_s)
+ end
+ end
+ end
+ end
+
+ describe '#is_a?' do
+ it 'returns true for ::Redis::Store' do
+ expect(multi_store.is_a?(::Redis::Store)).to be true
+ end
+ end
+
+ describe '#use_primary_and_secondary_stores?' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+      it 'multi store is enabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'with empty DB' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'when FF table guard raises' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ describe '#use_primary_store_as_default?' do
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+      it 'primary store is used as default' do
+ expect(multi_store.use_primary_store_as_default?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+      it 'primary store is not used as default' do
+ expect(multi_store.use_primary_store_as_default?).to be false
+ end
+ end
+
+ context 'with empty DB' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
+ end
+
+      it 'primary store is not used as default' do
+        expect(multi_store.use_primary_store_as_default?).to be false
+ end
+ end
+
+ context 'when FF table guard raises' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
+ end
+
+      it 'primary store is not used as default' do
+        expect(multi_store.use_primary_store_as_default?).to be false
+ end
+ end
+ end
+
+ def create_redis_store(options, extras = {})
+ ::Redis::Store.new(options.merge(extras))
+ end
+end
diff --git a/spec/lib/gitlab/redis/sidekiq_status_spec.rb b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
new file mode 100644
index 00000000000..f641ea40efd
--- /dev/null
+++ b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::SidekiqStatus do
+ # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
+ # to move away from `Sidekiq.redis` for sidekiq status data. Thus, we use the
+ # same store configuration as the former.
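+  #
+  # A minimal sketch (an assumption, not the actual class body) of the shape this
+  # spec exercises: the pool hands out a MultiStore whose primary store is built
+  # from SharedState params, whose secondary store is built from Queues params,
+  # and whose instance name is 'SidekiqStatus':
+  #
+  #   def self.redis
+  #     ::Gitlab::Redis::MultiStore.new(
+  #       ::Redis.new(Gitlab::Redis::SharedState.params), # primary store
+  #       ::Redis.new(Gitlab::Redis::Queues.params),      # secondary store
+  #       'SidekiqStatus'                                 # instance name
+  #     )
+  #   end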
+ let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
+ let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" }
+
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ subject { described_class.pool }
+
+ before do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ after do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
+
+ expect(redis_instance.instance_name).to eq('SidekiqStatus')
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sidekiq_status,
+ :use_primary_store_as_default_for_sidekiq_status
+ end
+
+ describe '#raw_config_hash' do
+ it 'has a legacy default URL' do
+ expect(subject).to receive(:fetch_config) { false }
+
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
+ end
+ end
+
+ describe '#store_name' do
+ it 'returns the name of the SharedState store' do
+ expect(described_class.store_name).to eq('SharedState')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/regex_requires_app_spec.rb b/spec/lib/gitlab/regex_requires_app_spec.rb
new file mode 100644
index 00000000000..5808033dc4c
--- /dev/null
+++ b/spec/lib/gitlab/regex_requires_app_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Only specs that *cannot* be run with fast_spec_helper only
+# See regex_spec for tests that do not require the full spec_helper
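+#
+# For contrast, a minimal sketch of how the fast variant boots (mirroring the
+# requires used by regex_spec.rb later in this change):
+#
+#   require 'fast_spec_helper'
+#   require_relative '../../../lib/gitlab/regex'
+#
+#   RSpec.describe Gitlab::Regex do
+#     # specs that only need the regex definitions, without booting Rails
+#   end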
+RSpec.describe Gitlab::Regex do
+ describe '.debian_architecture_regex' do
+ subject { described_class.debian_architecture_regex }
+
+ it { is_expected.to match('amd64') }
+ it { is_expected.to match('kfreebsd-i386') }
+
+ # may not be empty string
+ it { is_expected.not_to match('') }
+ # must start with an alphanumeric
+ it { is_expected.not_to match('-a') }
+ it { is_expected.not_to match('+a') }
+ it { is_expected.not_to match('.a') }
+ it { is_expected.not_to match('_a') }
+ # only letters, digits and characters '-'
+ it { is_expected.not_to match('a+b') }
+ it { is_expected.not_to match('a.b') }
+ it { is_expected.not_to match('a_b') }
+ it { is_expected.not_to match('a~') }
+ it { is_expected.not_to match('aé') }
+
+ # More strict
+ # Enforce lowercase
+ it { is_expected.not_to match('AMD64') }
+ it { is_expected.not_to match('Amd64') }
+ it { is_expected.not_to match('aMD64') }
+ end
+
+ describe '.npm_package_name_regex' do
+ subject { described_class.npm_package_name_regex }
+
+ it { is_expected.to match('@scope/package') }
+ it { is_expected.to match('unscoped-package') }
+ it { is_expected.not_to match('@first-scope@second-scope/package') }
+ it { is_expected.not_to match('scope-without-at-symbol/package') }
+ it { is_expected.not_to match('@not-a-scoped-package') }
+ it { is_expected.not_to match('@scope/sub/package') }
+ it { is_expected.not_to match('@scope/../../package') }
+ it { is_expected.not_to match('@scope%2e%2e%2fpackage') }
+ it { is_expected.not_to match('@%2e%2e%2f/package') }
+
+ context 'capturing group' do
+ [
+ ['@scope/package', 'scope'],
+ ['unscoped-package', nil],
+ ['@not-a-scoped-package', nil],
+ ['@scope/sub/package', nil],
+ ['@inv@lid-scope/package', nil]
+ ].each do |package_name, extracted_scope_name|
+ it "extracts the scope name for #{package_name}" do
+ match = package_name.match(described_class.npm_package_name_regex)
+ expect(match&.captures&.first).to eq(extracted_scope_name)
+ end
+ end
+ end
+ end
+
+ describe '.debian_distribution_regex' do
+ subject { described_class.debian_distribution_regex }
+
+ it { is_expected.to match('buster') }
+ it { is_expected.to match('buster-updates') }
+ it { is_expected.to match('Debian10.5') }
+
+ # Do not allow slash, even if this exists in the wild
+ it { is_expected.not_to match('jessie/updates') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
+ describe '.debian_component_regex' do
+ subject { described_class.debian_component_regex }
+
+ it { is_expected.to match('main') }
+ it { is_expected.to match('non-free') }
+
+ # Do not allow slash
+ it { is_expected.not_to match('non/free') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index b4c1f3b689b..d48e8183650 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -1,7 +1,11 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require_relative '../../../lib/gitlab/regex'
+
+# All specs that can be run with fast_spec_helper only
+# See regex_requires_app_spec for tests that require the full spec_helper
RSpec.describe Gitlab::Regex do
shared_examples_for 'project/group name chars regex' do
it { is_expected.to match('gitlab-ce') }
@@ -401,35 +405,6 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
- describe '.npm_package_name_regex' do
- subject { described_class.npm_package_name_regex }
-
- it { is_expected.to match('@scope/package') }
- it { is_expected.to match('unscoped-package') }
- it { is_expected.not_to match('@first-scope@second-scope/package') }
- it { is_expected.not_to match('scope-without-at-symbol/package') }
- it { is_expected.not_to match('@not-a-scoped-package') }
- it { is_expected.not_to match('@scope/sub/package') }
- it { is_expected.not_to match('@scope/../../package') }
- it { is_expected.not_to match('@scope%2e%2e%2fpackage') }
- it { is_expected.not_to match('@%2e%2e%2f/package') }
-
- context 'capturing group' do
- [
- ['@scope/package', 'scope'],
- ['unscoped-package', nil],
- ['@not-a-scoped-package', nil],
- ['@scope/sub/package', nil],
- ['@inv@lid-scope/package', nil]
- ].each do |package_name, extracted_scope_name|
- it "extracts the scope name for #{package_name}" do
- match = package_name.match(described_class.npm_package_name_regex)
- expect(match&.captures&.first).to eq(extracted_scope_name)
- end
- end
- end
- end
-
describe '.nuget_version_regex' do
subject { described_class.nuget_version_regex }
@@ -595,8 +570,7 @@ RSpec.describe Gitlab::Regex do
# nothing after colon in version number
it { is_expected.not_to match('2:') }
# revision number is empty
- # Note: we are less strict here
- # it { is_expected.not_to match('1.0-') }
+ it { is_expected.not_to match('1.0-') }
# version number is empty
it { is_expected.not_to match('-1') }
it { is_expected.not_to match('2:-1') }
@@ -618,63 +592,12 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('1.0 ') }
# dpkg accepts multiple colons
it { is_expected.not_to match('1:2:3') }
+ # we limit the number of dashes
+ it { is_expected.to match('1-2-3-4-5-6-7-8-9-10-11-12-13-14-15') }
+ it { is_expected.not_to match('1-2-3-4-5-6-7-8-9-10-11-12-13-14-15-16') }
end
end
- describe '.debian_architecture_regex' do
- subject { described_class.debian_architecture_regex }
-
- it { is_expected.to match('amd64') }
- it { is_expected.to match('kfreebsd-i386') }
-
- # may not be empty string
- it { is_expected.not_to match('') }
- # must start with an alphanumeric
- it { is_expected.not_to match('-a') }
- it { is_expected.not_to match('+a') }
- it { is_expected.not_to match('.a') }
- it { is_expected.not_to match('_a') }
- # only letters, digits and characters '-'
- it { is_expected.not_to match('a+b') }
- it { is_expected.not_to match('a.b') }
- it { is_expected.not_to match('a_b') }
- it { is_expected.not_to match('a~') }
- it { is_expected.not_to match('aé') }
-
- # More strict
- # Enforce lowercase
- it { is_expected.not_to match('AMD64') }
- it { is_expected.not_to match('Amd64') }
- it { is_expected.not_to match('aMD64') }
- end
-
- describe '.debian_distribution_regex' do
- subject { described_class.debian_distribution_regex }
-
- it { is_expected.to match('buster') }
- it { is_expected.to match('buster-updates') }
- it { is_expected.to match('Debian10.5') }
-
- # Do not allow slash, even if this exists in the wild
- it { is_expected.not_to match('jessie/updates') }
-
- # Do not allow Unicode
- it { is_expected.not_to match('hé') }
- end
-
- describe '.debian_component_regex' do
- subject { described_class.debian_component_regex }
-
- it { is_expected.to match('main') }
- it { is_expected.to match('non-free') }
-
- # Do not allow slash
- it { is_expected.not_to match('non/free') }
-
- # Do not allow Unicode
- it { is_expected.not_to match('hé') }
- end
-
describe '.helm_channel_regex' do
subject { described_class.helm_channel_regex }
@@ -1020,4 +943,29 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('a' * 63 + '#') }
it { is_expected.not_to match('') }
end
+
+ describe '.sep_by_1' do
+ subject { %r{\A #{described_class.sep_by_1(/\.+/, /[abcdef]{3}/)} \z}x }
+
+ it { is_expected.to match('abc') }
+ it { is_expected.to match('abc.def') }
+ it { is_expected.to match('abc.def.caf') }
+ it { is_expected.to match('abc..def') }
+ it { is_expected.to match('abc..def..caf') }
+ it { is_expected.to match('abc...def') }
+ it { is_expected.to match('abc....def........caf') }
+ it { is_expected.to match((['abc'] * 100).join('.')) }
+
+ it { is_expected.not_to match('') }
+ it { is_expected.not_to match('a') }
+ it { is_expected.not_to match('aaaa') }
+ it { is_expected.not_to match('foo') }
+ it { is_expected.not_to match('.abc') }
+ it { is_expected.not_to match('abc.') }
+ it { is_expected.not_to match('.abc.def') }
+ it { is_expected.not_to match('abc.def.') }
+ it { is_expected.not_to match('abc.defe.caf') }
+ it { is_expected.not_to match('abc!abc') }
+ it { is_expected.not_to match((['abc'] * 100).join('.') + '!') }
+ end
end
diff --git a/spec/lib/gitlab/render_timeout_spec.rb b/spec/lib/gitlab/render_timeout_spec.rb
new file mode 100644
index 00000000000..f322d71867b
--- /dev/null
+++ b/spec/lib/gitlab/render_timeout_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::RenderTimeout do
+  def expect_timeout(period)
+    passed_block = proc {}
+
+    expect(Timeout).to receive(:timeout).with(period) do |_, &block|
+      expect(block).to eq(passed_block)
+    end
+
+    described_class.timeout(&passed_block)
+  end
+
+ it 'utilizes timeout for web' do
+ expect_timeout(described_class::FOREGROUND)
+ end
+
+ it 'utilizes longer timeout for sidekiq' do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+
+ expect_timeout(described_class::BACKGROUND)
+ end
+end
diff --git a/spec/lib/gitlab/seeder_spec.rb b/spec/lib/gitlab/seeder_spec.rb
index a22d47cbfb3..a94ae2bca7a 100644
--- a/spec/lib/gitlab/seeder_spec.rb
+++ b/spec/lib/gitlab/seeder_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Seeder do
describe '.quiet' do
let(:database_base_models) do
{
- main: ApplicationRecord,
+ main: ActiveRecord::Base,
ci: Ci::ApplicationRecord
}
end
diff --git a/spec/lib/gitlab/service_desk_email_spec.rb b/spec/lib/gitlab/service_desk_email_spec.rb
index 67a1f07eec6..9847496e361 100644
--- a/spec/lib/gitlab/service_desk_email_spec.rb
+++ b/spec/lib/gitlab/service_desk_email_spec.rb
@@ -78,4 +78,10 @@ RSpec.describe Gitlab::ServiceDeskEmail do
end
end
end
+
+  describe '.key_from_fallback_message_id' do
+ it 'returns reply key' do
+ expect(described_class.key_from_fallback_message_id('reply-key@localhost')).to eq('key')
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 00ae55237e9..9c0cbe21e6b 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -59,6 +59,21 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
+ it 'logs the normalized SQL query for statement timeouts' do
+ travel_to(timestamp) do
+ expect(logger).to receive(:info).with(start_payload)
+ expect(logger).to receive(:warn).with(
+ include('exception.sql' => 'SELECT "users".* FROM "users" WHERE "users"."id" = $1 AND "users"."foo" = $2')
+ )
+
+ expect do
+ call_subject(job, 'test_queue') do
+ raise ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = 2')
+ end
+ end.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+
it 'logs the root cause of an Sidekiq::JobRetry::Skip exception in the job' do
travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
@@ -100,8 +115,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
include(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
'job_status' => 'fail',
- 'error_class' => 'Sidekiq::JobRetry::Skip',
- 'error_message' => 'Sidekiq::JobRetry::Skip'
+ 'exception.class' => 'Sidekiq::JobRetry::Skip',
+ 'exception.message' => 'Sidekiq::JobRetry::Skip'
)
)
expect(subject).to receive(:log_job_start).and_call_original
@@ -288,7 +303,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'duration_s' => 0.0,
'completed_at' => timestamp.to_f,
'cpu_s' => 1.111112,
- 'rate_limiting_gates' => []
+ 'rate_limiting_gates' => [],
+ 'worker_id' => "process_#{Process.pid}"
)
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index 8d46845548a..d240bf51e67 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues do
+RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
using RSpec::Parameterized::TableSyntax
subject(:duplicate_job) do
@@ -81,135 +81,99 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- describe '#check!' do
- context 'when there was no job in the queue yet' do
- it { expect(duplicate_job.check!).to eq('123') }
+ shared_examples 'tracking duplicates in redis' do
+ describe '#check!' do
+ context 'when there was no job in the queue yet' do
+ it { expect(duplicate_job.check!).to eq('123') }
- shared_examples 'sets Redis keys with correct TTL' do
- it "adds an idempotency key with correct ttl" do
- expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from([nil, -2])
- .to(['123', be_within(1).of(expected_ttl)])
- end
-
- context 'when wal locations is not empty' do
- it "adds an existing wal locations key with correct ttl" do
+ shared_examples 'sets Redis keys with correct TTL' do
+ it "adds an idempotency key with correct ttl" do
expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([nil, -2])
- .to([wal_locations[:main], be_within(1).of(expected_ttl)])
- .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .to change { read_idempotency_key_with_ttl(idempotency_key) }
.from([nil, -2])
- .to([wal_locations[:ci], be_within(1).of(expected_ttl)])
+ .to(['123', be_within(1).of(expected_ttl)])
end
- end
- end
- context 'with TTL option is not set' do
- let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
-
- it_behaves_like 'sets Redis keys with correct TTL'
- end
-
- context 'when TTL option is set' do
- let(:expected_ttl) { 5.minutes }
-
- before do
- allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
+          context 'when wal locations are not empty' do
+ it "adds an existing wal locations key with correct ttl" do
+ expect { duplicate_job.check! }
+ .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([nil, -2])
+ .to([wal_locations[:main], be_within(1).of(expected_ttl)])
+ .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([nil, -2])
+ .to([wal_locations[:ci], be_within(1).of(expected_ttl)])
+ end
+ end
end
- it_behaves_like 'sets Redis keys with correct TTL'
- end
+ context 'when TTL option is not set' do
+ let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
- it "adds the idempotency key to the jobs payload" do
- expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
- end
- end
-
- context 'when there was already a job with same arguments in the same queue' do
- before do
- set_idempotency_key(idempotency_key, 'existing-key')
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ it_behaves_like 'sets Redis keys with correct TTL'
end
- end
- it { expect(duplicate_job.check!).to eq('existing-key') }
+ context 'when TTL option is set' do
+ let(:expected_ttl) { 5.minutes }
- it "does not change the existing key's TTL" do
- expect { duplicate_job.check! }
- .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from(['existing-key', -1])
- end
-
- it "does not change the existing wal locations key's TTL" do
- expect { duplicate_job.check! }
- .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([wal_locations[:main], -1])
- .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
- .from([wal_locations[:ci], -1])
- end
+ before do
+ allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
+ end
- it 'sets the existing jid' do
- duplicate_job.check!
+ it_behaves_like 'sets Redis keys with correct TTL'
+ end
- expect(duplicate_job.existing_jid).to eq('existing-key')
+        it "adds the idempotency key to the job's payload" do
+ expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
+ end
end
- end
- end
-
- describe '#update_latest_wal_location!' do
- before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(
- { main: ::ActiveRecord::Base,
- ci: ::ActiveRecord::Base })
- set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
- set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
+ context 'when there was already a job with same arguments in the same queue' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-key')
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ end
+ end
- # read existing_wal_locations
- duplicate_job.check!
- end
+ it { expect(duplicate_job.check!).to eq('existing-key') }
- context "when the key doesn't exists in redis" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
- end
+ it "does not change the existing key's TTL" do
+ expect { duplicate_job.check! }
+ .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
+ .from(['existing-key', -1])
+ end
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ it "does not change the existing wal locations key's TTL" do
+ expect { duplicate_job.check! }
+ .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([wal_locations[:main], -1])
+ .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([wal_locations[:ci], -1])
+ end
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ it 'sets the existing jid' do
+ duplicate_job.check!
- it 'stores a wal location to redis with an offset relative to existing wal location' do
- expect { duplicate_job.update_latest_wal_location! }
- .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from([])
- .to(new_wal_location_with_offset[:main])
- .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from([])
- .to(new_wal_location_with_offset[:ci])
+ expect(duplicate_job.existing_jid).to eq('existing-key')
+ end
end
end
- context "when the key exists in redis" do
+ describe '#update_latest_wal_location!' do
before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
- end
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(
+ { main: ::ActiveRecord::Base,
+ ci: ::ActiveRecord::Base })
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
+ set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
- context "when the new offset is bigger then the existing one" do
+ # read existing_wal_locations
+ duplicate_job.check!
+ end
+
+      context "when the key doesn't exist in redis" do
let(:existing_wal) do
{
main: '0/D525E3A0',
@@ -217,14 +181,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A3', '3'],
- ci: ['AB/12342', '2']
- }
- end
-
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
@@ -233,154 +189,335 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
- it 'updates a wal location to redis with an offset' do
+ let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+
+ it 'stores a wal location to redis with an offset relative to existing wal location' do
expect { duplicate_job.update_latest_wal_location! }
.to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
+ .from([])
.to(new_wal_location_with_offset[:main])
.and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
+ .from([])
.to(new_wal_location_with_offset[:ci])
end
end
- context "when the old offset is not bigger then the existing one" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
+ context "when the key exists in redis" do
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
end
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A2', '2'],
- ci: ['AB/12342', '2']
- }
+        context "when the new offset is bigger than the existing one" do
+ let(:existing_wal) do
+ {
+ main: '0/D525E3A0',
+ ci: 'AB/12340'
+ }
+ end
+
+ let(:stored_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A3', '3'],
+ ci: ['AB/12342', '2']
+ }
+ end
+
+ let(:new_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A8', '8'],
+ ci: ['AB/12345', '5']
+ }
+ end
+
+ it 'updates a wal location to redis with an offset' do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from(stored_wal_location_with_offset[:main])
+ .to(new_wal_location_with_offset[:main])
+ .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from(stored_wal_location_with_offset[:ci])
+ .to(new_wal_location_with_offset[:ci])
+ end
end
- it "does not update a wal location to redis with an offset" do
- expect { duplicate_job.update_latest_wal_location! }
- .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
- .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
+        context "when the new offset is not bigger than the existing one" do
+ let(:existing_wal) do
+ {
+ main: '0/D525E3A0',
+ ci: 'AB/12340'
+ }
+ end
+
+ let(:stored_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A8', '8'],
+ ci: ['AB/12345', '5']
+ }
+ end
+
+ let(:new_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A2', '2'],
+ ci: ['AB/12342', '2']
+ }
+ end
+
+ it "does not update a wal location to redis with an offset" do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from(stored_wal_location_with_offset[:main])
+ .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from(stored_wal_location_with_offset[:ci])
+ end
end
end
end
- end
- describe '#latest_wal_locations' do
- context 'when job was deduplicated and wal locations were already persisted' do
- before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
- end
+ describe '#latest_wal_locations' do
+ context 'when job was deduplicated and wal locations were already persisted' do
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
+ end
- it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
- end
+ it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
+ end
- context 'when job is not deduplication and wal locations were not persisted' do
- it { expect(duplicate_job.latest_wal_locations).to be_empty }
+      context 'when the job was not deduplicated and wal locations were not persisted' do
+ it { expect(duplicate_job.latest_wal_locations).to be_empty }
+ end
end
- end
- describe '#delete!' do
- context "when we didn't track the definition" do
- it { expect { duplicate_job.delete! }.not_to raise_error }
- end
+ describe '#delete!' do
+ context "when we didn't track the definition" do
+ it { expect { duplicate_job.delete! }.not_to raise_error }
+ end
- context 'when the key exists in redis' do
- before do
- set_idempotency_key(idempotency_key, 'existing-jid')
- set_idempotency_key(deduplicated_flag_key, 1)
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
- set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
+ context 'when the key exists in redis' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-jid')
+ set_idempotency_key(deduplicated_flag_key, 1)
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
+ end
end
- end
- shared_examples 'deleting the duplicate job' do
- shared_examples 'deleting keys from redis' do |key_name|
- it "removes the #{key_name} from redis" do
- expect { duplicate_job.delete! }
- .to change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
- .to([nil, -2])
+ shared_examples 'deleting the duplicate job' do
+ shared_examples 'deleting keys from redis' do |key_name|
+ it "removes the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ .to([nil, -2])
+ end
+ end
+
+ shared_examples 'does not delete key from redis' do |key_name|
+ it "does not remove the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to not_change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ end
+ end
+
+ it_behaves_like 'deleting keys from redis', 'idempotent key' do
+ let(:key) { idempotency_key }
+ let(:from_value) { 'existing-jid' }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
+ let(:key) { deduplicated_flag_key }
+ let(:from_value) { '1' }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
+ let(:key) { wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
+ let(:key) { wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
end
end
- shared_examples 'does not delete key from redis' do |key_name|
- it "does not remove the #{key_name} from redis" do
- expect { duplicate_job.delete! }
- .to not_change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
+ context 'when the idempotency key is not part of the job' do
+ it_behaves_like 'deleting the duplicate job'
+
+ it 'recalculates the idempotency hash' do
+ expect(duplicate_job).to receive(:idempotency_hash).and_call_original
+
+ duplicate_job.delete!
end
end
- it_behaves_like 'deleting keys from redis', 'idempotent key' do
- let(:key) { idempotency_key }
- let(:from_value) { 'existing-jid' }
+ context 'when the idempotency key is part of the job' do
+ let(:idempotency_key) { 'not the same as what we calculate' }
+ let(:job) { super().merge('idempotency_key' => idempotency_key) }
+
+ it_behaves_like 'deleting the duplicate job'
+
+ it 'does not recalculate the idempotency hash' do
+ expect(duplicate_job).not_to receive(:idempotency_hash)
+
+ duplicate_job.delete!
+ end
end
+ end
+ end
- it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
- let(:key) { deduplicated_flag_key }
- let(:from_value) { '1' }
+ describe '#set_deduplicated_flag!' do
+ context 'when the job is reschedulable' do
+ before do
+ allow(duplicate_job).to receive(:reschedulable?) { true }
end
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
- let(:key) { existing_wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
+ it 'sets the key in Redis' do
+ duplicate_job.set_deduplicated_flag!
+
+ flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
+
+ expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
end
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
- let(:key) { existing_wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
+ it 'sets, gets and cleans up the deduplicated flag' do
+ expect(duplicate_job.should_reschedule?).to eq(false)
+
+ duplicate_job.set_deduplicated_flag!
+ expect(duplicate_job.should_reschedule?).to eq(true)
+
+ duplicate_job.delete!
+ expect(duplicate_job.should_reschedule?).to eq(false)
end
+ end
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
- let(:key) { wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
+ context 'when the job is not reschedulable' do
+ before do
+ allow(duplicate_job).to receive(:reschedulable?) { false }
end
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
- let(:key) { wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
+ it 'does not set the key in Redis' do
+ duplicate_job.set_deduplicated_flag!
+
+ flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
+
+ expect(flag).to be_nil
end
- end
- context 'when the idempotency key is not part of the job' do
- it_behaves_like 'deleting the duplicate job'
+ it 'does not set the deduplicated flag' do
+ expect(duplicate_job.should_reschedule?).to eq(false)
- it 'recalculates the idempotency hash' do
- expect(duplicate_job).to receive(:idempotency_hash).and_call_original
+ duplicate_job.set_deduplicated_flag!
+ expect(duplicate_job.should_reschedule?).to eq(false)
duplicate_job.delete!
+ expect(duplicate_job.should_reschedule?).to eq(false)
end
end
+ end
+
+ describe '#duplicate?' do
+ it "raises an error if the check wasn't performed" do
+ expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
+ end
- context 'when the idempotency key is part of the job' do
- let(:idempotency_key) { 'not the same as what we calculate' }
- let(:job) { super().merge('idempotency_key' => idempotency_key) }
+ it 'returns false if the existing jid equals the job jid' do
+ duplicate_job.check!
- it_behaves_like 'deleting the duplicate job'
+ expect(duplicate_job.duplicate?).to be(false)
+ end
- it 'does not recalculate the idempotency hash' do
- expect(duplicate_job).not_to receive(:idempotency_hash)
+ it 'returns true if the existing jid is different from the job jid' do
+ set_idempotency_key(idempotency_key, 'a different jid')
+ duplicate_job.check!
- duplicate_job.delete!
+ expect(duplicate_job.duplicate?).to be(true)
+ end
+ end
+
+ def existing_wal_location_key(idempotency_key, connection_name)
+ "#{idempotency_key}:#{connection_name}:existing_wal_location"
+ end
+
+ def wal_location_key(idempotency_key, connection_name)
+ "#{idempotency_key}:#{connection_name}:wal_location"
+ end
+
+ def set_idempotency_key(key, value = '1')
+ with_redis { |r| r.set(key, value) }
+ end
+
+ def rpush_to_redis_key(key, wal, offset)
+ with_redis { |r| r.rpush(key, [wal, offset]) }
+ end
+
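+ # Fetches [value, ttl] for the key in a single pipelined Redis round trip.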
+ def read_idempotency_key_with_ttl(key)
+ with_redis do |redis|
+ redis.pipelined do |p|
+ p.get(key)
+ p.ttl(key)
end
end
end
+
+ def read_range_from_redis(key)
+ with_redis do |redis|
+ redis.lrange(key, 0, -1)
+ end
+ end
+ end
+
+ context 'with multi-store feature flags turned on' do
+ def with_redis(&block)
+ Gitlab::Redis::DuplicateJobs.with(&block)
+ end
+
+ it 'uses Gitlab::Redis::DuplicateJobs.with' do
+ expect(Gitlab::Redis::DuplicateJobs).to receive(:with).and_call_original
+ expect(Sidekiq).not_to receive(:redis)
+
+ duplicate_job.check!
+ end
+
+ it_behaves_like 'tracking duplicates in redis'
+ end
+
+ context 'when both multi-store feature flags are off' do
+ def with_redis(&block)
+ Sidekiq.redis(&block)
+ end
+
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_duplicate_jobs: false)
+ stub_feature_flags(use_primary_store_as_default_for_duplicate_jobs: false)
+ end
+
+ it 'uses Sidekiq.redis' do
+ expect(Sidekiq).to receive(:redis).and_call_original
+ expect(Gitlab::Redis::DuplicateJobs).not_to receive(:with)
+
+ duplicate_job.check!
+ end
+
+ it_behaves_like 'tracking duplicates in redis'
end
describe '#scheduled?' do
@@ -449,75 +586,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- describe '#set_deduplicated_flag!' do
- context 'when the job is reschedulable' do
- before do
- allow(duplicate_job).to receive(:reschedulable?) { true }
- end
-
- it 'sets the key in Redis' do
- duplicate_job.set_deduplicated_flag!
-
- flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
- end
-
- it 'sets, gets and cleans up the deduplicated flag' do
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.set_deduplicated_flag!
- expect(duplicate_job.should_reschedule?).to eq(true)
-
- duplicate_job.delete!
- expect(duplicate_job.should_reschedule?).to eq(false)
- end
- end
-
- context 'when the job is not reschedulable' do
- before do
- allow(duplicate_job).to receive(:reschedulable?) { false }
- end
-
- it 'does not set the key in Redis' do
- duplicate_job.set_deduplicated_flag!
-
- flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to be_nil
- end
-
- it 'does not set the deduplicated flag' do
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.set_deduplicated_flag!
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.delete!
- expect(duplicate_job.should_reschedule?).to eq(false)
- end
- end
- end
-
- describe '#duplicate?' do
- it "raises an error if the check wasn't performed" do
- expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
- end
-
- it 'returns false if the existing jid equals the job jid' do
- duplicate_job.check!
-
- expect(duplicate_job.duplicate?).to be(false)
- end
-
- it 'returns false if the existing jid is different from the job jid' do
- set_idempotency_key(idempotency_key, 'a different jid')
- duplicate_job.check!
-
- expect(duplicate_job.duplicate?).to be(true)
- end
- end
-
describe '#scheduled_at' do
let(:scheduled_at) { 42 }
let(:job) do
@@ -592,35 +660,4 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
end
-
- def existing_wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:existing_wal_location"
- end
-
- def wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:wal_location"
- end
-
- def set_idempotency_key(key, value = '1')
- Sidekiq.redis { |r| r.set(key, value) }
- end
-
- def rpush_to_redis_key(key, wal, offset)
- Sidekiq.redis { |r| r.rpush(key, [wal, offset]) }
- end
-
- def read_idempotency_key_with_ttl(key)
- Sidekiq.redis do |redis|
- redis.pipelined do |p|
- p.get(key)
- p.ttl(key)
- end
- end
- end
-
- def read_range_from_redis(key)
- Sidekiq.redis do |redis|
- redis.lrange(key, 0, -1)
- end
- end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
index 963301bc001..ab9d14ad729 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
@@ -23,8 +23,15 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut
end
end
+ it 'deletes the lock even if an error occurs' do
+ expect(fake_duplicate_job).not_to receive(:scheduled?)
+ expect(fake_duplicate_job).to receive(:delete!).once
+
+ perform_strategy_with_error
+ end
+
it 'does not reschedule the job even if deduplication happened' do
- expect(fake_duplicate_job).to receive(:delete!)
+ expect(fake_duplicate_job).to receive(:delete!).once
expect(fake_duplicate_job).not_to receive(:reschedule)
strategy.perform({}) do
@@ -33,16 +40,33 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut
end
context 'when job is reschedulable' do
- it 'reschedules the job if deduplication happened' do
+ before do
allow(fake_duplicate_job).to receive(:should_reschedule?) { true }
+ end
- expect(fake_duplicate_job).to receive(:delete!)
+ it 'reschedules the job if deduplication happened' do
+ expect(fake_duplicate_job).to receive(:delete!).once
expect(fake_duplicate_job).to receive(:reschedule).once
strategy.perform({}) do
proc.call
end
end
+
+ it 'does not reschedule the job if an error occurs' do
+ expect(fake_duplicate_job).to receive(:delete!).once
+ expect(fake_duplicate_job).not_to receive(:reschedule)
+
+ perform_strategy_with_error
+ end
+ end
+
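+ # Runs the strategy with a block that raises, asserting the error propagates to the caller.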
+ def perform_strategy_with_error
+ expect do
+ strategy.perform({}) do
+ raise 'expected error'
+ end
+ end.to raise_error(RuntimeError, 'expected error')
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
index 3baa0c6f967..821d8b8fe7b 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
@@ -50,6 +50,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
end
describe "#call" do
+ context 'root_caller_id' do
+ it 'uses caller_id of the current context' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'CALLER') do
+ TestWithContextWorker.perform_async
+ end
+
+ job = TestWithContextWorker.jobs.last
+ expect(job['meta.root_caller_id']).to eq('CALLER')
+ end
+
+ it 'uses root_caller_id instead of caller_id of the current context' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'CALLER', root_caller_id: 'ROOT_CALLER') do
+ TestWithContextWorker.perform_async
+ end
+
+ job = TestWithContextWorker.jobs.last
+ expect(job['meta.root_caller_id']).to eq('ROOT_CALLER')
+ end
+ end
+
it 'applies a context for jobs scheduled in batch' do
user_per_job = { 'job1' => build_stubbed(:user, username: 'user-1'),
'job2' => build_stubbed(:user, username: 'user-2') }
@@ -97,7 +117,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
end
it 'does not set any explicit feature category for mailers', :sidekiq_mailers do
- expect(Gitlab::ApplicationContext).not_to receive(:with_context)
+ expect(Gitlab::ApplicationContext).to receive(:with_context).with(hash_excluding(feature_category: anything))
TestMailer.test_mail.deliver_later
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index c94deb8e008..027697db7e1 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -3,138 +3,175 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
- describe '.set' do
- it 'stores the job ID' do
- described_class.set('123')
+ shared_examples 'tracking status in redis' do
+ describe '.set' do
+ it 'stores the job ID' do
+ described_class.set('123')
+
+ key = described_class.key_for('123')
+
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(true)
+ expect(redis.ttl(key) > 0).to eq(true)
+ expect(redis.get(key)).to eq('1')
+ end
+ end
- key = described_class.key_for('123')
+ it 'allows overriding the expiration time' do
+ described_class.set('123', described_class::DEFAULT_EXPIRATION * 2)
+
+ key = described_class.key_for('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(true)
- expect(redis.ttl(key) > 0).to eq(true)
- expect(redis.get(key)).to eq('1')
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(true)
+ expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
+ expect(redis.get(key)).to eq('1')
+ end
end
- end
- it 'allows overriding the expiration time' do
- described_class.set('123', described_class::DEFAULT_EXPIRATION * 2)
+ it 'does not store anything with a nil expiry' do
+ described_class.set('123', nil)
- key = described_class.key_for('123')
+ key = described_class.key_for('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(true)
- expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
- expect(redis.get(key)).to eq('1')
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(false)
+ end
end
end
- it 'does not store anything with a nil expiry' do
- described_class.set('123', nil)
+ describe '.unset' do
+ it 'removes the job ID' do
+ described_class.set('123')
+ described_class.unset('123')
- key = described_class.key_for('123')
+ key = described_class.key_for('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(false)
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(false)
+ end
end
end
- end
- describe '.unset' do
- it 'removes the job ID' do
- described_class.set('123')
- described_class.unset('123')
+ describe '.all_completed?' do
+ it 'returns true if all jobs have been completed' do
+ expect(described_class.all_completed?(%w(123))).to eq(true)
+ end
- key = described_class.key_for('123')
+ it 'returns false if a job has not yet been completed' do
+ described_class.set('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(false)
+ expect(described_class.all_completed?(%w(123 456))).to eq(false)
end
end
- end
- describe '.all_completed?' do
- it 'returns true if all jobs have been completed' do
- expect(described_class.all_completed?(%w(123))).to eq(true)
- end
+ describe '.running?' do
+ it 'returns true if job is running' do
+ described_class.set('123')
- it 'returns false if a job has not yet been completed' do
- described_class.set('123')
+ expect(described_class.running?('123')).to be(true)
+ end
- expect(described_class.all_completed?(%w(123 456))).to eq(false)
+ it 'returns false if job is not found' do
+ expect(described_class.running?('123')).to be(false)
+ end
end
- end
- describe '.running?' do
- it 'returns true if job is running' do
- described_class.set('123')
+ describe '.num_running' do
+ it 'returns 0 if all jobs have been completed' do
+ expect(described_class.num_running(%w(123))).to eq(0)
+ end
+
+ it 'returns 2 if two jobs are still running' do
+ described_class.set('123')
+ described_class.set('456')
- expect(described_class.running?('123')).to be(true)
+ expect(described_class.num_running(%w(123 456 789))).to eq(2)
+ end
end
- it 'returns false if job is not found' do
- expect(described_class.running?('123')).to be(false)
+ describe '.num_completed' do
+ it 'returns 1 if all jobs have been completed' do
+ expect(described_class.num_completed(%w(123))).to eq(1)
+ end
+
+ it 'returns 1 if a job has not yet been completed' do
+ described_class.set('123')
+ described_class.set('456')
+
+ expect(described_class.num_completed(%w(123 456 789))).to eq(1)
+ end
end
- end
- describe '.num_running' do
- it 'returns 0 if all jobs have been completed' do
- expect(described_class.num_running(%w(123))).to eq(0)
+ describe '.completed_jids' do
+ it 'returns the completed job' do
+ expect(described_class.completed_jids(%w(123))).to eq(['123'])
+ end
+
+ it 'returns only the jobs completed' do
+ described_class.set('123')
+ described_class.set('456')
+
+ expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
+ end
end
- it 'returns 2 if two jobs are still running' do
- described_class.set('123')
- described_class.set('456')
+ describe '.job_status' do
+ it 'returns an array of boolean values' do
+ described_class.set('123')
+ described_class.set('456')
+ described_class.unset('123')
- expect(described_class.num_running(%w(123 456 789))).to eq(2)
+ expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false])
+ end
+
+ it 'handles an empty array' do
+ expect(described_class.job_status([])).to eq([])
+ end
end
end
- describe '.num_completed' do
- it 'returns 1 if all jobs have been completed' do
- expect(described_class.num_completed(%w(123))).to eq(1)
+ context 'with multi-store feature flags turned on' do
+ def with_redis(&block)
+ Gitlab::Redis::SidekiqStatus.with(&block)
end
- it 'returns 1 if a job has not yet been completed' do
- described_class.set('123')
- described_class.set('456')
+ it 'uses Gitlab::Redis::SidekiqStatus.with' do
+ expect(Gitlab::Redis::SidekiqStatus).to receive(:with).and_call_original
+ expect(Sidekiq).not_to receive(:redis)
- expect(described_class.num_completed(%w(123 456 789))).to eq(1)
+ described_class.job_status(%w(123 456 789))
end
- end
- describe '.key_for' do
- it 'returns the key for a job ID' do
- key = described_class.key_for('123')
+ it_behaves_like 'tracking status in redis'
+ end
- expect(key).to be_an_instance_of(String)
- expect(key).to include('123')
+ context 'when both multi-store feature flags are off' do
+ def with_redis(&block)
+ Sidekiq.redis(&block)
end
- end
- describe '.completed_jids' do
- it 'returns the completed job' do
- expect(described_class.completed_jids(%w(123))).to eq(['123'])
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_sidekiq_status: false)
+ stub_feature_flags(use_primary_store_as_default_for_sidekiq_status: false)
end
- it 'returns only the jobs completed' do
- described_class.set('123')
- described_class.set('456')
+ it 'uses Sidekiq.redis' do
+ expect(Sidekiq).to receive(:redis).and_call_original
+ expect(Gitlab::Redis::SidekiqStatus).not_to receive(:with)
- expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
+ described_class.job_status(%w(123 456 789))
end
- end
- describe '.job_status' do
- it 'returns an array of boolean values' do
- described_class.set('123')
- described_class.set('456')
- described_class.unset('123')
+ it_behaves_like 'tracking status in redis'
+ end
- expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false])
- end
+ describe '.key_for' do
+ it 'returns the key for a job ID' do
+ key = described_class.key_for('123')
- it 'handles an empty array' do
- expect(described_class.job_status([])).to eq([])
+ expect(key).to be_an_instance_of(String)
+ expect(key).to include('123')
end
end
end
diff --git a/spec/lib/gitlab/slash_commands/deploy_spec.rb b/spec/lib/gitlab/slash_commands/deploy_spec.rb
index 71fca1e1fc8..5167523ff58 100644
--- a/spec/lib/gitlab/slash_commands/deploy_spec.rb
+++ b/spec/lib/gitlab/slash_commands/deploy_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::SlashCommands::Deploy do
context 'with environment' do
let!(:staging) { create(:environment, name: 'staging', project: project) }
let!(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:build) { create(:ci_build, pipeline: pipeline, environment: 'production') }
let!(:deployment) { create(:deployment, :success, environment: staging, deployable: build) }
context 'without actions' do
diff --git a/spec/lib/gitlab/sql/cte_spec.rb b/spec/lib/gitlab/sql/cte_spec.rb
index 18ae2cb065f..523380eae34 100644
--- a/spec/lib/gitlab/sql/cte_spec.rb
+++ b/spec/lib/gitlab/sql/cte_spec.rb
@@ -3,15 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::SQL::CTE do
- describe '#to_arel' do
+ shared_examples '#to_arel' do
it 'generates an Arel relation for the CTE body' do
- relation = User.where(id: 1)
cte = described_class.new(:cte_name, relation)
sql = cte.to_arel.to_sql
name = ApplicationRecord.connection.quote_table_name(:cte_name)
sql1 = ApplicationRecord.connection.unprepared_statement do
- relation.except(:order).to_sql
+ relation.is_a?(String) ? relation : relation.to_sql
end
expected = [
@@ -25,6 +24,20 @@ RSpec.describe Gitlab::SQL::CTE do
end
end
+ describe '#to_arel' do
+ context 'when relation is an ActiveRecord::Relation' do
+ let(:relation) { User.where(id: 1) }
+
+ include_examples '#to_arel'
+ end
+
+ context 'when relation is a String' do
+ let(:relation) { User.where(id: 1).to_sql }
+
+ include_examples '#to_arel'
+ end
+ end
+
describe '#alias_to' do
it 'returns an alias for the CTE' do
cte = described_class.new(:cte_name, nil)
diff --git a/spec/lib/gitlab/ssh/signature_spec.rb b/spec/lib/gitlab/ssh/signature_spec.rb
new file mode 100644
index 00000000000..e8d366f0762
--- /dev/null
+++ b/spec/lib/gitlab/ssh/signature_spec.rb
@@ -0,0 +1,227 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ssh::Signature do
+ # ssh-keygen -t ed25519
+ let_it_be(:committer_email) { 'ssh-commit-test@example.com' }
+ let_it_be(:public_key_text) { 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJKOfqOH0fDde+Ua/1SObkXB1CEDF5M6UfARMpW3F87u' }
+ let_it_be_with_reload(:user) { create(:user, email: committer_email) }
+ let_it_be_with_reload(:key) { create(:key, key: public_key_text, user: user) }
+
+ let(:signed_text) { 'This message was signed by an ssh key' }
+
+ let(:signature_text) do
+ # ssh-keygen -Y sign -n file -f id_test message.txt
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAECQa95KgBkgbMwIPNwHRjHu0WYrKvAc5O/FaBXlTDcPWQHi8WRDhbPNN6MqSYLg/S
+ hsei6Y8VYPv85StrEHYdoF
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ subject(:signature) do
+ described_class.new(
+ signature_text,
+ signed_text,
+ committer_email
+ )
+ end
+
+ shared_examples 'verified signature' do
+ it 'reports verified status' do
+ expect(signature.verification_status).to eq(:verified)
+ end
+ end
+
+ shared_examples 'unverified signature' do
+ it 'reports unverified status' do
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ describe 'signature verification' do
+ context 'when signature is valid and user email is verified' do
+ it_behaves_like 'verified signature'
+ end
+
+ context 'when using an RSA key' do
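+ # e.g. ssh-keygen -t rsa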
+ let(:public_key_text) do
+ <<~KEY.delete("\n")
+ ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCr3ucg9tLf87S2TxgeDaO4Cs5Mzv7wwi5w
+ OnSG8hE/Zj7xzf0kXAYns/dHhPilkQMCulMQuGGprGzDJXZ9WrrVDHgBj2+kLB8cc+XYIb29
+ HPsoz5a1T776wWrzs5cw3Vbb0ZEMPG27SfJ+HtIqnIAcgBoRxgP/+I9we7tVxrTuog/9jSzU
+ H1IscwfwgKdUrvN5cyhqqxWspwZVlf6s4jaVjC9sKlF7u9CBCxqM2G7GZRKH2sEV2Tw0mT4z
+ 39UQ5uz9+4hxWChosiQChrT9zSJDGWQm3WGn5ubYPeB/xINEKkFxuEupnSK7l8PQxeLAwlcN
+ YHKMkHdO16O6PlpxvcLR1XVy4F12NXCxFjTr8GmFvJTvevf9iuFRmYQpffqm+EMN0shuhPag
+ Z1poVK7ZMO49b4HD6csGwDjXEgNAnyi7oPV1WMHVy+xi2j+yaAgiVk50kgTwp9sGkHTiMTM8
+ YWjCq+Hb+HXLINmqO5V1QChT7PAFYycmQ0Fe2x39eLLMHy0=
+ KEY
+ end
+
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAAZcAAAAHc3NoLXJzYQAAAAMBAAEAAAGBAKve5yD20t/ztLZPGB4No7
+ gKzkzO/vDCLnA6dIbyET9mPvHN/SRcBiez90eE+KWRAwK6UxC4YamsbMMldn1autUMeAGP
+ b6QsHxxz5dghvb0c+yjPlrVPvvrBavOzlzDdVtvRkQw8bbtJ8n4e0iqcgByAGhHGA//4j3
+ B7u1XGtO6iD/2NLNQfUixzB/CAp1Su83lzKGqrFaynBlWV/qziNpWML2wqUXu70IELGozY
+ bsZlEofawRXZPDSZPjPf1RDm7P37iHFYKGiyJAKGtP3NIkMZZCbdYafm5tg94H/Eg0QqQX
+ G4S6mdIruXw9DF4sDCVw1gcoyQd07Xo7o+WnG9wtHVdXLgXXY1cLEWNOvwaYW8lO969/2K
+ 4VGZhCl9+qb4Qw3SyG6E9qBnWmhUrtkw7j1vgcPpywbAONcSA0CfKLug9XVYwdXL7GLaP7
+ JoCCJWTnSSBPCn2waQdOIxMzxhaMKr4dv4dcsg2ao7lXVAKFPs8AVjJyZDQV7bHf14sswf
+ LQAAAARmaWxlAAAAAAAAAAZzaGE1MTIAAAGUAAAADHJzYS1zaGEyLTUxMgAAAYAXgXpXWw
+ A1fYHTUON+e1yrTw8AKB4ymfqpR9Zr1OUmYUKJ9xXvvyNCfKHL6XD14CkMu1Tx8Z3TTPG9
+ C6uAXBniKRwwaLVOKffZMshf5sbjcy65KkqBPC7n/cDiCAeoJ8Y05trEDV62+pOpB2lLdv
+ pwwg2o0JaoLbdRcKCD0pw1u0O7VDDngTKFZ4ghHrEslxwlFruht1h9hs3rmdITlT0RMNuU
+ PHGAIB56u4E4UeoMd3D5rga+4Boj0s6551VgP3vCmcz9ZojPHhTCQdUZU1yHdEBTadYTq6
+ UWHhQwDCUDkSNKCRxWo6EyKZQeTakedAt4qkdSpSUCKOJGWKmPOfAm2/sDEmSxffRdxRRg
+ QUe8lklyFTZd6U/ZkJ/y7VR46fcSkEqLSLd9jAZT/3HJXbZfULpwsTcvcLcJLkCuzHEaU1
+ LRyJBsanLCYHTv7ep5PvIuAngUWrXK2eb7oacVs94mWXfs1PG482Ym4+bZA5u0QliGTVaC
+ M2EMhRTf0cqFuA4=
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ before do
+ key.update!(key: public_key_text)
+ end
+
+ it_behaves_like 'verified signature'
+ end
+
+ context 'when signed text is an empty string' do
+ let(:signed_text) { '' }
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAEC1y2I7o3KqKFlnM+MLkhIo+uRX3YQOYCqycfibyfvmkZTcwqMxgNBInBM9pY3VvS
+ sbW2iEdgz34agHbi+1BHIM
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'verified signature'
+ end
+
+ context 'when signed text is nil' do
+ let(:signed_text) { nil }
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAEC1y2I7o3KqKFlnM+MLkhIo+uRX3YQOYCqycfibyfvmkZTcwqMxgNBInBM9pY3VvS
+ sbW2iEdgz34agHbi+1BHIM
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when committer_email is empty' do
+ let(:committer_email) { '' }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when committer_email is nil' do
+ let(:committer_email) { nil }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature_text is empty' do
+ let(:signature_text) { '' }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature_text is nil' do
+ let(:signature_text) { nil }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when user email is not verified' do
+ before do
+ user.update!(confirmed_at: nil)
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when no user exists with the committer email' do
+ let(:committer_email) { 'different-email+ssh-commit-test@example.com' }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature is invalid' do
+ let(:signature_text) do
+ # truncated base64
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAECQa95KgBkgbMwIPNwHRjHu0WYrKvAc5O/FaBXlTDcPWQHi8WRDhbPNN6MqSYLg/S
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature is for a different message' do
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgQtog20+l2pMcPnuoaWXuNpw9u7
+ OzPnJzdLUon0+ELNQAAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAEB3/B+6c3+XqEuqjiqlVQwQmUdj8WquROtkhdtScEOP8GXcGQx+aaQs5nq4ZJCuu5
+ ywcU+4xQaLVpCf7tfGWa4K
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when message has been tampered with' do
+ let(:signed_text) do
+ <<~MSG
+ This message was signed by an ssh key
+ The pubkey fingerprint is SHA256:RjzeOilYHkiHqz5fefdnrWr8qn5nbroAisuuTMoH9PU
+ MSG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when key does not exist in GitLab' do
+ before do
+ key.delete
+ end
+
+ it 'reports unknown_key status' do
+ expect(signature.verification_status).to eq(:unknown_key)
+ end
+ end
+
+ context 'when key belongs to someone other than the committer' do
+ let_it_be(:other_user) { create(:user, email: 'other-user@example.com') }
+
+ let(:committer_email) { other_user.email }
+
+ it 'reports other_user status' do
+ expect(signature.verification_status).to eq(:other_user)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb
index 422b6f925a1..114a18cf99a 100644
--- a/spec/lib/gitlab/ssh_public_key_spec.rb
+++ b/spec/lib/gitlab/ssh_public_key_spec.rb
@@ -334,6 +334,107 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true, fips_mode: false do
include_examples 'raises error when the key is represented by a class that is not in the list of supported technologies'
end
+ describe '#banned?' do
+ subject { public_key.banned? }
+
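+ # Keys expected to be banned (the list includes the well-known insecure Vagrant key).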
+ where(:key) do
+ [
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2y' \
+ 'x5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW' \
+ '26s2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIBnZQ+6nhlPX/JnX5i5hXpljJ89bSnnrsSs51' \
+ 'hSPuoJGmoKowBddISK7s10AIpO0xAWGcr8PUr2FOjEBbDHqlRxoXF0Ocms9xv3ql9EYUQ5' \
+ '+U+M6BymWhNTFPOs6gFHUl8Bw3t6c+SRKBpfRFB0yzBj9d093gSdfTAFoz+yLo4vRw==',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAvIhC5skTzxyHif/7iy3yhxuK6/OB13hjPq' \
+ 'rskogkYFrcW8OK4VJT+5+Fx7wd4sQCnVn8rNqahw/x6sfcOMDI/Xvn4yKU4t8TnYf2MpUV' \
+ 'r4ndz39L5Ds1n7Si1m2suUNxWbKv58I8+NMhlt2ITraSuTU0NGymWOc8+LNi+MHXdLk= SCCP Superuser',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr' \
+ '+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6' \
+ 'IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5' \
+ 'y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0j' \
+ 'MZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98' \
+ 'OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2yx' \
+ '5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW26' \
+ 's2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAn8LoId2N5i28cNKuEWWea3yt0I/LdT/NRO' \
+ 'rF44WZewtxch+DIwteQhM1qL6EKUSqz3Q2geX1crpOsNnyh67xy5lNo086u/QewOCSRAUG' \
+ 'rQCXqFQ4JU8ny/qugWALQHjbIaPHj/3zMK09r4cpTSeAU7CW5nQyTKGmh7v9CAfWfcs= adam@localhost.localdomain',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAJTDsX+8olPZeyr58g9XE0L8PKT5030NZBPlE7np4h' \
+ 'Bqx36HoWarWq1Csn8M57dWN9StKbs03k2ggY6sYJK5AW2EWar70um3pYjKQHiZq7mITmit' \
+ 'sozFN/K7wu2e2iKRgquUwH5SuYoOJ29n7uhaILXiKZP4/H/dDudqPRSY6tJPAAAAFQDtuW' \
+ 'H90mDbU2L/Ms2lfl/cja/wHwAAAIAMBwSHZt2ysOHCFe1WLUvdwVDHUqk3QHTskuuAnMlw' \
+ 'MtSvCaUxSatdHahsMZ9VCHjoQUx6j+TcgRLDbMlRLnwUlb6wpniehLBFk+qakGcREqks5N' \
+ 'xYzFTJXwROzP72jPvVgQyOZHWq81gCild/ljL7hmrduCqYwxDIz4o7U92UKQAAAIBmhSl9' \
+ 'CVPgVMv1xO8DAHVhM1huIIK8mNFrzMJz+JXzBx81ms1kWSeQOC/nraaXFTBlqiQsvB8tzr' \
+ '4xZdbaI/QzVLKNAF5C8BJ4ScNlTIx1aZJwyMil8Nzb+0YAsw5Ja+bEZZvEVlAYnd10qRWr' \
+ 'PeEY1txLMmX3wDa+JvJL7fmuBg==',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAMq5EcIFdfCjJakyQnP/BBp9oc6mpaZVguf0Znp5C4' \
+ '0twiG1lASQJZlM1qOB/hkBWYeBCHUkcOLEnVXSZzB62L+W/LGKodqnsiQPRr57AA6jPc6m' \
+ 'NBnejHai8cSdAl9n/0s2IQjdcrxM8CPq2uEyfm0J3AV6Lrbbxr5NgE5xxM+DAAAAFQCmFk' \
+ '/M7Rx2jexsJ9COpHkHwUjcNQAAAIAdg18oByp/tjjDKhWhmmv+HbVIROkRqSxBvuEZEmcW' \
+ 'lg38mLIT1bydfpSou/V4rI5ctxwCfJ1rRr66pw6GwCrz4fXmyVlhrj7TrktyQ9+zRXhynF' \
+ '4wdNPWErhNHb8tGlSOFiOBcUTlouX3V/ka6Dkd6ZQrZLQFaH+gjfyTZZ82HQAAAIEArsJg' \
+ 'p7RLPOsCeLqoia/eljseBFVDazO5Q0ysUotTw9wgXGGVWREwm8wNggFNb9eCiBAAUfVZVf' \
+ 'hVAtFT0pBf/eIVLPXyaMw3prBt7LqeBrbagODc3WAAdMTPIdYYcOKgv+YvTXa51zG64v6p' \
+ 'QOfS8WXgKCzDl44puXfYeDk5lVQ=',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAKwKBw7D4OA1H/uD4htdh04TBIHdbSjeXUSnWJsce8' \
+ 'C0tvoB01Yarjv9TFj+tfeDYVWtUK1DA1JkyqSuoAtDANJzF4I6Isyd0KPrW3dHFTcg6Xlz' \
+ '8d3KEaHokY93NOmB/xWEkhme8b7Q0U2iZie2pgWbTLXV0FA+lhskTtPHW3+VAAAAFQDRya' \
+ 'yUlVZKXEweF3bUe03zt9e8VQAAAIAEPK1k3Y6ErAbIl96dnUCnZjuWQ7xXy062pf63QuRW' \
+ 'I6LYSscm3f1pEknWUNFr/erQ02pkfi2eP9uHl1TI1ql+UmJX3g3frfssLNZwWXAW0m8PbY' \
+ '3HZSs+f5hevM3ua32pnKDmbQ2WpvKNyycKHi81hSI14xMcdblJolhN5iY8/wAAAIAjEe5+' \
+ '0m/TlBtVkqQbUit+s/g+eB+PFQ+raaQdL1uztW3etntXAPH1MjxsAC/vthWYSTYXORkDFM' \
+ 'hrO5ssE2rfg9io0NDyTIZt+VRQMGdi++dH8ptU+ldl2ZejLFdTJFwFgcfXz+iQ1mx6h9TP' \
+ 'X1crE1KoMAVOj3yKVfKpLB1EkA== root@lbslave',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAN3AITryJMQyOKZjAky+mQ/8pOHIlu4q8pzmR0qotK' \
+ 'aLm2yye5a0PY2rOaQRAzi7EPheBXbqTb8a8TrHhGXI5P7GUHaJho5HhEnw+5TwAvP72L7L' \
+ 'cPwxMxj/rLcR/jV+uLMsVeJVWjwJcUv83yzPXoVjK0hrIm+RLLeuTM+gTylHAAAAFQD5gB' \
+ 'dXsXAiTz1atzMg3xDFF1zlowAAAIAlLy6TCMlOBM0IcPsvP/9bEjDj0M8YZazdqt4amO2I' \
+ 'aNUPYt9/sIsLOQfxIj8myDK1TOp8NyRJep7V5aICG4f3Q+XktlmLzdWn3sjvbWuIAXe1op' \
+ 'jG2T69YhxfHZr8Wn7P4tpCgyqM4uHmUKrfnBzQQ9vkUUWsZoUXM2Z7vUXVfQAAAIAU6eNl' \
+ 'phQWDwx0KOBiiYhF9BM6kDbQlyw8333rAG3G4CcjI2G8eYGtpBNliaD185UjCEsjPiudhG' \
+ 'il/j4Zt/+VY3aGOLoi8kqXBBc8ZAML9bbkXpyhQhMgwiywx3ciFmvSn2UAin8yurStYPQx' \
+ 'tXauZN5PYbdwCHPS7ApIStdpMA== wood@endec1',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAISAE3CAX4hsxTw0dRc0gx8nQ41r3Vkj9OmG6LGeKW' \
+ 'Rmpy7C6vaExuupjxid76fd4aS56lCUEEoRlJ3zE93qoK9acI6EGqGQFLuDZ0fqMyRSX+il' \
+ 'f+1HDo/TRyuraggxp9Hj9LMpZVbpFATMm0+d9Xs7eLmaJjuMsowNlOf8NFdHAAAAFQCwdv' \
+ 'qOAkR6QhuiAapQ/9iVuR0UAQAAAIBpLMo4dhSeWkChfv659WLPftxRrX/HR8YMD/jqa3R4' \
+ 'PsVM2g6dQ1191nHugtdV7uaMeOqOJ/QRWeYM+UYwT0Zgx2LqvgVSjNDfdjk+ZRY8x3SmEx' \
+ 'Fi62mKFoTGSOCXfcAfuanjaoF+sepnaiLUd+SoJShGYHoqR2QWiysTRqknlwAAAIBLEgYm' \
+ 'r9XCSqjENFDVQPFELYKT7Zs9J87PjPS1AP0qF1OoRGZ5mefK6X/6VivPAUWmmmev/BuAs8' \
+ 'M1HtfGeGGzMzDIiU/WZQ3bScLB1Ykrcjk7TOFD6xrnk/inYAp5l29hjidoAONcXoHmUAMY' \
+ 'OKqn63Q2AsDpExVcmfj99/BlpQ=='
+ ]
+ end
+
+ with_them do
+ it { is_expected.to be true }
+ end
+
+ context 'with a valid SSH key' do
+ let(:key) { attributes_for(:rsa_key_2048)[:key] }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with an invalid SSH key' do
+ let(:key) { 'this is not a key' }
+
+ it { is_expected.to be false }
+ end
+ end
+
describe '#fingerprint' do
subject { public_key.fingerprint }
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
deleted file mode 100644
index 9ce6007165b..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
+++ /dev/null
@@ -1,245 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Global do
- let(:global) { described_class.new(hash) }
- let(:default_image_upload_path_value) { 'source/images' }
-
- let(:default_mounts_value) do
- [
- {
- source: 'source',
- target: ''
- }
- ]
- end
-
- let(:default_static_site_generator_value) { 'middleman' }
-
- shared_examples_for 'valid default configuration' do
- describe '#compose!' do
- before do
- global.compose!
- end
-
- it 'creates nodes hash' do
- expect(global.descendants).to be_an Array
- end
-
- it 'creates node object for each entry' do
- expect(global.descendants.count).to eq 3
- end
-
- it 'creates node object using valid class' do
- expect(global.descendants.map(&:class)).to match_array(expected_node_object_classes)
- end
-
- it 'sets a description containing "Static Site Editor" for all nodes' do
- expect(global.descendants.map(&:description)).to all(match(/Static Site Editor/))
- end
-
- describe '#leaf?' do
- it 'is not leaf' do
- expect(global).not_to be_leaf
- end
- end
- end
-
- context 'when not composed' do
- describe '#static_site_generator_value' do
- it 'returns nil' do
- expect(global.static_site_generator_value).to be nil
- end
- end
-
- describe '#leaf?' do
- it 'is leaf' do
- expect(global).to be_leaf
- end
- end
- end
-
- context 'when composed' do
- before do
- global.compose!
- end
-
- describe '#errors' do
- it 'has no errors' do
- expect(global.errors).to be_empty
- end
- end
-
- describe '#image_upload_path_value' do
- it 'returns correct values' do
- expect(global.image_upload_path_value).to eq(default_image_upload_path_value)
- end
- end
-
- describe '#mounts_value' do
- it 'returns correct values' do
- expect(global.mounts_value).to eq(default_mounts_value)
- end
- end
-
- describe '#static_site_generator_value' do
- it 'returns correct values' do
- expect(global.static_site_generator_value).to eq(default_static_site_generator_value)
- end
- end
- end
- end
-
- describe '.nodes' do
- it 'returns a hash' do
- expect(described_class.nodes).to be_a(Hash)
- end
-
- context 'when filtering all the entry/node names' do
- it 'contains the expected node names' do
- expected_node_names = %i[
- image_upload_path
- mounts
- static_site_generator
- ]
- expect(described_class.nodes.keys).to match_array(expected_node_names)
- end
- end
- end
-
- context 'when configuration is valid' do
- context 'when some entries defined' do
- let(:expected_node_object_classes) do
- [
- Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath,
- Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts,
- Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
- ]
- end
-
- let(:hash) do
- {
- image_upload_path: default_image_upload_path_value,
- mounts: default_mounts_value,
- static_site_generator: default_static_site_generator_value
- }
- end
-
- it_behaves_like 'valid default configuration'
- end
- end
-
- context 'when value is an empty hash' do
- let(:expected_node_object_classes) do
- [
- Gitlab::Config::Entry::Unspecified,
- Gitlab::Config::Entry::Unspecified,
- Gitlab::Config::Entry::Unspecified
- ]
- end
-
- let(:hash) { {} }
-
- it_behaves_like 'valid default configuration'
- end
-
- context 'when configuration is not valid' do
- before do
- global.compose!
- end
-
- context 'when a single entry is invalid' do
- let(:hash) do
- { image_upload_path: { not_a_string: true } }
- end
-
- describe '#errors' do
- it 'reports errors' do
- expect(global.errors)
- .to include 'image_upload_path config should be a string'
- end
- end
- end
-
- context 'when a multiple entries are invalid' do
- let(:hash) do
- {
- image_upload_path: { not_a_string: true },
- static_site_generator: { not_a_string: true }
- }
- end
-
- describe '#errors' do
- it 'reports errors' do
- expect(global.errors)
- .to match_array([
- 'image_upload_path config should be a string',
- 'static_site_generator config should be a string',
- "static_site_generator config should be 'middleman'"
- ])
- end
- end
- end
-
- context 'when there is an invalid key' do
- let(:hash) do
- { invalid_key: true }
- end
-
- describe '#errors' do
- it 'reports errors' do
- expect(global.errors)
- .to include 'global config contains unknown keys: invalid_key'
- end
- end
- end
- end
-
- context 'when value is not a hash' do
- let(:hash) { [] }
-
- describe '#valid?' do
- it 'is not valid' do
- expect(global).not_to be_valid
- end
- end
-
- describe '#errors' do
- it 'returns error about invalid type' do
- expect(global.errors.first).to match /should be a hash/
- end
- end
- end
-
- describe '#specified?' do
- it 'is concrete entry that is defined' do
- expect(global.specified?).to be true
- end
- end
-
- describe '#[]' do
- before do
- global.compose!
- end
-
- let(:hash) do
- { static_site_generator: default_static_site_generator_value }
- end
-
- context 'when entry exists' do
- it 'returns correct entry' do
- expect(global[:static_site_generator])
- .to be_an_instance_of Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
- expect(global[:static_site_generator].value).to eq default_static_site_generator_value
- end
- end
-
- context 'when entry does not exist' do
- it 'always return unspecified node' do
- expect(global[:some][:unknown][:node])
- .not_to be_specified
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
deleted file mode 100644
index c2b7fbf6f98..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath do
- subject(:image_upload_path_entry) { described_class.new(config) }
-
- describe 'validations' do
- context 'with a valid config' do
- let(:config) { 'an-image-upload-path' }
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns a image_upload_path key' do
- expect(image_upload_path_entry.value).to eq config
- end
- end
- end
-
- context 'with an invalid config' do
- let(:config) { { not_a_string: true } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports errors about wrong type' do
- expect(image_upload_path_entry.errors)
- .to include 'image upload path config should be a string'
- end
- end
- end
-
- describe '.default' do
- it 'returns default image_upload_path' do
- expect(described_class.default).to eq 'source/images'
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
deleted file mode 100644
index 04248fc60a5..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mount do
- subject(:entry) { described_class.new(config) }
-
- describe 'validations' do
- context 'with a valid config' do
- context 'and target is a non-empty string' do
- let(:config) do
- {
- source: 'source',
- target: 'sub-site'
- }
- end
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns mount configuration' do
- expect(entry.value).to eq config
- end
- end
- end
-
- context 'and target is an empty string' do
- let(:config) do
- {
- source: 'source',
- target: ''
- }
- end
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns mount configuration' do
- expect(entry.value).to eq config
- end
- end
- end
- end
-
- context 'with an invalid config' do
- context 'when source is not a string' do
- let(:config) { { source: 123, target: 'target' } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include 'mount source should be a string'
- end
- end
-
- context 'when source is not present' do
- let(:config) { { target: 'target' } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include "mount source can't be blank"
- end
- end
-
- context 'when target is not a string' do
- let(:config) { { source: 'source', target: 123 } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include 'mount target should be a string'
- end
- end
-
- context 'when there is an unknown key present' do
- let(:config) { { test: 100 } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include 'mount config contains unknown keys: test'
- end
- end
- end
- end
-
- describe '.default' do
- it 'returns default mount' do
- expect(described_class.default)
- .to eq({
- source: 'source',
- target: ''
- })
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
deleted file mode 100644
index 0ae2ece9474..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts do
- subject(:entry) { described_class.new(config) }
-
- describe 'validations' do
- context 'with a valid config' do
- let(:config) do
- [
- {
- source: 'source',
- target: ''
- },
- {
- source: 'sub-site/source',
- target: 'sub-site'
- }
- ]
- end
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns mounts configuration' do
- expect(entry.value).to eq config
- end
- end
- end
-
- context 'with an invalid config' do
- let(:config) { { not_an_array: true } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports errors about wrong type' do
- expect(entry.errors)
- .to include 'mounts config should be a array'
- end
- end
- end
-
- describe '.default' do
- it 'returns default mounts' do
- expect(described_class.default)
- .to eq([{
- source: 'source',
- target: ''
- }])
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
deleted file mode 100644
index a9c730218cf..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator do
- let(:static_site_generator) { described_class.new(config) }
-
- describe 'validations' do
- context 'when value is valid' do
- let(:config) { 'middleman' }
-
- describe '#value' do
- it 'returns a static_site_generator key' do
- expect(static_site_generator.value).to eq config
- end
- end
-
- describe '#valid?' do
- it 'is valid' do
- expect(static_site_generator).to be_valid
- end
- end
- end
-
- context 'when value is invalid' do
- let(:config) { 'not-a-valid-generator' }
-
- describe '#valid?' do
- it 'is not valid' do
- expect(static_site_generator).not_to be_valid
- end
- end
- end
-
- context 'when value has a wrong type' do
- let(:config) { { not_a_string: true } }
-
- it 'reports errors about wrong type' do
- expect(static_site_generator.errors)
- .to include 'static site generator config should be a string'
- end
- end
- end
-
- describe '.default' do
- it 'returns default static_site_generator' do
- expect(described_class.default).to eq 'middleman'
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
deleted file mode 100644
index d444d4f1df7..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
+++ /dev/null
@@ -1,87 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig do
- let(:config) do
- described_class.new(yml)
- end
-
- context 'when config is valid' do
- context 'when config has valid values' do
- let(:yml) do
- <<-EOS
- static_site_generator: middleman
- EOS
- end
-
- describe '#to_hash_with_defaults' do
- it 'returns hash created from string' do
- expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
- end
- end
-
- describe '#valid?' do
- it 'is valid' do
- expect(config).to be_valid
- end
-
- it 'has no errors' do
- expect(config.errors).to be_empty
- end
- end
- end
- end
-
- context 'when a config entry has an empty value' do
- let(:yml) { 'static_site_generator: ' }
-
- describe '#to_hash' do
- it 'returns default value' do
- expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
- end
- end
-
- describe '#valid?' do
- it 'is valid' do
- expect(config).to be_valid
- end
-
- it 'has no errors' do
- expect(config.errors).to be_empty
- end
- end
- end
-
- context 'when config is invalid' do
- context 'when yml is incorrect' do
- let(:yml) { '// invalid' }
-
- describe '.new' do
- it 'raises error' do
- expect { config }.to raise_error(described_class::ConfigError, /Invalid configuration format/)
- end
- end
- end
-
- context 'when config value exists but is not a valid value' do
- let(:yml) { 'static_site_generator: "unsupported-generator"' }
-
- describe '#valid?' do
- it 'is not valid' do
- expect(config).not_to be_valid
- end
-
- it 'has errors' do
- expect(config.errors).not_to be_empty
- end
- end
-
- describe '#errors' do
- it 'returns an array of strings' do
- expect(config.errors).to all(be_an_instance_of(String))
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
deleted file mode 100644
index 8cd3feba339..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
+++ /dev/null
@@ -1,127 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
- subject(:config) { described_class.new(repository, ref, path, return_url) }
-
- let_it_be(:namespace) { create(:namespace, name: 'namespace') }
- let_it_be(:root_group) { create(:group, name: 'group') }
- let_it_be(:subgroup) { create(:group, name: 'subgroup', parent: root_group) }
- let_it_be(:project) { create(:project, :public, :repository, name: 'project', namespace: namespace) }
- let_it_be(:project_with_subgroup) { create(:project, :public, :repository, name: 'project', group: subgroup) }
- let_it_be(:repository) { project.repository }
-
- let(:ref) { 'master' }
- let(:path) { 'README.md' }
- let(:return_url) { 'http://example.com' }
-
- describe '#data' do
- subject { config.data }
-
- it 'returns data for the frontend component' do
- is_expected
- .to match({
- branch: 'master',
- commit_id: repository.commit.id,
- namespace: 'namespace',
- path: 'README.md',
- project: 'project',
- project_id: project.id,
- return_url: 'http://example.com',
- is_supported_content: true,
- base_url: '/namespace/project/-/sse/master%2FREADME.md',
- merge_requests_illustration_path: %r{illustrations/merge_requests}
- })
- end
-
- context 'when namespace is a subgroup' do
- let(:repository) { project_with_subgroup.repository }
-
- it 'returns data for the frontend component' do
- is_expected.to include(
- namespace: 'group/subgroup',
- project: 'project',
- base_url: '/group/subgroup/project/-/sse/master%2FREADME.md'
- )
- end
- end
-
- context 'when file has .md.erb extension' do
- before do
- repository.create_file(
- project.creator,
- path,
- '',
- message: 'message',
- branch_name: ref
- )
- end
-
- let(:ref) { 'main' }
- let(:path) { 'README.md.erb' }
-
- it { is_expected.to include(branch: ref, is_supported_content: true) }
- end
-
- context 'when file path is nested' do
- let(:path) { 'lib/README.md' }
-
- it { is_expected.to include(base_url: '/namespace/project/-/sse/master%2Flib%2FREADME.md') }
- end
-
- context 'when branch is not master or main' do
- let(:ref) { 'my-branch' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when file does not have a markdown extension' do
- let(:path) { 'README.txt' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when file does not have an extension' do
- let(:path) { 'README' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when file does not exist' do
- let(:path) { 'UNKNOWN.md' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when repository is empty' do
- let(:repository) { create(:project_empty_repo).repository }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when return_url is not a valid URL' do
- let(:return_url) { 'example.com' }
-
- it { is_expected.to include(return_url: nil) }
- end
-
- context 'when return_url has a javascript scheme' do
- let(:return_url) { 'javascript:alert(document.domain)' }
-
- it { is_expected.to include(return_url: nil) }
- end
-
- context 'when return_url is missing' do
- let(:return_url) { nil }
-
- it { is_expected.to include(return_url: nil) }
- end
-
- context 'when a commit for the ref cannot be found' do
- let(:ref) { 'nonexistent-ref' }
-
- it { is_expected.to include(commit_id: nil) }
- end
- end
-end
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index 8d5a39baf77..098a58bff83 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ::Gitlab::SubscriptionPortal do
using RSpec::Parameterized::TableSyntax
+ include SubscriptionPortalHelper
let(:env_value) { nil }
@@ -13,9 +14,9 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
describe '.default_subscriptions_url' do
where(:test, :development, :result) do
- false | false | 'https://customers.gitlab.com'
- false | true | 'https://customers.staging.gitlab.com'
- true | false | 'https://customers.staging.gitlab.com'
+ false | false | prod_customers_url
+ false | true | staging_customers_url
+ true | false | staging_customers_url
end
before do
@@ -34,7 +35,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
subject { described_class.subscriptions_url }
context 'when CUSTOMER_PORTAL_URL ENV is unset' do
- it { is_expected.to eq('https://customers.staging.gitlab.com') }
+ it { is_expected.to eq(staging_customers_url) }
end
context 'when CUSTOMER_PORTAL_URL ENV is set' do
@@ -54,17 +55,17 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
context 'url methods' do
where(:method_name, :result) do
- :default_subscriptions_url | 'https://customers.staging.gitlab.com'
- :payment_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_validation'
- :payment_validation_form_id | 'payment_method_validation'
- :registration_validation_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_registration_validation'
- :subscriptions_graphql_url | 'https://customers.staging.gitlab.com/graphql'
- :subscriptions_more_minutes_url | 'https://customers.staging.gitlab.com/buy_pipeline_minutes'
- :subscriptions_more_storage_url | 'https://customers.staging.gitlab.com/buy_storage'
- :subscriptions_manage_url | 'https://customers.staging.gitlab.com/subscriptions'
- :subscriptions_instance_review_url | 'https://customers.staging.gitlab.com/instance_review'
- :subscriptions_gitlab_plans_url | 'https://customers.staging.gitlab.com/gitlab_plans'
- :edit_account_url | 'https://customers.staging.gitlab.com/customers/edit'
+ :default_subscriptions_url | staging_customers_url
+ :payment_form_url | "#{staging_customers_url}/payment_forms/cc_validation"
+ :payment_validation_form_id | 'payment_method_validation'
+ :registration_validation_form_url | "#{staging_customers_url}/payment_forms/cc_registration_validation"
+ :subscriptions_graphql_url | "#{staging_customers_url}/graphql"
+ :subscriptions_more_minutes_url | "#{staging_customers_url}/buy_pipeline_minutes"
+ :subscriptions_more_storage_url | "#{staging_customers_url}/buy_storage"
+ :subscriptions_manage_url | "#{staging_customers_url}/subscriptions"
+ :subscriptions_instance_review_url | "#{staging_customers_url}/instance_review"
+ :subscriptions_gitlab_plans_url | "#{staging_customers_url}/gitlab_plans"
+ :edit_account_url | "#{staging_customers_url}/customers/edit"
end
with_them do
@@ -79,7 +80,10 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/extra_seats"
+ is_expected.to eq(url)
+ end
end
describe '.upgrade_subscription_url' do
@@ -88,7 +92,10 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
let(:plan_id) { 5 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}"
+ is_expected.to eq(url)
+ end
end
describe '.renew_subscription_url' do
@@ -96,6 +103,9 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/renew") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/renew"
+ is_expected.to eq(url)
+ end
end
end
diff --git a/spec/lib/gitlab/themes_spec.rb b/spec/lib/gitlab/themes_spec.rb
index c9dc23d7c14..a41f7d927fe 100644
--- a/spec/lib/gitlab/themes_spec.rb
+++ b/spec/lib/gitlab/themes_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Themes, lib: true do
css = described_class.body_classes
expect(css).to include('ui-indigo')
- expect(css).to include('ui-dark')
+ expect(css).to include('ui-gray')
expect(css).to include('ui-blue')
end
end
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Themes, lib: true do
describe '.by_id' do
it 'returns a Theme by its ID' do
expect(described_class.by_id(1).name).to eq 'Indigo'
- expect(described_class.by_id(3).name).to eq 'Light'
+ expect(described_class.by_id(3).name).to eq 'Light Gray'
end
end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index c88b0af30f6..508b33949a8 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -92,6 +92,34 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
end
+ context 'with incorrect argument type' do
+ context 'when standard_context_type_check FF is disabled' do
+ before do
+ stub_feature_flags(standard_context_type_check: false)
+ end
+
+ subject { described_class.new(project: create(:group)) }
+
+ it 'does not call `track_and_raise_for_dev_exception`' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ snowplow_context
+ end
+ end
+
+ context 'when standard_context_type_check FF is enabled' do
+ before do
+ stub_feature_flags(standard_context_type_check: true)
+ end
+
+ subject { described_class.new(project: create(:group)) }
+
+ it 'does call `track_and_raise_for_dev_exception`' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ snowplow_context
+ end
+ end
+ end
+
it 'contains user id' do
expect(snowplow_context.to_json[:data].keys).to include(:user_id)
end
diff --git a/spec/lib/gitlab/updated_notes_paginator_spec.rb b/spec/lib/gitlab/updated_notes_paginator_spec.rb
deleted file mode 100644
index ce6a7719fb4..00000000000
--- a/spec/lib/gitlab/updated_notes_paginator_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UpdatedNotesPaginator do
- let(:issue) { create(:issue) }
-
- let(:project) { issue.project }
- let(:finder) { NotesFinder.new(user, target: issue, last_fetched_at: last_fetched_at) }
- let(:user) { issue.author }
-
- let!(:page_1) { create_list(:note, 2, noteable: issue, project: project, updated_at: 2.days.ago) }
- let!(:page_2) { [create(:note, noteable: issue, project: project, updated_at: 1.day.ago)] }
-
- let(:page_1_boundary) { page_1.last.updated_at + NotesFinder::FETCH_OVERLAP }
-
- around do |example|
- freeze_time do
- example.run
- end
- end
-
- before do
- stub_const("Gitlab::UpdatedNotesPaginator::LIMIT", 2)
- end
-
- subject(:paginator) { described_class.new(finder.execute, last_fetched_at: last_fetched_at) }
-
- describe 'last_fetched_at: start of time' do
- let(:last_fetched_at) { Time.at(0) }
-
- it 'calculates the first page of notes', :aggregate_failures do
- expect(paginator.notes).to match_array(page_1)
- expect(paginator.metadata).to match(
- more: true,
- last_fetched_at: microseconds(page_1_boundary)
- )
- end
- end
-
- describe 'last_fetched_at: start of final page' do
- let(:last_fetched_at) { page_1_boundary }
-
- it 'calculates a final page', :aggregate_failures do
- expect(paginator.notes).to match_array(page_2)
- expect(paginator.metadata).to match(
- more: false,
- last_fetched_at: microseconds(Time.zone.now)
- )
- end
- end
-
- # Convert a time to an integer number of microseconds
- def microseconds(time)
- (time.to_i * 1_000_000) + time.usec
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb
index 4c86410d609..b7da9b27e19 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb
@@ -4,15 +4,30 @@ require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountImportedProjectsMetric do
let_it_be(:user) { create(:user) }
- let_it_be(:gitea_imports) do
- create_list(:project, 3, import_type: 'gitea', creator_id: user.id, created_at: 3.weeks.ago)
+
+ # Project records have to be created chronologically, because of
+ # metric SQL query optimizations that rely on the fact that `id`s
+ # increment monotonically over time.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/89701
+ let_it_be(:old_import) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 2.months.ago) }
+ let_it_be(:gitea_import_1) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 21.days.ago) }
+
+ let_it_be(:gitea_import_2) do
+ create(:project, import_type: 'gitea', creator_id: user.id, created_at: 20.days.ago)
end
- let_it_be(:bitbucket_imports) do
- create_list(:project, 2, import_type: 'bitbucket', creator_id: user.id, created_at: 3.weeks.ago)
+ let_it_be(:gitea_import_3) do
+ create(:project, import_type: 'gitea', creator_id: user.id, created_at: 19.days.ago)
end
- let_it_be(:old_import) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 2.months.ago) }
+ let_it_be(:bitbucket_import_1) do
+ create(:project, import_type: 'bitbucket', creator_id: user.id, created_at: 2.weeks.ago)
+ end
+
+ let_it_be(:bitbucket_import_2) do
+ create(:project, import_type: 'bitbucket', creator_id: user.id, created_at: 1.week.ago)
+ end
context 'with import_type gitea' do
context 'with all time frame' do
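The chronological-creation comment in this hunk reflects how the instrumented count is computed: a created_at window is translated into an id range and the count is batched over ids, which only works when ids grow in the same order as created_at. A minimal, hypothetical sketch of that pattern (the helper name and batching details are assumptions, not the actual metric class):

  def count_created_between(relation, start_time, finish_time, batch_size: 1_000)
    # Resolve the time window to an id range; correct only when ids are
    # assigned in (roughly) chronological order.
    window    = relation.where(created_at: start_time..finish_time)
    start_id  = window.minimum(:id)
    finish_id = window.maximum(:id)
    return 0 unless start_id

    total = 0
    (start_id..finish_id).step(batch_size) do |lower|
      total += relation.where(id: lower...(lower + batch_size))
                       .where(created_at: start_time..finish_time)
                       .count
    end
    total
  end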
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb
new file mode 100644
index 00000000000..bfc4240def6
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountImportedProjectsTotalMetric do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:gitea_imports) do
+ create_list(:project, 3, import_type: 'gitea', creator_id: user.id, created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:bitbucket_imports) do
+ create_list(:project, 2, import_type: 'bitbucket', creator_id: user.id, created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:old_import) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 2.months.ago) }
+
+ let_it_be(:bulk_import_projects) do
+ create_list(:bulk_import_entity, 3, source_type: 'project_entity', created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:bulk_import_groups) do
+ create_list(:bulk_import_entity, 3, source_type: 'group_entity', created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:old_bulk_import_project) do
+ create(:bulk_import_entity, source_type: 'project_entity', created_at: 2.months.ago)
+ end
+
+ before do
+ allow(ApplicationRecord.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ context 'with all time frame' do
+ let(:expected_value) { 10 }
+ let(:expected_query) do
+ "SELECT (SELECT COUNT(\"projects\".\"id\") FROM \"projects\" WHERE \"projects\".\"import_type\""\
+ " IN ('gitlab_project', 'gitlab', 'github', 'bitbucket', 'bitbucket_server', 'gitea', 'git', 'manifest',"\
+ " 'gitlab_migration'))"\
+ " + (SELECT COUNT(\"bulk_import_entities\".\"id\") FROM \"bulk_import_entities\""\
+ " WHERE \"bulk_import_entities\".\"source_type\" = 1)"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: 'all'
+ end
+
+ context 'for 28d time frame' do
+ let(:expected_value) { 8 }
+ let(:start) { 30.days.ago.to_s(:db) }
+ let(:finish) { 2.days.ago.to_s(:db) }
+ let(:expected_query) do
+ "SELECT (SELECT COUNT(\"projects\".\"id\") FROM \"projects\" WHERE \"projects\".\"import_type\""\
+ " IN ('gitlab_project', 'gitlab', 'github', 'bitbucket', 'bitbucket_server', 'gitea', 'git', 'manifest',"\
+ " 'gitlab_migration')"\
+ " AND \"projects\".\"created_at\" BETWEEN '#{start}' AND '#{finish}')"\
+ " + (SELECT COUNT(\"bulk_import_entities\".\"id\") FROM \"bulk_import_entities\""\
+ " WHERE \"bulk_import_entities\".\"source_type\" = 1 AND \"bulk_import_entities\".\"created_at\""\
+ " BETWEEN '#{start}' AND '#{finish}')"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: '28d'
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb
new file mode 100644
index 00000000000..f7a53cd3dcc
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::JiraImportsTotalImportedIssuesCountMetric do
+ let_it_be(:jira_import_state_1) { create(:jira_import_state, :finished, imported_issues_count: 3) }
+ let_it_be(:jira_import_state_2) { create(:jira_import_state, :finished, imported_issues_count: 2) }
+
+ let(:expected_value) { 5 }
+ let(:expected_query) do
+ 'SELECT SUM("jira_imports"."imported_issues_count") FROM "jira_imports" WHERE "jira_imports"."status" = 4'
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb
new file mode 100644
index 00000000000..180c76d56f3
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::NumbersMetric do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.operation :add
+ metric_class.data do |time_frame|
+ [
+ Gitlab::Usage::Metrics::Instrumentations::CountIssuesMetric.new(time_frame: time_frame).value,
+ Gitlab::Usage::Metrics::Instrumentations::CountBoardsMetric.new(time_frame: time_frame).value
+ ]
+ end
+ end.new(time_frame: 'all')
+ end
+
+ describe '#value' do
+ let_it_be(:issue_1) { create(:issue) }
+ let_it_be(:issue_2) { create(:issue) }
+ let_it_be(:issue_3) { create(:issue) }
+ let_it_be(:issues) { Issue.all }
+
+ let_it_be(:board_1) { create(:board) }
+ let_it_be(:boards) { Board.all }
+
+ before do
+ allow(Issue.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(4)
+ end
+
+ context 'with availability defined' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.operation :add
+ metric_class.data { [1] }
+ metric_class.available? { false }
+ end.new(time_frame: 'all')
+ end
+
+ it 'responds to #available? properly' do
+ expect(subject.available?).to eq(false)
+ end
+ end
+
+ context 'with availability not defined' do
+ subject do
+ Class.new(described_class) do
+ operation :add
+ data { [] }
+ end.new(time_frame: 'all')
+ end
+
+ it 'responds to #available? properly' do
+ expect(subject.available?).to eq(true)
+ end
+ end
+ end
+
+ context 'with unimplemented operation method used' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.operation :invalid_operation
+ metric_class.data { [] }
+ end.new(time_frame: 'all')
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::UnimplementedOperationError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb
new file mode 100644
index 00000000000..8a0ce61de74
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UniqueActiveUsersMetric do
+ let_it_be(:user1) { create(:user, last_activity_on: 1.day.ago) }
+ let_it_be(:user2) { create(:user, last_activity_on: 5.days.ago) }
+ let_it_be(:user3) { create(:user, last_activity_on: 50.days.ago) }
+ let_it_be(:user4) { create(:user) }
+  let_it_be(:user5) { create(:user, user_type: 1, last_activity_on: 5.days.ago) } # support bot
+ let_it_be(:user6) { create(:user, state: 'blocked') }
+
+ context '28d' do
+ let(:start) { 30.days.ago.to_date.to_s }
+ let(:finish) { 2.days.ago.to_date.to_s }
+ let(:expected_value) { 1 }
+ let(:expected_query) do
+ "SELECT COUNT(\"users\".\"id\") FROM \"users\" WHERE (\"users\".\"state\" IN ('active')) AND " \
+ "(\"users\".\"user_type\" IS NULL OR \"users\".\"user_type\" IN (6, 4)) AND \"users\".\"last_activity_on\" " \
+ "BETWEEN '#{start}' AND '#{finish}'"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' }
+ end
+
+ context 'all' do
+ let(:expected_value) { 4 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
index 6955fbcaf5a..9ee8bc6b568 100644
--- a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
@@ -71,9 +71,19 @@ RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
end
end
+ context 'for average metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with average(Ci::Pipeline, :duration)
+ let(:key_path) { 'counts.ci_pipeline_duration' }
+ let(:operation) { :average }
+ let(:relation) { Ci::Pipeline }
+      let(:column) { :duration }
+ let(:name_suggestion) { /average_duration_from_ci_pipelines/ }
+ end
+ end
+
context 'for redis metrics' do
it_behaves_like 'name suggestion' do
- # corresponding metric is collected with redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) }
let(:operation) { :redis }
let(:column) { nil }
let(:relation) { nil }
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index f81ad9b193d..167dba9b57d 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -77,8 +77,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
context 'for redis metrics' do
it_behaves_like 'name suggestion' do
- # corresponding metric is collected with redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) }
- let(:key_path) { 'analytics_unique_visits.analytics_unique_visits_for_any_target' }
+ let(:key_path) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
let(:name_suggestion) { /<please fill metric name, suggested format is: {subject}_{verb}{ing|ed}_{object} eg: users_creating_epics or merge_requests_viewed_in_single_file_mode>/ }
end
end
diff --git a/spec/lib/gitlab/usage/metrics/query_spec.rb b/spec/lib/gitlab/usage/metrics/query_spec.rb
index 65b8a7a046b..355d619f768 100644
--- a/spec/lib/gitlab/usage/metrics/query_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/query_spec.rb
@@ -61,6 +61,12 @@ RSpec.describe Gitlab::Usage::Metrics::Query do
end
end
+ describe '.average' do
+ it 'returns the raw SQL' do
+ expect(described_class.for(:average, Issue, :weight)).to eq('SELECT AVG("issues"."weight") FROM "issues"')
+ end
+ end
+
describe 'estimate_batch_distinct_count' do
it 'returns the raw SQL' do
expect(described_class.for(:estimate_batch_distinct_count, Issue, :author_id)).to eq('SELECT COUNT(DISTINCT "issues"."author_id") FROM "issues"')
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index e007554df4a..1e8f9db4dea 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -155,6 +155,11 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c
memoized_constatns += Gitlab::UsageData::EE_MEMOIZED_VALUES if defined? Gitlab::UsageData::EE_MEMOIZED_VALUES
memoized_constatns.each { |v| Gitlab::UsageData.clear_memoization(v) }
stub_database_flavor_check('Cloud SQL for PostgreSQL')
+
+      # in_product_marketing_email metric values are extracted from a single GROUP BY query.
+      # These records check that the per-metric queries return the same values as the
+      # grouped query when the values are non-zero.
+ create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.current)
+ create(:in_product_marketing_email, track: :verify, series: 0)
end
let(:service_ping_payload) { described_class.for(output: :all_metrics_values) }
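The records seeded above give the cross-check non-zero values to compare. Roughly, the two query shapes involved look like this (the `Users::InProductMarketingEmail` model name is an assumption in this sketch):

  # One COUNT per (track, series) pair, as the individual metrics do ...
  sent    = Users::InProductMarketingEmail.where(track: :create, series: 0).count
  clicked = Users::InProductMarketingEmail.where(track: :create, series: 0)
                                          .where.not(cta_clicked_at: nil).count

  # ... versus a single grouped query that yields every pair at once.
  grouped = Users::InProductMarketingEmail.group(:track, :series).count
  # => e.g. { ["create", 0] => 1, ["verify", 0] => 1 } for the records created above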
diff --git a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
index c4a84445a01..01396602f29 100644
--- a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
# If this spec fails, we need to add the new code review event to the correct aggregated metric
+# NOTE: ONLY user-related metrics should be added to the aggregates - otherwise add the event to the exception list below
RSpec.describe 'Code review events' do
it 'the aggregated metrics contain all the code review metrics' do
path = Rails.root.join('config/metrics/aggregates/code_review.yml')
@@ -15,7 +16,7 @@ RSpec.describe 'Code review events' do
code_review_events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category("code_review")
- exceptions = %w[i_code_review_mr_diffs i_code_review_mr_single_file_diffs i_code_review_total_suggestions_applied i_code_review_total_suggestions_added]
+ exceptions = %w[i_code_review_mr_diffs i_code_review_mr_with_invalid_approvers i_code_review_mr_single_file_diffs i_code_review_total_suggestions_applied i_code_review_total_suggestions_added i_code_review_create_note_in_ipynb_diff i_code_review_create_note_in_ipynb_diff_mr i_code_review_create_note_in_ipynb_diff_commit]
code_review_aggregated_events += exceptions
expect(code_review_events - code_review_aggregated_events).to be_empty
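The hunk elides how `code_review_aggregated_events` is built from the YAML at `path`; in outline it flattens the event lists of every aggregate definition. A hedged sketch of that step (the `'events'` field name is an assumption, not a quote of the elided code):

  aggregates = YAML.safe_load(File.read(path))
  code_review_aggregated_events = aggregates.flat_map { |aggregate| aggregate['events'] }.uniq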
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 9aecb8f8b25..dbc34681660 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -66,18 +66,6 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
end
- context 'for SSE edit actions' do
- it_behaves_like 'tracks and counts action' do
- def track_action(params)
- described_class.track_sse_edit_action(**params)
- end
-
- def count_unique(params)
- described_class.count_sse_edit_actions(**params)
- end
- end
- end
-
it 'can return the count of actions per user deduplicated' do
described_class.track_web_ide_edit_action(author: user1)
described_class.track_live_preview_edit_action(author: user1)
diff --git a/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
deleted file mode 100644
index 1bf5dad1c9f..00000000000
--- a/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UsageDataCounters::StaticSiteEditorCounter do
- it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :views
- it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :commits
- it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :merge_requests
-
- it_behaves_like 'a redis usage counter with totals', :static_site_editor,
- views: 3,
- commits: 4,
- merge_requests: 5
-end
diff --git a/spec/lib/gitlab/usage_data_counters_spec.rb b/spec/lib/gitlab/usage_data_counters_spec.rb
index 379a2cb778d..0696b375eb5 100644
--- a/spec/lib/gitlab/usage_data_counters_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters_spec.rb
@@ -13,10 +13,10 @@ RSpec.describe Gitlab::UsageDataCounters do
describe '.count' do
subject { described_class.count(event_name) }
- let(:event_name) { 'static_site_editor_views' }
+ let(:event_name) { 'web_ide_views' }
it 'increases a view counter' do
- expect(Gitlab::UsageDataCounters::StaticSiteEditorCounter).to receive(:count).with('views')
+ expect(Gitlab::UsageDataCounters::WebIdeCounter).to receive(:count).with('views')
subject
end
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
index 563eed75c38..485f2131d87 100644
--- a/spec/lib/gitlab/usage_data_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -24,11 +24,8 @@ RSpec.describe Gitlab::UsageDataMetrics do
expect(subject).to include(:hostname)
end
- it 'includes counts keys' do
+ it 'includes counts keys', :aggregate_failures do
expect(subject[:counts]).to include(:boards)
- end
-
- it 'includes counts keys' do
expect(subject[:counts]).to include(:issues)
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 7c64a31c499..2fe43c11d27 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -105,4 +105,25 @@ RSpec.describe Gitlab::UsageDataQueries do
expect(described_class.maximum_id(Project)).to eq(nil)
end
end
+
+ describe 'sent_in_product_marketing_email_count' do
+    it 'returns the raw SQL query' do
+ expect(described_class.sent_in_product_marketing_email_count(nil, 0, 0)).to eq(
+ 'SELECT COUNT("in_product_marketing_emails"."id") ' \
+ 'FROM "in_product_marketing_emails" ' \
+ 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0'
+ )
+ end
+ end
+
+ describe 'clicked_in_product_marketing_email_count' do
+    it 'returns the raw SQL query' do
+ expect(described_class.clicked_in_product_marketing_email_count(nil, 0, 0)).to eq(
+ 'SELECT COUNT("in_product_marketing_emails"."id") ' \
+ 'FROM "in_product_marketing_emails" ' \
+ 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0 ' \
+ 'AND "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL'
+ )
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 7edec6d13f4..790f5b638b9 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -723,7 +723,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(counts_monthly[:projects_with_alerts_created]).to eq(1)
expect(counts_monthly[:projects]).to eq(1)
expect(counts_monthly[:packages]).to eq(1)
- expect(counts_monthly[:promoted_issues]).to eq(Gitlab::UsageData::DEPRECATED_VALUE)
end
end
@@ -755,7 +754,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it { is_expected.to include(:kubernetes_agent_gitops_sync) }
it { is_expected.to include(:kubernetes_agent_k8s_api_proxy_request) }
- it { is_expected.to include(:static_site_editor_views) }
it { is_expected.to include(:package_events_i_package_pull_package) }
it { is_expected.to include(:package_events_i_package_delete_package_by_user) }
it { is_expected.to include(:package_events_i_package_conan_push_package) }
@@ -1188,12 +1186,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
counter.track_web_ide_edit_action(author: user3, time: time - 3.days)
counter.track_snippet_editor_edit_action(author: user3)
-
- counter.track_sse_edit_action(author: user1)
- counter.track_sse_edit_action(author: user1)
- counter.track_sse_edit_action(author: user2)
- counter.track_sse_edit_action(author: user3)
- counter.track_sse_edit_action(author: user2, time: time - 3.days)
end
it 'returns the distinct count of user actions within the specified time period' do
@@ -1206,108 +1198,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
action_monthly_active_users_web_ide_edit: 2,
action_monthly_active_users_sfe_edit: 2,
action_monthly_active_users_snippet_editor_edit: 2,
- action_monthly_active_users_ide_edit: 3,
- action_monthly_active_users_sse_edit: 3
+ action_monthly_active_users_ide_edit: 3
}
)
end
end
- describe '.analytics_unique_visits_data' do
- subject { described_class.analytics_unique_visits_data }
-
- it 'returns the number of unique visits to pages with analytics features' do
- ::Gitlab::Analytics::UniqueVisits.analytics_events.each do |target|
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: target).and_return(123)
- end
-
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics).and_return(543)
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
-
- expect(subject).to eq({
- analytics_unique_visits: {
- 'g_analytics_contribution' => 123,
- 'g_analytics_insights' => 123,
- 'g_analytics_issues' => 123,
- 'g_analytics_productivity' => 123,
- 'g_analytics_valuestream' => 123,
- 'p_analytics_pipelines' => 123,
- 'p_analytics_code_reviews' => 123,
- 'p_analytics_valuestream' => 123,
- 'p_analytics_insights' => 123,
- 'p_analytics_issues' => 123,
- 'p_analytics_repo' => 123,
- 'i_analytics_cohorts' => 123,
- 'i_analytics_dev_ops_score' => 123,
- 'i_analytics_instance_statistics' => 123,
- 'p_analytics_ci_cd_deployment_frequency' => 123,
- 'p_analytics_ci_cd_lead_time' => 123,
- 'p_analytics_ci_cd_pipelines' => 123,
- 'p_analytics_merge_request' => 123,
- 'i_analytics_dev_ops_adoption' => 123,
- 'users_viewing_analytics_group_devops_adoption' => 123,
- 'analytics_unique_visits_for_any_target' => 543,
- 'analytics_unique_visits_for_any_target_monthly' => 987
- }
- })
- end
- end
-
- describe '.compliance_unique_visits_data' do
- subject { described_class.compliance_unique_visits_data }
-
- before do
- allow_next_instance_of(::Gitlab::Analytics::UniqueVisits) do |instance|
- ::Gitlab::Analytics::UniqueVisits.compliance_events.each do |target|
- allow(instance).to receive(:unique_visits_for).with(targets: target).and_return(123)
- end
-
- allow(instance).to receive(:unique_visits_for).with(targets: :compliance).and_return(543)
-
- allow(instance).to receive(:unique_visits_for).with(targets: :compliance, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
- end
- end
-
- it 'returns the number of unique visits to pages with compliance features' do
- expect(subject).to eq({
- compliance_unique_visits: {
- 'g_compliance_dashboard' => 123,
- 'g_compliance_audit_events' => 123,
- 'i_compliance_credential_inventory' => 123,
- 'i_compliance_audit_events' => 123,
- 'a_compliance_audit_events_api' => 123,
- 'compliance_unique_visits_for_any_target' => 543,
- 'compliance_unique_visits_for_any_target_monthly' => 987
- }
- })
- end
- end
-
- describe '.search_unique_visits_data' do
- subject { described_class.search_unique_visits_data }
-
- before do
- events = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category('search')
- events.each do |event|
- allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: event, start_date: 7.days.ago.to_date, end_date: Date.current).and_return(123)
- end
- allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: events, start_date: 7.days.ago.to_date, end_date: Date.current).and_return(543)
- allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: events, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
- end
-
- it 'returns the number of unique visits to pages with search features' do
- expect(subject).to eq({
- search_unique_visits: {
- 'i_search_total' => 123,
- 'i_search_advanced' => 123,
- 'i_search_paid' => 123,
- 'search_unique_visits_for_any_target_weekly' => 543,
- 'search_unique_visits_for_any_target_monthly' => 987
- }
- })
- end
- end
-
describe 'redis_hll_counters' do
subject { described_class.redis_hll_counters }
@@ -1497,4 +1393,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
end
+
+ context 'on Gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ describe '.system_usage_data' do
+ subject { described_class.system_usage_data }
+
+ it 'returns fallback value for disabled metrics' do
+ expect(subject[:counts][:ci_internal_pipelines]).to eq(Gitlab::Utils::UsageData::FALLBACK)
+ expect(subject[:counts][:issues_created_gitlab_alerts]).to eq(Gitlab::Utils::UsageData::FALLBACK)
+ expect(subject[:counts][:issues_created_manually_from_alerts]).to eq(Gitlab::Utils::UsageData::FALLBACK)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index a74a9f06c6f..25ba5a3e09e 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -259,6 +259,37 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
+ describe '#average' do
+ let(:relation) { double(:relation) }
+
+ it 'returns the average when operation succeeds' do
+ allow(Gitlab::Database::BatchCount)
+ .to receive(:batch_average)
+ .with(relation, :column, batch_size: 100, start: 2, finish: 3)
+ .and_return(1)
+
+ expect(described_class.average(relation, :column, batch_size: 100, start: 2, finish: 3)).to eq(1)
+ end
+
+ it 'records duration' do
+ expect(described_class).to receive(:with_duration)
+
+ allow(Gitlab::Database::BatchCount).to receive(:batch_average).and_return(1)
+
+ described_class.average(relation, :column)
+ end
+
+ context 'when operation fails' do
+ subject { described_class.average(relation, :column) }
+
+ let(:fallback) { 15 }
+ let(:failing_class) { Gitlab::Database::BatchCount }
+ let(:failing_method) { :batch_average }
+
+ it_behaves_like 'failing hardening method'
+ end
+ end
+
describe '#histogram' do
let_it_be(:projects) { create_list(:project, 3) }
diff --git a/spec/lib/gitlab/web_hooks/rate_limiter_spec.rb b/spec/lib/gitlab/web_hooks/rate_limiter_spec.rb
new file mode 100644
index 00000000000..b25ce4ea9da
--- /dev/null
+++ b/spec/lib/gitlab/web_hooks/rate_limiter_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WebHooks::RateLimiter, :clean_gitlab_redis_rate_limiting do
+ let_it_be(:plan) { create(:default_plan) }
+ let_it_be_with_reload(:project_hook) { create(:project_hook) }
+ let_it_be_with_reload(:system_hook) { create(:system_hook) }
+ let_it_be_with_reload(:integration_hook) { create(:jenkins_integration).service_hook }
+ let_it_be(:limit) { 1 }
+
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#rate_limit!' do
+ def rate_limit!(hook)
+ described_class.new(hook).rate_limit!
+ end
+
+ shared_examples 'a hook that is never rate limited' do
+ specify do
+ expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+
+ expect(rate_limit!(hook)).to eq(false)
+ end
+ end
+
+ context 'when there is no plan limit' do
+ where(:hook) { [ref(:project_hook), ref(:system_hook), ref(:integration_hook)] }
+
+ with_them { it_behaves_like 'a hook that is never rate limited' }
+ end
+
+ context 'when there is a plan limit' do
+ before_all do
+ create(:plan_limits, plan: plan, web_hook_calls: limit)
+ end
+
+ where(:hook, :limitless_hook_type) do
+ ref(:project_hook) | false
+ ref(:system_hook) | true
+ ref(:integration_hook) | true
+ end
+
+ with_them do
+ if params[:limitless_hook_type]
+ it_behaves_like 'a hook that is never rate limited'
+ else
+ it 'rate limits the hook, returning true when rate limited' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?)
+ .exactly(3).times
+ .and_call_original
+
+ freeze_time do
+ limit.times { expect(rate_limit!(hook)).to eq(false) }
+ expect(rate_limit!(hook)).to eq(true)
+ end
+
+ travel_to(1.day.from_now) do
+ expect(rate_limit!(hook)).to eq(false)
+ end
+ end
+ end
+ end
+ end
+
+ describe 'rate limit scope' do
+ it 'rate limits all hooks from the same namespace', :freeze_time do
+ create(:plan_limits, plan: plan, web_hook_calls: limit)
+ project_hook_in_different_namespace = create(:project_hook)
+ project_hook_in_same_namespace = create(:project_hook,
+ project: create(:project, namespace: project_hook.project.namespace)
+ )
+
+ limit.times { expect(rate_limit!(project_hook)).to eq(false) }
+ expect(rate_limit!(project_hook)).to eq(true)
+ expect(rate_limit!(project_hook_in_same_namespace)).to eq(true)
+ expect(rate_limit!(project_hook_in_different_namespace)).to eq(false)
+ end
+ end
+ end
+
+ describe '#rate_limited?' do
+ subject { described_class.new(hook).rate_limited? }
+
+ context 'when no plan limit has been defined' do
+ where(:hook) { [ref(:project_hook), ref(:system_hook), ref(:integration_hook)] }
+
+ with_them do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when there is a plan limit' do
+ before_all do
+ create(:plan_limits, plan: plan, web_hook_calls: limit)
+ end
+
+ context 'when hook is not rate-limited' do
+ where(:hook) { [ref(:project_hook), ref(:system_hook), ref(:integration_hook)] }
+
+ with_them do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when hook is rate-limited' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+ end
+
+ where(:hook, :limitless_hook_type) do
+ ref(:project_hook) | false
+ ref(:system_hook) | true
+ ref(:integration_hook) | true
+ end
+
+ with_them do
+ it { is_expected.to eq(!limitless_hook_type) }
+ end
+ end
+ end
+ end
+end
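These examples pin down behaviour rather than implementation: project hooks are throttled per root namespace against the plan's `web_hook_calls` limit, while system and integration hooks are exempt. A hypothetical sketch of such a check (attribute lookups and the interval are assumptions, not the real Gitlab::WebHooks::RateLimiter):

  def rate_limit!(hook)
    # System and integration hooks are never limited in the examples above.
    return false if hook.is_a?(SystemHook) || hook.is_a?(ServiceHook)

    root_namespace = hook.project.root_namespace
    limit = root_namespace.actual_plan.actual_limits.web_hook_calls
    return false if limit.to_i == 0 # no plan limit configured

    # A shared scope means every hook under the same root namespace draws
    # from one budget, matching the 'rate limit scope' examples.
    Gitlab::ApplicationRateLimiter.throttled?(
      :web_hook_calls,
      scope: [root_namespace],
      threshold: limit,
      interval: 1.day
    )
  end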
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index 53048ae2e6b..693b7bd45c9 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe 'Marginalia spec' do
"application" => "test",
"endpoint_id" => "MarginaliaTestController#first_user",
"correlation_id" => correlation_id,
- "db_config_name" => ENV['GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci'] == 'main' ? 'main' : 'ci'
+ "db_config_name" => 'ci'
}
end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 1629aec89f5..18a58522d12 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -342,68 +342,84 @@ RSpec.describe ObjectStorage::DirectUpload do
context 'when length is unknown' do
let(:has_length) { false }
- it_behaves_like 'a valid S3 upload with multipart data' do
- before do
- stub_object_storage_multipart_init(storage_url, "myUpload")
+ context 'when s3_omit_multipart_urls feature flag is enabled' do
+ let(:consolidated_settings) { true }
+
+ it 'omits multipart URLs' do
+ expect(subject).not_to have_key(:MultipartUpload)
end
- context 'when maximum upload size is 0' do
- let(:maximum_size) { 0 }
+ it_behaves_like 'a valid upload'
+ end
- it 'returns maximum number of parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
- end
+ context 'when s3_omit_multipart_urls feature flag is disabled' do
+ before do
+ stub_feature_flags(s3_omit_multipart_urls: false)
+ end
- it 'part size is minimum, 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it_behaves_like 'a valid S3 upload with multipart data' do
+ before do
+ stub_object_storage_multipart_init(storage_url, "myUpload")
end
- end
- context 'when maximum upload size is < 5 MB' do
- let(:maximum_size) { 1024 }
+ context 'when maximum upload size is 0' do
+ let(:maximum_size) { 0 }
- it 'returns only 1 part' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(1)
- end
+ it 'returns maximum number of parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+ end
- it 'part size is minimum, 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it 'part size is minimum, 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- end
- context 'when maximum upload size is 10MB' do
- let(:maximum_size) { 10.megabyte }
+ context 'when maximum upload size is < 5 MB' do
+ let(:maximum_size) { 1024 }
- it 'returns only 2 parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
- end
+ it 'returns only 1 part' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(1)
+ end
- it 'part size is minimum, 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it 'part size is minimum, 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- end
- context 'when maximum upload size is 12MB' do
- let(:maximum_size) { 12.megabyte }
+ context 'when maximum upload size is 10MB' do
+ let(:maximum_size) { 10.megabyte }
- it 'returns only 3 parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(3)
- end
+ it 'returns only 2 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
+ end
- it 'part size is rounded-up to 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it 'part size is minimum, 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- end
- context 'when maximum upload size is 49GB' do
- let(:maximum_size) { 49.gigabyte }
+ context 'when maximum upload size is 12MB' do
+ let(:maximum_size) { 12.megabyte }
+
+ it 'returns only 3 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(3)
+ end
- it 'returns maximum, 100 parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+ it 'part size is rounded-up to 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- it 'part size is rounded-up to 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(505.megabyte)
+ context 'when maximum upload size is 49GB' do
+ let(:maximum_size) { 49.gigabyte }
+
+ it 'returns maximum, 100 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+ end
+
+ it 'part size is rounded-up to 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(505.megabyte)
+ end
end
end
end
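Taken together, the multipart expectations above imply a simple sizing rule: split the maximum size across at most 100 parts, round the part size up to a 5 MB multiple (never below 5 MB), and fall back to 100 parts of 5 MB when the size is 0/unknown. A sketch of that arithmetic (illustrative only, not the ObjectStorage::DirectUpload code):

  MINIMUM_PART_SIZE = 5 * 1024 * 1024 # 5 MB
  MAXIMUM_PARTS     = 100

  def multipart_sizing(maximum_size)
    return { parts: MAXIMUM_PARTS, part_size: MINIMUM_PART_SIZE } if maximum_size == 0

    # Round the per-part size up to the nearest 5 MB, never below the minimum.
    raw_size  = (maximum_size.to_f / MAXIMUM_PARTS).ceil
    part_size = [(raw_size.to_f / MINIMUM_PART_SIZE).ceil * MINIMUM_PART_SIZE, MINIMUM_PART_SIZE].max

    parts = [[(maximum_size.to_f / part_size).ceil, 1].max, MAXIMUM_PARTS].min
    { parts: parts, part_size: part_size }
  end

  multipart_sizing(12 * 1024 * 1024) # => 3 parts of 5 MB
  multipart_sizing(49 * 1024**3)     # => 100 parts of 505 MB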
diff --git a/spec/lib/security/ci_configuration/sast_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
index efb8b0b9984..611a886d252 100644
--- a/spec/lib/security/ci_configuration/sast_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
@@ -4,54 +4,54 @@ require 'spec_helper'
RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:default_sast_values) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'registry.gitlab.com/security-products' }
],
- 'pipeline' =>
+ pipeline:
[
- { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'test' },
- { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 4 },
- { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec, test, tests, tmp' }
+ { field: 'stage', default_value: 'test', value: 'test' },
+ { field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 4 },
+ { field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec, test, tests, tmp' }
] }
end
let(:params) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'new_registry' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'new_registry' }
],
- 'pipeline' =>
+ pipeline:
[
- { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'security' },
- { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 1 },
- { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' }
+ { field: 'stage', default_value: 'test', value: 'security' },
+ { field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 1 },
+ { field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec,docs' }
] }
end
let(:params_with_analyzer_info) do
- params.merge( { 'analyzers' =>
+ params.merge( { analyzers:
[
{
- 'name' => "bandit",
- 'enabled' => false
+ name: "bandit",
+ enabled: false
},
{
- 'name' => "brakeman",
- 'enabled' => true,
- 'variables' => [
- { 'field' => "SAST_BRAKEMAN_LEVEL",
- 'defaultValue' => "1",
- 'value' => "2" }
+ name: "brakeman",
+ enabled: true,
+ variables: [
+ { field: "SAST_BRAKEMAN_LEVEL",
+ default_value: "1",
+ value: "2" }
]
},
{
- 'name' => "flawfinder",
- 'enabled' => true,
- 'variables' => [
- { 'field' => "SAST_FLAWFINDER_LEVEL",
- 'defaultValue' => "1",
- 'value' => "1" }
+ name: "flawfinder",
+ enabled: true,
+ variables: [
+ { field: "SAST_FLAWFINDER_LEVEL",
+ default_value: "1",
+ value: "1" }
]
}
] }
@@ -59,15 +59,15 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
end
let(:params_with_all_analyzers_enabled) do
- params.merge( { 'analyzers' =>
+ params.merge( { analyzers:
[
{
- 'name' => "flawfinder",
- 'enabled' => true
+ name: "flawfinder",
+ enabled: true
},
{
- 'name' => "brakeman",
- 'enabled' => true
+ name: "brakeman",
+ enabled: true
}
] }
)
@@ -162,15 +162,15 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
context 'with update stage and SEARCH_MAX_DEPTH and set SECURE_ANALYZERS_PREFIX to default' do
let(:params) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'registry.gitlab.com/security-products' }
],
- 'pipeline' =>
+ pipeline:
[
- { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'brand_new_stage' },
- { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 5 },
- { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' }
+ { field: 'stage', default_value: 'test', value: 'brand_new_stage' },
+ { field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 5 },
+ { field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec,docs' }
] }
end
@@ -273,9 +273,9 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
context 'with one empty parameter' do
let(:params) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => '' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: '' }
] }
end
diff --git a/spec/lib/service_ping/build_payload_spec.rb b/spec/lib/service_ping/build_payload_spec.rb
index 6cce07262b2..b10c9fd5bc0 100644
--- a/spec/lib/service_ping/build_payload_spec.rb
+++ b/spec/lib/service_ping/build_payload_spec.rb
@@ -14,35 +14,6 @@ RSpec.describe ServicePing::BuildPayload do
end
end
- context 'when usage_ping_enabled setting is false' do
- before do
- # Gitlab::CurrentSettings.usage_ping_enabled? == false
- stub_config_setting(usage_ping_enabled: false)
- end
-
- it 'returns empty service ping payload' do
- expect(service_ping_payload).to eq({})
- end
- end
-
- context 'when usage_ping_enabled setting is true' do
- before do
- # Gitlab::CurrentSettings.usage_ping_enabled? == true
- stub_config_setting(usage_ping_enabled: true)
- end
-
- it_behaves_like 'complete service ping payload'
-
- context 'with require stats consent enabled' do
- before do
- allow(User).to receive(:single_user)
- .and_return(instance_double(User, :user, requires_usage_stats_consent?: true))
- end
-
- it 'returns empty service ping payload' do
- expect(service_ping_payload).to eq({})
- end
- end
- end
+ it_behaves_like 'complete service ping payload'
end
end
diff --git a/spec/lib/service_ping/permit_data_categories_spec.rb b/spec/lib/service_ping/permit_data_categories_spec.rb
index d1027a6f1ab..a4b88531205 100644
--- a/spec/lib/service_ping/permit_data_categories_spec.rb
+++ b/spec/lib/service_ping/permit_data_categories_spec.rb
@@ -19,26 +19,10 @@ RSpec.describe ServicePing::PermitDataCategories do
end
context 'when usage ping setting is set to false' do
- before do
- allow(User).to receive(:single_user)
- .and_return(instance_double(User, :user, requires_usage_stats_consent?: false))
+ it 'returns all categories' do
stub_config_setting(usage_ping_enabled: false)
- end
-
- it 'returns no categories' do
- expect(permitted_categories).to match_array([])
- end
- end
- context 'when User.single_user&.requires_usage_stats_consent? is required' do
- before do
- allow(User).to receive(:single_user)
- .and_return(instance_double(User, :user, requires_usage_stats_consent?: true))
- stub_config_setting(usage_ping_enabled: true)
- end
-
- it 'returns no categories' do
- expect(permitted_categories).to match_array([])
+ expect(permitted_categories).to match_array(%w[standard subscription operational optional])
end
end
end
diff --git a/spec/mailers/emails/admin_notification_spec.rb b/spec/mailers/emails/admin_notification_spec.rb
index 90381eb8ffd..a233be86a83 100644
--- a/spec/mailers/emails/admin_notification_spec.rb
+++ b/spec/mailers/emails/admin_notification_spec.rb
@@ -3,9 +3,62 @@
require 'spec_helper'
RSpec.describe Emails::AdminNotification do
+ include EmailSpec::Matchers
+ include_context 'gitlab email notification'
+
it 'adds email methods to Notify' do
subject.instance_methods.each do |email_method|
expect(Notify).to be_respond_to(email_method)
end
end
+
+ describe 'user_auto_banned_email' do
+ let_it_be(:admin) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
+ let(:max_project_downloads) { 5 }
+ let(:time_period) { 600 }
+
+ subject do
+ Notify.user_auto_banned_email(
+ admin.id, user.id,
+ max_project_downloads: max_project_downloads,
+ within_seconds: time_period
+ )
+ end
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+
+ it 'is sent to the administrator' do
+ is_expected.to deliver_to admin.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject "We've detected unusual activity"
+ end
+
+ it 'includes the name of the user' do
+ is_expected.to have_body_text user.name
+ end
+
+ it 'includes the reason' do
+ is_expected.to have_body_text "due to them downloading more than 5 project repositories within 10 minutes"
+ end
+
+ it 'includes a link to unban the user' do
+ is_expected.to have_body_text admin_users_url(filter: 'banned')
+ end
+
+ it 'includes a link to change the settings' do
+ is_expected.to have_body_text network_admin_application_settings_url(anchor: 'js-ip-limits-settings')
+ end
+
+ it 'includes the email reason' do
+ is_expected.to have_body_text "You're receiving this email because of your account on localhost"
+ end
+ end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index b6ad66f41b5..a9796c28870 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1858,7 +1858,7 @@ RSpec.describe Notify do
end
end
- subject { ActionMailer::Base.deliveries.last }
+ subject { ActionMailer::Base.deliveries.first }
it_behaves_like 'an email sent from GitLab'
it_behaves_like "a user cannot unsubscribe through footer link"
diff --git a/spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb b/spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb
new file mode 100644
index 00000000000..2838fc9387c
--- /dev/null
+++ b/spec/migrations/20220416054011_schedule_backfill_project_member_namespace_id_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleBackfillProjectMemberNamespaceId do
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of project members' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :members,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb b/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
index 769c0993b67..2bc3e89a748 100644
--- a/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
+++ b/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
@@ -15,21 +15,5 @@ RSpec.describe CleanUpFixMergeRequestDiffCommitUsers, :migration do
migrate!
end
-
- it 'processes pending background jobs' do
- project = projects.create!(name: 'p1', namespace_id: namespace.id, project_namespace_id: project_namespace.id)
-
- Gitlab::Database::BackgroundMigrationJob.create!(
- class_name: 'FixMergeRequestDiffCommitUsers',
- arguments: [project.id]
- )
-
- migrate!
-
- background_migrations = Gitlab::Database::BackgroundMigrationJob
- .where(class_name: 'FixMergeRequestDiffCommitUsers')
-
- expect(background_migrations.count).to eq(0)
- end
end
end
diff --git a/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb b/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb
new file mode 100644
index 00000000000..5002c665c79
--- /dev/null
+++ b/spec/migrations/20220503035221_add_gitlab_schema_to_batched_background_migrations_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddGitlabSchemaToBatchedBackgroundMigrations do
+  it 'sets gitlab_schema to "gitlab_main" for existing migrations and leaves the column default as NULL' do
+ batched_migrations = table(:batched_background_migrations)
+ batched_migration = batched_migrations.create!(
+ id: 1, created_at: Time.now, updated_at: Time.now,
+ max_value: 100, batch_size: 100, sub_batch_size: 10, interval: 120,
+ job_class_name: 'TestJob', table_name: '_test', column_name: 'id'
+ )
+
+ reversible_migration do |migration|
+ migration.before -> {
+ batched_migrations.reset_column_information
+ column = batched_migrations.columns.find { |column| column.name == 'gitlab_schema' }
+
+ expect(column).to be_nil
+ }
+
+ migration.after -> {
+ expect(batched_migration.reload.gitlab_schema).to eq('gitlab_main')
+
+ batched_migrations.reset_column_information
+ column = batched_migrations.columns.find { |column| column.name == 'gitlab_schema' }
+
+ expect(column).to be
+ expect(column.default).to be_nil
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb b/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
index 8bc336a6b26..575157f8331 100644
--- a/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
+++ b/spec/migrations/20220505044348_fix_automatic_iterations_cadences_start_date_spec.rb
@@ -4,8 +4,7 @@ require 'spec_helper'
require_migration!
-RSpec.describe FixAutomaticIterationsCadencesStartDate,
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/362446' do
+RSpec.describe FixAutomaticIterationsCadencesStartDate do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:sprints) { table(:sprints) }
diff --git a/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb b/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb
new file mode 100644
index 00000000000..fa94a73582d
--- /dev/null
+++ b/spec/migrations/20220512190659_remove_web_hooks_web_hook_logs_web_hook_id_fk_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveWebHooksWebHookLogsWebHookIdFk do
+ let(:web_hooks) { table(:web_hooks) }
+ let(:logs) { table(:web_hook_logs) }
+
+ let!(:hook) { web_hooks.create! }
+
+ let!(:log_a) { logs.create!(web_hook_id: hook.id, response_body: 'msg-a') }
+ let!(:log_b) { logs.create!(web_hook_id: hook.id, response_body: 'msg-b') }
+
+ describe '#up' do
+ it 'allows us to delete web-hooks and leave web-hook logs intact' do
+ migrate!
+
+ expect { hook.delete }.not_to change(logs, :count)
+
+ expect(logs.pluck(:response_body)).to match_array %w[msg-a msg-b]
+ end
+ end
+
+ describe '#down' do
+ it 'ensures referential integrity of hook logs' do
+ migrate!
+ schema_migrate_down!
+
+ expect { hook.delete }.to change(logs, :count).by(-2)
+ end
+ end
+end
diff --git a/spec/migrations/20220520040416_schedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/migrations/20220520040416_schedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb
new file mode 100644
index 00000000000..e3bc832a10b
--- /dev/null
+++ b/spec/migrations/20220520040416_schedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleSetLegacyOpenSourceLicenseAvailableForNonPublicProjects do
+ context 'on gitlab.com' do
+ let(:migration) { described_class::MIGRATION }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of projects' do
+ migrate!
+
+ expect(migration).to(
+ have_scheduled_batched_migration(
+ table_name: :projects,
+ column_name: :id,
+ interval: described_class::INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+ end
+
+ context 'on self-managed instance' do
+ let(:migration) { described_class.new }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ describe '#up' do
+ it 'does not schedule background job' do
+ expect(migration).not_to receive(:queue_batched_background_migration)
+
+ migration.up
+ end
+ end
+
+ describe '#down' do
+ it 'does not delete background job' do
+ expect(migration).not_to receive(:delete_batched_background_migration)
+
+ migration.down
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb b/spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb
new file mode 100644
index 00000000000..78df6f5fc35
--- /dev/null
+++ b/spec/migrations/20220523171107_drop_deploy_tokens_token_column_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DropDeployTokensTokenColumn do
+ let(:deploy_tokens) { table(:deploy_tokens) }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(deploy_tokens.column_names).to include('token')
+ }
+
+ migration.after -> {
+ deploy_tokens.reset_column_information
+
+ expect(deploy_tokens.column_names).not_to include('token')
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb b/spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb
new file mode 100644
index 00000000000..74ad4662b3e
--- /dev/null
+++ b/spec/migrations/20220524074947_finalize_backfill_null_note_discussion_ids_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe FinalizeBackfillNullNoteDiscussionIds, :migration do
+ subject(:migration) { described_class.new }
+
+ let(:notes) { table(:notes) }
+ let(:bg_migration_class) { Gitlab::BackgroundMigration::BackfillNoteDiscussionId }
+ let(:bg_migration) { instance_double(bg_migration_class) }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ it 'performs remaining background migrations', :aggregate_failures do
+ # Already migrated
+ notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: Digest::SHA1.hexdigest('note1'))
+ notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: Digest::SHA1.hexdigest('note2'))
+    # Update required
+ record1 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
+ record2 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
+ record3 = notes.create!(noteable_type: 'Issue', noteable_id: 1, discussion_id: nil)
+
+ expect(Gitlab::BackgroundMigration).to receive(:steal).with(bg_migration_class.name.demodulize)
+ expect(bg_migration_class).to receive(:new).twice.and_return(bg_migration)
+ expect(bg_migration).to receive(:perform).with(record1.id, record2.id)
+ expect(bg_migration).to receive(:perform).with(record3.id, record3.id)
+
+ migrate!
+ end
+end
diff --git a/spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb b/spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb
new file mode 100644
index 00000000000..63ad9367503
--- /dev/null
+++ b/spec/migrations/add_web_hook_calls_to_plan_limits_paid_tiers_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AddWebHookCallsToPlanLimitsPaidTiers do
+ let_it_be(:plans) { table(:plans) }
+ let_it_be(:plan_limits) { table(:plan_limits) }
+
+ context 'when on Gitlab.com' do
+ let(:free_plan) { plans.create!(name: 'free') }
+ let(:bronze_plan) { plans.create!(name: 'bronze') }
+ let(:silver_plan) { plans.create!(name: 'silver') }
+ let(:gold_plan) { plans.create!(name: 'gold') }
+ let(:premium_plan) { plans.create!(name: 'premium') }
+ let(:premium_trial_plan) { plans.create!(name: 'premium_trial') }
+ let(:ultimate_plan) { plans.create!(name: 'ultimate') }
+ let(:ultimate_trial_plan) { plans.create!(name: 'ultimate_trial') }
+ let(:opensource_plan) { plans.create!(name: 'opensource') }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ # 120 is the value for 'free' migrated in `db/migrate/20210601131742_update_web_hook_calls_limit.rb`
+ plan_limits.create!(plan_id: free_plan.id, web_hook_calls: 120)
+ plan_limits.create!(plan_id: bronze_plan.id)
+ plan_limits.create!(plan_id: silver_plan.id)
+ plan_limits.create!(plan_id: gold_plan.id)
+ plan_limits.create!(plan_id: premium_plan.id)
+ plan_limits.create!(plan_id: premium_trial_plan.id)
+ plan_limits.create!(plan_id: ultimate_plan.id)
+ plan_limits.create!(plan_id: ultimate_trial_plan.id)
+ plan_limits.create!(plan_id: opensource_plan.id)
+ end
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(
+ plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
+ ).to contain_exactly(
+ [free_plan.id, 120, 0, 0],
+ [bronze_plan.id, 0, 0, 0],
+ [silver_plan.id, 0, 0, 0],
+ [gold_plan.id, 0, 0, 0],
+ [premium_plan.id, 0, 0, 0],
+ [premium_trial_plan.id, 0, 0, 0],
+ [ultimate_plan.id, 0, 0, 0],
+ [ultimate_trial_plan.id, 0, 0, 0],
+ [opensource_plan.id, 0, 0, 0]
+ )
+ }
+
+ migration.after -> {
+ expect(
+ plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
+ ).to contain_exactly(
+ [free_plan.id, 500, 500, 500],
+ [bronze_plan.id, 4_000, 2_800, 1_600],
+ [silver_plan.id, 4_000, 2_800, 1_600],
+ [gold_plan.id, 13_000, 9_000, 6_000],
+ [premium_plan.id, 4_000, 2_800, 1_600],
+ [premium_trial_plan.id, 4_000, 2_800, 1_600],
+ [ultimate_plan.id, 13_000, 9_000, 6_000],
+ [ultimate_trial_plan.id, 13_000, 9_000, 6_000],
+ [opensource_plan.id, 13_000, 9_000, 6_000]
+ )
+ }
+ end
+ end
+ end
+
+  context 'when on self-hosted' do
+ let(:default_plan) { plans.create!(name: 'default') }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+
+ plan_limits.create!(plan_id: default_plan.id)
+ end
+
+ it 'does nothing' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(
+ plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
+ ).to contain_exactly(
+ [default_plan.id, 0, 0, 0]
+ )
+ }
+
+ migration.after -> {
+ expect(
+ plan_limits.pluck(:plan_id, :web_hook_calls, :web_hook_calls_mid, :web_hook_calls_low)
+ ).to contain_exactly(
+ [default_plan.id, 0, 0, 0]
+ )
+ }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb b/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb
new file mode 100644
index 00000000000..a359a78ab45
--- /dev/null
+++ b/spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BulkInsertClusterEnabledGrants, :migration do
+ let(:migration) { described_class.new }
+
+ let(:cluster_enabled_grants) { table(:cluster_enabled_grants) }
+ let(:namespaces) { table(:namespaces) }
+ let(:cluster_projects) { table(:cluster_projects) }
+ let(:cluster_groups) { table(:cluster_groups) }
+ let(:clusters) { table(:clusters) }
+ let(:projects) { table(:projects) }
+
+ context 'with namespaces, cluster_groups and cluster_projects' do
+ it 'creates unique cluster_enabled_grants for root_namespaces with clusters' do
+ # Does not create grants for namespaces without clusters
+ namespaces.create!(id: 1, path: 'eee', name: 'eee', traversal_ids: [1]) # not used
+
+ # Creates unique grant for a root namespace with its own cluster
+ root_ns_with_own_cluster = namespaces.create!(id: 2, path: 'ddd', name: 'ddd', traversal_ids: [2])
+ cluster_root_ns_with_own_cluster = clusters.create!(name: 'cluster_root_ns_with_own_cluster')
+ cluster_groups.create!(
+ cluster_id: cluster_root_ns_with_own_cluster.id,
+ group_id: root_ns_with_own_cluster.id)
+
+      # Creates unique grant for a root namespace with multiple sub-group clusters
+ root_ns_with_sub_group_clusters = namespaces.create!(id: 3, path: 'aaa', name: 'aaa', traversal_ids: [3])
+
+ subgroup_1 = namespaces.create!(
+ id: 4,
+ path: 'bbb',
+ name: 'bbb',
+ parent_id: root_ns_with_sub_group_clusters.id,
+ traversal_ids: [root_ns_with_sub_group_clusters.id, 4])
+ cluster_subgroup_1 = clusters.create!(name: 'cluster_subgroup_1')
+ cluster_groups.create!(cluster_id: cluster_subgroup_1.id, group_id: subgroup_1.id)
+
+ subgroup_2 = namespaces.create!(
+ id: 5,
+ path: 'ccc',
+ name: 'ccc',
+ parent_id: subgroup_1.id,
+ traversal_ids: [root_ns_with_sub_group_clusters.id, subgroup_1.id, 5])
+ cluster_subgroup_2 = clusters.create!(name: 'cluster_subgroup_2')
+ cluster_groups.create!(cluster_id: cluster_subgroup_2.id, group_id: subgroup_2.id)
+
+      # Creates unique grant for a root namespace with multiple project clusters
+ root_ns_with_project_group_clusters = namespaces.create!(id: 6, path: 'fff', name: 'fff', traversal_ids: [6])
+
+ project_namespace_1 = namespaces.create!(id: 7, path: 'ggg', name: 'ggg', traversal_ids: [7])
+ project_1 = projects.create!(
+ name: 'project_1',
+ namespace_id: root_ns_with_project_group_clusters.id,
+ project_namespace_id: project_namespace_1.id)
+ cluster_project_1 = clusters.create!(name: 'cluster_project_1')
+ cluster_projects.create!(cluster_id: cluster_project_1.id, project_id: project_1.id)
+
+ project_namespace_2 = namespaces.create!(id: 8, path: 'hhh', name: 'hhh', traversal_ids: [8])
+ project_2 = projects.create!(
+ name: 'project_2',
+ namespace_id: root_ns_with_project_group_clusters.id,
+ project_namespace_id: project_namespace_2.id)
+ cluster_project_2 = clusters.create!(name: 'cluster_project_2')
+ cluster_projects.create!(cluster_id: cluster_project_2.id, project_id: project_2.id)
+
+ migrate!
+
+ expected_cluster_enabled_grants = [
+ root_ns_with_sub_group_clusters.id,
+ root_ns_with_own_cluster.id,
+ root_ns_with_project_group_clusters.id
+ ]
+
+ expect(cluster_enabled_grants.pluck(:namespace_id)).to match_array(expected_cluster_enabled_grants)
+ end
+ end
+
+ context 'without namespaces, cluster_groups or cluster_projects' do
+ it 'does nothing' do
+ expect { migrate! }.not_to change { cluster_enabled_grants.count }
+ end
+ end
+end
diff --git a/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb b/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb
new file mode 100644
index 00000000000..1517405b358
--- /dev/null
+++ b/spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CleanupBackfillIntegrationsEnableSslVerification, :migration do
+ let(:job_class_name) { 'BackfillIntegrationsEnableSslVerification' }
+
+ before do
+ # Jobs enqueued in Sidekiq.
+ Sidekiq::Testing.disable! do
+ BackgroundMigrationWorker.perform_in(10, job_class_name, [1, 2])
+ BackgroundMigrationWorker.perform_in(20, job_class_name, [3, 4])
+ end
+
+ # Jobs tracked in the database.
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: job_class_name,
+ arguments: [5, 6],
+ status: Gitlab::Database::BackgroundMigrationJob.statuses['pending']
+ )
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: job_class_name,
+ arguments: [7, 8],
+ status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
+ )
+
+ migrate!
+ end
+
+ it_behaves_like(
+ 'finalized tracked background migration',
+ Gitlab::BackgroundMigration::BackfillIntegrationsEnableSslVerification
+ )
+end
diff --git a/spec/migrations/cleanup_orphaned_routes_spec.rb b/spec/migrations/cleanup_orphaned_routes_spec.rb
new file mode 100644
index 00000000000..68598939557
--- /dev/null
+++ b/spec/migrations/cleanup_orphaned_routes_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CleanupOrphanedRoutes, :migration do
+ let(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :routes,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ gitlab_schema: :gitlab_main
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb b/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb
new file mode 100644
index 00000000000..2bb740d0c2f
--- /dev/null
+++ b/spec/migrations/finalize_routes_backfilling_for_projects_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FinalizeRoutesBackfillingForProjects, :migration do
+ let(:batched_migrations) { table(:batched_background_migrations) }
+
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ shared_examples 'finalizes the migration' do
+ it 'finalizes the migration' do
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).to receive(:finalize).with('BackfillNamespaceIdForProjectRoute', :projects, :id, [])
+ end
+ end
+ end
+
+    context 'when the routes backfilling migration is missing' do
+      it 'warns that the migration was not found' do
+ expect(Gitlab::AppLogger)
+ .to receive(:warn).with(/Could not find batched background migration for the given configuration:/)
+
+ migrate!
+ end
+ end
+
+ context 'with backfilling migration present' do
+ let!(:project_namespace_backfill) do
+ batched_migrations.create!(
+ job_class_name: 'BackfillNamespaceIdForProjectRoute',
+ table_name: :routes,
+ column_name: :id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 200,
+ gitlab_schema: :gitlab_main,
+ status: 3 # finished
+ )
+ end
+
+ context 'when backfilling migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+ end
+
+ context 'with different backfilling migration statuses' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :description) do
+ 0 | 'paused'
+ 1 | 'active'
+ 4 | 'failed'
+ 5 | 'finalizing'
+ end
+
+ with_them do
+ before do
+ project_namespace_backfill.update!(status: status)
+ end
+
+ it_behaves_like 'finalizes the migration'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb b/spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb
new file mode 100644
index 00000000000..487d94b82a1
--- /dev/null
+++ b/spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillProjectFeaturePackageRegistryAccessLevel do
+ let_it_be(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :projects,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/remove_invalid_integrations_spec.rb b/spec/migrations/remove_invalid_integrations_spec.rb
new file mode 100644
index 00000000000..cab2d79998e
--- /dev/null
+++ b/spec/migrations/remove_invalid_integrations_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveInvalidIntegrations, :migration do
+ describe '#up' do
+ let!(:integrations) { table(:integrations) }
+
+ let!(:valid_integration) { integrations.create!(type_new: 'Foo') }
+ let!(:invalid_integration) { integrations.create! }
+
+ it 'removes invalid integrations', :aggregate_failures do
+ expect { migrate! }
+ .to change { integrations.pluck(:id) }.to(contain_exactly(valid_integration.id))
+ end
+
+ context 'when there are many invalid integrations' do
+ before do
+ stub_const('RemoveInvalidIntegrations::BATCH_SIZE', 3)
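+        # With BATCH_SIZE set to 3, the six invalid integrations (one above plus five here) span two delete batches.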
+ 5.times { integrations.create! }
+ end
+
+ it 'removes them all' do
+ migrate!
+
+ expect(integrations.pluck(:type_new)).to all(be_present)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
index fdafc4a5a89..198644fe183 100644
--- a/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
+++ b/spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require_migration!
-RSpec.describe RemoveNotNullContraintOnTitleFromSprints, :migration, schema: 20220304052335 do
+RSpec.describe RemoveNotNullContraintOnTitleFromSprints, :migration do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:sprints) { table(:sprints) }
diff --git a/spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb b/spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb
deleted file mode 100644
index 52bbd5b4f6e..00000000000
--- a/spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleMigratePagesToZipStorage, :sidekiq_might_not_need_inline, schema: 20210301200959 do
- let(:migration_class) { described_class::MIGRATION }
- let(:migration_name) { migration_class.to_s.demodulize }
-
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:metadata_table) { table(:project_pages_metadata) }
- let(:deployments_table) { table(:pages_deployments) }
-
- let(:namespace) { namespaces_table.create!(path: "group", name: "group") }
-
- def create_project_metadata(path, deployed, with_deployment)
- project = projects_table.create!(path: path, namespace_id: namespace.id)
-
- deployment_id = nil
-
- if with_deployment
- deployment_id = deployments_table.create!(project_id: project.id, file_store: 1, file: '1', file_count: 1, file_sha256: '123', size: 1).id
- end
-
- metadata_table.create!(project_id: project.id, deployed: deployed, pages_deployment_id: deployment_id)
- end
-
- it 'correctly schedules background migrations' do
- Sidekiq::Testing.fake! do
- freeze_time do
- create_project_metadata("not-deployed-project", false, false)
-
- first_id = create_project_metadata("project1", true, false).id
- last_id = create_project_metadata("project2", true, false).id
-
- create_project_metadata("project-with-deployment", true, true)
-
- migrate!
-
- expect(migration_name).to be_scheduled_delayed_migration(5.minutes, first_id, last_id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_populate_requirements_issue_id_spec.rb b/spec/migrations/schedule_populate_requirements_issue_id_spec.rb
new file mode 100644
index 00000000000..2702c000b60
--- /dev/null
+++ b/spec/migrations/schedule_populate_requirements_issue_id_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SchedulePopulateRequirementsIssueId do
+ include MigrationHelpers::WorkItemTypesHelper
+
+ let(:issues) { table(:issues) }
+ let(:requirements) { table(:requirements) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let!(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let!(:project_namespace) { namespaces.create!(name: 'project-namespace', path: 'project-namespace') }
+
+ let!(:project) do
+ projects.create!(namespace_id: group.id, project_namespace_id: project_namespace.id, name: 'gitlab', path: 'gitlab')
+ end
+
+ let(:migration) { described_class::MIGRATION }
+
+ let!(:author) do
+ users.create!(
+ email: 'author@example.com',
+ notification_email: 'author@example.com',
+ name: 'author',
+ username: 'author',
+ projects_limit: 10,
+ state: 'active')
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ end
+
+ it 'schedules jobs for all requirements without issues in sync' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+        # Restores the previous schema so we do not have a NOT NULL
+        # constraint on the requirements.issue_id column, which would
+        # prevent us from creating invalid requirement records.
+ migration_context.down(previous_migration(3).version)
+
+ requirement_1 = create_requirement(iid: 1, title: 'r 1')
+
+ # Create one requirement with issue_id present, to make
+ # sure a job won't be scheduled for it
+ work_item_type_id = work_item_types_table.find_by(namespace_id: nil, name: 'Issue').id
+ issue = issues.create!(state_id: 1, work_item_type_id: work_item_type_id)
+ create_requirement(iid: 2, title: 'r 2', issue_id: issue.id)
+
+ requirement_3 = create_requirement(iid: 3, title: 'r 3')
+ requirement_4 = create_requirement(iid: 4, title: 'r 4')
+ requirement_5 = create_requirement(iid: 5, title: 'r 5')
+
+ migrate!
+
+ expect(migration).to be_scheduled_delayed_migration(120.seconds, requirement_1.id, requirement_3.id)
+ expect(migration).to be_scheduled_delayed_migration(240.seconds, requirement_4.id, requirement_5.id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+
+ def create_requirement(iid:, title:, issue_id: nil)
+ requirements.create!(
+ iid: iid,
+ project_id: project.id,
+ issue_id: issue_id,
+ title: title,
+ state: 1,
+ created_at: Time.now,
+ updated_at: Time.now,
+ author_id: author.id)
+ end
+end
diff --git a/spec/migrations/schedule_purging_stale_security_scans_spec.rb b/spec/migrations/schedule_purging_stale_security_scans_spec.rb
new file mode 100644
index 00000000000..b5a38634b58
--- /dev/null
+++ b/spec/migrations/schedule_purging_stale_security_scans_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SchedulePurgingStaleSecurityScans do
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:pipelines) { table(:ci_pipelines) }
+ let_it_be(:builds) { table(:ci_builds) }
+ let_it_be(:security_scans) { table(:security_scans) }
+
+ let_it_be(:namespace) { namespaces.create!(name: "foo", path: "bar") }
+ let_it_be(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let_it_be(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
+ let_it_be(:ci_build) { builds.create!(commit_id: pipeline.id, retried: false, type: 'Ci::Build') }
+
+ let!(:security_scan_1) { security_scans.create!(build_id: ci_build.id, scan_type: 1, created_at: 92.days.ago) }
+ let!(:security_scan_2) { security_scans.create!(build_id: ci_build.id, scan_type: 2, created_at: 91.days.ago) }
+
+ let(:com?) { false }
+ let(:dev_or_test_env?) { false }
+
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(com?)
+ allow(::Gitlab).to receive(:dev_or_test_env?).and_return(dev_or_test_env?)
+
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
+ shared_examples_for 'schedules the background jobs' do
+ before do
+ # This will not be scheduled as it's not stale
+ security_scans.create!(build_id: ci_build.id, scan_type: 3)
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'creates 2 jobs', :aggregate_failures do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to be(2)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, security_scan_1.id, security_scan_1.id)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, security_scan_2.id, security_scan_2.id)
+ end
+ end
+
+ context 'when the migration does not run on GitLab.com or `dev_or_test_env`' do
+ it 'does not run the migration' do
+ expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size }
+ end
+ end
+
+ context 'when the migration runs on GitLab.com' do
+ let(:com?) { true }
+
+ it_behaves_like 'schedules the background jobs'
+ end
+
+ context 'when the migration runs on dev or test env' do
+ let(:dev_or_test_env?) { true }
+
+ it_behaves_like 'schedules the background jobs'
+ end
+end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 20cd96e831c..61f008416ea 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -85,12 +85,15 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_numericality_of(:container_registry_import_max_step_duration).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_pre_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
it { is_expected.to validate_numericality_of(:container_registry_import_timeout).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to validate_numericality_of(:container_registry_pre_import_tags_rate).is_greater_than_or_equal_to(0) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_tags_count) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_start_max_retries) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_max_step_duration) }
it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_timeout) }
it { is_expected.not_to allow_value(nil).for(:container_registry_import_timeout) }
+ it { is_expected.not_to allow_value(nil).for(:container_registry_pre_import_tags_rate) }
+ it { is_expected.to allow_value(1.5).for(:container_registry_pre_import_tags_rate) }
it { is_expected.to validate_presence_of(:container_registry_import_target_plan) }
it { is_expected.to validate_presence_of(:container_registry_import_created_before) }
@@ -551,11 +554,45 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value(*KeyRestrictionValidator.supported_key_restrictions(type)).for(field) }
it { is_expected.not_to allow_value(128).for(field) }
end
+ end
+ end
- it_behaves_like 'key validations'
+ describe '#ensure_key_restrictions!' do
+ context 'with non-compliant FIPS settings' do
+ before do
+ setting.update_columns(
+ rsa_key_restriction: 1024,
+ dsa_key_restriction: 0,
+ ecdsa_key_restriction: 521,
+ ed25519_key_restriction: -1,
+ ecdsa_sk_key_restriction: 0,
+ ed25519_sk_key_restriction: 0
+ )
+ end
- context 'FIPS mode', :fips_mode do
- it_behaves_like 'key validations'
+ context 'in non-FIPS mode', fips_mode: false do
+ it 'keeps existing key restrictions' do
+ expect { setting.ensure_key_restrictions! }.not_to change { setting.valid? }
+ expect(setting).to be_valid
+ expect(setting.rsa_key_restriction).to eq(1024)
+ expect(setting.dsa_key_restriction).to eq(0)
+ expect(setting.ecdsa_key_restriction).to eq(521)
+ expect(setting.ed25519_key_restriction).to eq(-1)
+ expect(setting.ecdsa_sk_key_restriction).to eq(0)
+ expect(setting.ed25519_sk_key_restriction).to eq(0)
+ end
+ end
+
+ context 'in FIPS mode', :fips_mode do
+ it 'updates key restrictions to meet FIPS compliance' do
+ expect { setting.ensure_key_restrictions! }.to change { setting.valid? }.from(false).to(true)
+ expect(setting.rsa_key_restriction).to eq(3072)
+ expect(setting.dsa_key_restriction).to eq(-1)
+ expect(setting.ecdsa_key_restriction).to eq(521)
+ expect(setting.ed25519_key_restriction).to eq(-1)
+ expect(setting.ecdsa_sk_key_restriction).to eq(256)
+ expect(setting.ed25519_sk_key_restriction).to eq(256)
+ end
end
end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 6f6a7c9bcd8..874009d552a 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns group pipelines' do
entity = build(:bulk_import_entity, :group_entity)
- expect(entity.pipelines.flatten).to include(BulkImports::Groups::Pipelines::GroupPipeline)
+ expect(entity.pipelines.collect { _1[:pipeline] }).to include(BulkImports::Groups::Pipelines::GroupPipeline)
end
end
@@ -168,29 +168,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns project pipelines' do
entity = build(:bulk_import_entity, :project_entity)
- expect(entity.pipelines.flatten).to include(BulkImports::Projects::Pipelines::ProjectPipeline)
- end
- end
- end
-
- describe '#create_pipeline_trackers!' do
- context 'when entity is group' do
- it 'creates trackers for group entity' do
- entity = create(:bulk_import_entity, :group_entity)
- entity.create_pipeline_trackers!
-
- expect(entity.trackers.count).to eq(BulkImports::Groups::Stage.new(entity).pipelines.count)
- expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Groups::Pipelines::GroupPipeline.to_s)
- end
- end
-
- context 'when entity is project' do
- it 'creates trackers for project entity' do
- entity = create(:bulk_import_entity, :project_entity)
- entity.create_pipeline_trackers!
-
- expect(entity.trackers.count).to eq(BulkImports::Projects::Stage.new(entity).pipelines.count)
- expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Projects::Pipelines::ProjectPipeline.to_s)
+ expect(entity.pipelines.collect { _1[:pipeline] }).to include(BulkImports::Projects::Pipelines::ProjectPipeline)
end
end
end
diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb
index 79ed6b39358..6ade82409dc 100644
--- a/spec/models/bulk_imports/export_status_spec.rb
+++ b/spec/models/bulk_imports/export_status_spec.rb
@@ -10,11 +10,9 @@ RSpec.describe BulkImports::ExportStatus do
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:response_double) do
- double(parsed_response: [{ 'relation' => 'labels', 'status' => status, 'error' => 'error!' }])
- end
-
- let(:invalid_response_double) do
- double(parsed_response: [{ 'relation' => 'not_a_real_relation', 'status' => status, 'error' => 'error!' }])
+ instance_double(HTTParty::Response,
+ parsed_response: [{ 'relation' => 'labels', 'status' => status, 'error' => 'error!' }]
+ )
end
subject { described_class.new(tracker, relation) }
@@ -40,22 +38,34 @@ RSpec.describe BulkImports::ExportStatus do
it 'returns false' do
expect(subject.started?).to eq(false)
end
+ end
- context 'when returned relation is invalid' do
- before do
- allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:get).and_return(invalid_response_double)
- end
- end
+ context 'when export status is not present' do
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: [])
+ end
- it 'returns false' do
- expect(subject.started?).to eq(false)
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
end
end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
end
end
- describe '#failed' do
+ describe '#failed?' do
context 'when export status is failed' do
let(:status) { BulkImports::Export::FAILED }
@@ -74,12 +84,67 @@ RSpec.describe BulkImports::ExportStatus do
context 'when export status is not present' do
let(:response_double) do
- double(parsed_response: [])
+ instance_double(HTTParty::Response, parsed_response: [])
+ end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
+ end
+ end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
+ end
+ end
+ end
+
+ describe '#empty?' do
+ context 'when export status is present' do
+ let(:status) { 'any status' }
+
+ it { expect(subject.empty?).to eq(false) }
+ end
+
+ context 'when export status is not present' do
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: [])
end
it 'returns true' do
- expect(subject.failed?).to eq(true)
- expect(subject.error).to eq('Empty relation export status')
+ expect(subject.empty?).to eq(true)
+ end
+ end
+
+ context 'when export status is empty' do
+ let(:response_double) do
+ instance_double(HTTParty::Response, parsed_response: nil)
+ end
+
+ it 'returns true' do
+ expect(subject.empty?).to eq(true)
+ end
+ end
+
+ context 'when something goes wrong during export status fetch' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).and_raise(
+ BulkImports::NetworkError.new("Unsuccessful response", response: nil)
+ )
+ end
+ end
+
+ it 'returns false' do
+ expect(subject.started?).to eq(false)
end
end
end
diff --git a/spec/models/bulk_imports/file_transfer/project_config_spec.rb b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
index 61caff647d6..0f02c5c546f 100644
--- a/spec/models/bulk_imports/file_transfer/project_config_spec.rb
+++ b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe BulkImports::FileTransfer::ProjectConfig do
describe '#file_relations' do
it 'returns project file relations' do
- expect(subject.file_relations).to contain_exactly('uploads', 'lfs_objects')
+ expect(subject.file_relations).to contain_exactly('uploads', 'lfs_objects', 'repository', 'design')
end
end
end
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index 0b6f692a477..1aa76d4dadd 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe BulkImports::Tracker, type: :model do
describe '#pipeline_class' do
it 'returns the pipeline class' do
entity = create(:bulk_import_entity)
- pipeline_class = BulkImports::Groups::Stage.new(entity).pipelines.first[1]
+ pipeline_class = BulkImports::Groups::Stage.new(entity).pipelines.first[:pipeline]
tracker = create(:bulk_import_tracker, pipeline_name: pipeline_class)
expect(tracker.pipeline_class).to eq(pipeline_class)
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 6409ea9fc3d..cb29cce554f 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -259,25 +259,16 @@ RSpec.describe Ci::Bridge do
context 'forward variables' do
using RSpec::Parameterized::TableSyntax
- where(:yaml_variables, :pipeline_variables, :ff, :variables) do
- nil | nil | true | %w[BRIDGE]
- nil | false | true | %w[BRIDGE]
- nil | true | true | %w[BRIDGE PVAR1]
- false | nil | true | %w[]
- false | false | true | %w[]
- false | true | true | %w[PVAR1]
- true | nil | true | %w[BRIDGE]
- true | false | true | %w[BRIDGE]
- true | true | true | %w[BRIDGE PVAR1]
- nil | nil | false | %w[BRIDGE]
- nil | false | false | %w[BRIDGE]
- nil | true | false | %w[BRIDGE]
- false | nil | false | %w[BRIDGE]
- false | false | false | %w[BRIDGE]
- false | true | false | %w[BRIDGE]
- true | nil | false | %w[BRIDGE]
- true | false | false | %w[BRIDGE]
- true | true | false | %w[BRIDGE]
+ where(:yaml_variables, :pipeline_variables, :variables) do
+ nil | nil | %w[BRIDGE]
+ nil | false | %w[BRIDGE]
+ nil | true | %w[BRIDGE PVAR1]
+ false | nil | %w[]
+ false | false | %w[]
+ false | true | %w[PVAR1]
+ true | nil | %w[BRIDGE]
+ true | false | %w[BRIDGE]
+ true | true | %w[BRIDGE PVAR1]
end
with_them do
@@ -292,10 +283,6 @@ RSpec.describe Ci::Bridge do
}
end
- before do
- stub_feature_flags(ci_trigger_forward_variables: ff)
- end
-
it 'returns variables according to the forward value' do
expect(bridge.downstream_variables.map { |v| v[:key] }).to contain_exactly(*variables)
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index dcf6915a01e..6ad6bb16eb5 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -294,31 +294,28 @@ RSpec.describe Ci::Build do
end
end
- describe '.with_reports' do
- subject { described_class.with_reports(Ci::JobArtifact.test_reports) }
+ describe '.with_artifacts' do
+ subject(:builds) { described_class.with_artifacts(artifact_scope) }
- context 'when build has a test report' do
- let!(:build) { create(:ci_build, :success, :test_reports) }
+ let(:artifact_scope) { Ci::JobArtifact.where(file_type: 'archive') }
- it 'selects the build' do
- is_expected.to eq([build])
- end
- end
+ let!(:build_1) { create(:ci_build, :artifacts) }
+ let!(:build_2) { create(:ci_build, :codequality_reports) }
+ let!(:build_3) { create(:ci_build, :test_reports) }
+ let!(:build_4) { create(:ci_build, :artifacts) }
- context 'when build does not have test reports' do
- let!(:build) { create(:ci_build, :success, :trace_artifact) }
-
- it 'does not select the build' do
- is_expected.to be_empty
- end
+    it 'returns builds matching the given artifact scope' do
+ expect(builds).to contain_exactly(build_1, build_4)
end
- context 'when there are multiple builds with test reports' do
- let!(:builds) { create_list(:ci_build, 5, :success, :test_reports) }
+ context 'when there are multiple builds containing artifacts' do
+ before do
+ create_list(:ci_build, 5, :success, :test_reports)
+ end
it 'does not execute a query for selecting job artifact one by one' do
recorded = ActiveRecord::QueryRecorder.new do
- subject.each do |build|
+ builds.each do |build|
build.job_artifacts.map { |a| a.file.exists? }
end
end
@@ -1367,7 +1364,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(Deployments::HooksWorker).to receive(:perform_async)
+ allow(deployment).to receive(:execute_hooks)
end
it 'has deployments record with created status' do
@@ -1423,7 +1420,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
- allow(Deployments::HooksWorker).to receive(:perform_async)
+ allow(deployment).to receive(:execute_hooks)
end
it_behaves_like 'avoid deadlock'
@@ -1509,14 +1506,28 @@ RSpec.describe Ci::Build do
it 'transitions to running and calls webhook' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
subject
end
expect(deployment).to be_running
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ subject
+ end
+ end
+ end
end
end
end
@@ -1830,6 +1841,27 @@ RSpec.describe Ci::Build do
end
context 'build is erasable' do
+ context 'when project is undergoing stats refresh' do
+ let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
+
+ describe '#erase' do
+ before do
+ allow(build.project).to receive(:refreshing_build_artifacts_size?).and_return(true)
+ end
+
+ it 'logs and continues with deleting the artifacts' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::Build#erase',
+ project_id: build.project.id
+ )
+
+ build.erase
+
+ expect(build.job_artifacts.count).to eq(0)
+ end
+ end
+ end
+
context 'new artifacts' do
let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
@@ -1924,6 +1956,23 @@ RSpec.describe Ci::Build do
expect(build.send("job_artifacts_#{file_type}")).not_to be_nil
end
end
+
+ context 'when the project is undergoing stats refresh' do
+ before do
+ allow(build.project).to receive(:refreshing_build_artifacts_size?).and_return(true)
+ end
+
+ it 'logs and continues with deleting the artifacts' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::Build#erase_erasable_artifacts!',
+ project_id: build.project.id
+ )
+
+ subject
+
+ expect(build.job_artifacts.erasable).to be_empty
+ end
+ end
end
describe '#first_pending' do
@@ -2757,6 +2806,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_PROJECT_ID', value: project.id.to_s, public: true, masked: false },
{ key: 'CI_PROJECT_NAME', value: project.path, public: true, masked: false },
{ key: 'CI_PROJECT_TITLE', value: project.title, public: true, masked: false },
+ { key: 'CI_PROJECT_DESCRIPTION', value: project.description, public: true, masked: false },
{ key: 'CI_PROJECT_PATH', value: project.full_path, public: true, masked: false },
{ key: 'CI_PROJECT_PATH_SLUG', value: project.full_path_slug, public: true, masked: false },
{ key: 'CI_PROJECT_NAMESPACE', value: project.namespace.full_path, public: true, masked: false },
@@ -3486,7 +3536,7 @@ RSpec.describe Ci::Build do
]
end
- context 'when gitlab-deploy-token exists' do
+ context 'when gitlab-deploy-token exists for project' do
before do
project.deploy_tokens << deploy_token
end
@@ -3496,11 +3546,32 @@ RSpec.describe Ci::Build do
end
end
- context 'when gitlab-deploy-token does not exist' do
+ context 'when gitlab-deploy-token does not exist for project' do
it 'does not include deploy token variables' do
expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
end
+
+ context 'when gitlab-deploy-token exists for group' do
+ before do
+ group.deploy_tokens << deploy_token
+ end
+
+ it 'includes deploy token variables' do
+ is_expected.to include(*deploy_token_variables)
+ end
+
+ context 'when the FF ci_variable_for_group_gitlab_deploy_token is disabled' do
+ before do
+ stub_feature_flags(ci_variable_for_group_gitlab_deploy_token: false)
+ end
+
+ it 'does not include deploy token variables' do
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
+ end
+ end
+ end
end
end
@@ -4298,6 +4369,56 @@ RSpec.describe Ci::Build do
end
end
end
+
+ context 'when build is part of parallel build' do
+ let(:build_1) { create(:ci_build, name: 'build 1/2') }
+ let(:test_report) { Gitlab::Ci::Reports::TestReports.new }
+
+ before do
+ build_1.collect_test_reports!(test_report)
+ end
+
+      it 'uses the group name for the test suite name' do
+ expect(test_report.test_suites.keys).to contain_exactly('build')
+ end
+
+      context 'when there is more than one parallel build' do
+ let(:build_2) { create(:ci_build, name: 'build 2/2') }
+
+ before do
+ build_2.collect_test_reports!(test_report)
+ end
+
+ it 'merges the test suite from parallel builds' do
+ expect(test_report.test_suites.keys).to contain_exactly('build')
+ end
+ end
+ end
+
+ context 'when build is part of matrix build' do
+ let(:test_report) { Gitlab::Ci::Reports::TestReports.new }
+ let(:matrix_build_1) { create(:ci_build, :matrix) }
+
+ before do
+ matrix_build_1.collect_test_reports!(test_report)
+ end
+
+ it 'uses the job name for the test suite' do
+ expect(test_report.test_suites.keys).to contain_exactly(matrix_build_1.name)
+ end
+
+      context 'when there is more than one matrix build' do
+ let(:matrix_build_2) { create(:ci_build, :matrix) }
+
+ before do
+ matrix_build_2.collect_test_reports!(test_report)
+ end
+
+ it 'keeps separate test suites' do
+ expect(test_report.test_suites.keys).to match_array([matrix_build_1.name, matrix_build_2.name])
+ end
+ end
+ end
end
describe '#collect_accessibility_reports!' do
@@ -4355,68 +4476,6 @@ RSpec.describe Ci::Build do
end
end
- describe '#collect_coverage_reports!' do
- subject { build.collect_coverage_reports!(coverage_report) }
-
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
-
- it { expect(coverage_report.files).to eq({}) }
-
- context 'when build has a coverage report' do
- context 'when there is a Cobertura coverage report from simplecov-cobertura' do
- before do
- create(:ci_job_artifact, :cobertura, job: build, project: build.project)
- end
-
- it 'parses blobs and add the results to the coverage report' do
- expect { subject }.not_to raise_error
-
- expect(coverage_report.files.keys).to match_array(['app/controllers/abuse_reports_controller.rb'])
- expect(coverage_report.files['app/controllers/abuse_reports_controller.rb'].count).to eq(23)
- end
- end
-
- context 'when there is a Cobertura coverage report from gocov-xml' do
- before do
- create(:ci_job_artifact, :coverage_gocov_xml, job: build, project: build.project)
- end
-
- it 'parses blobs and add the results to the coverage report' do
- expect { subject }.not_to raise_error
-
- expect(coverage_report.files.keys).to match_array(['auth/token.go', 'auth/rpccredentials.go'])
- expect(coverage_report.files['auth/token.go'].count).to eq(49)
- expect(coverage_report.files['auth/rpccredentials.go'].count).to eq(10)
- end
- end
-
- context 'when there is a Cobertura coverage report with class filename paths not relative to project root' do
- before do
- allow(build.project).to receive(:full_path).and_return('root/javademo')
- allow(build.pipeline).to receive(:all_worktree_paths).and_return(['src/main/java/com/example/javademo/User.java'])
-
- create(:ci_job_artifact, :coverage_with_paths_not_relative_to_project_root, job: build, project: build.project)
- end
-
- it 'parses blobs and add the results to the coverage report with corrected paths' do
- expect { subject }.not_to raise_error
-
- expect(coverage_report.files.keys).to match_array(['src/main/java/com/example/javademo/User.java'])
- end
- end
-
- context 'when there is a corrupted Cobertura coverage report' do
- before do
- create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: build.project)
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Ci::Parsers::Coverage::Cobertura::InvalidLineInformationError)
- end
- end
- end
- end
-
describe '#collect_codequality_reports!' do
subject(:codequality_report) { build.collect_codequality_reports!(Gitlab::Ci::Reports::CodequalityReports.new) }
@@ -4506,6 +4565,18 @@ RSpec.describe Ci::Build do
end
end
+ describe '#each_report' do
+ let(:report_types) { Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES }
+
+ let!(:codequality) { create(:ci_job_artifact, :codequality, job: build) }
+ let!(:coverage) { create(:ci_job_artifact, :coverage_gocov_xml, job: build) }
+ let!(:junit) { create(:ci_job_artifact, :junit, job: build) }
+
+ it 'yields job artifact blob that matches the type' do
+ expect { |b| build.each_report(report_types, &b) }.to yield_with_args(coverage.file_type, String, coverage)
+ end
+ end
+
describe '#report_artifacts' do
subject { build.report_artifacts }
@@ -4947,6 +5018,18 @@ RSpec.describe Ci::Build do
build.execute_hooks
end
+
+ context 'with blocked users' do
+ before do
+ allow(build).to receive(:user) { FactoryBot.build(:user, :blocked) }
+ end
+
+ it 'does not call project.execute_hooks' do
+ expect(build.project).not_to receive(:execute_hooks)
+
+ build.execute_hooks
+ end
+ end
end
context 'without project hooks' do
@@ -5410,6 +5493,19 @@ RSpec.describe Ci::Build do
subject
end
+ context 'with deployment' do
+ let(:environment) { create(:environment) }
+ let(:build) { create(:ci_build, :with_deployment, environment: environment.name, pipeline: pipeline) }
+
+ it 'updates the deployment status', :aggregate_failures do
+ expect(build.deployment).to receive(:sync_status_with).with(build).and_call_original
+
+ subject
+
+ expect(build.deployment.reload.status).to eq("failed")
+ end
+ end
+
context 'with queued builds' do
let(:traits) { [:queued] }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 24265242172..b9cac6c3f99 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -33,10 +33,10 @@ RSpec.describe Ci::JobArtifact do
end
end
- describe '.with_reports' do
+ describe '.all_reports' do
let!(:artifact) { create(:ci_job_artifact, :archive) }
- subject { described_class.with_reports }
+ subject { described_class.all_reports }
it { is_expected.to be_empty }
@@ -302,6 +302,42 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.created_at_before' do
+    it 'returns artifacts created before the given time' do
+ artifact1 = create(:ci_job_artifact, created_at: 1.day.ago)
+ _artifact2 = create(:ci_job_artifact, created_at: 1.day.from_now)
+
+ expect(described_class.created_at_before(Time.current)).to match_array([artifact1])
+ end
+ end
+
+ describe '.id_before' do
+    it 'returns artifacts with IDs at or before the given ID' do
+ artifact1 = create(:ci_job_artifact)
+ artifact2 = create(:ci_job_artifact)
+
+ expect(described_class.id_before(artifact2.id)).to match_array([artifact1, artifact2])
+ end
+ end
+
+ describe '.id_after' do
+    it 'returns artifacts with IDs after the given ID' do
+ artifact1 = create(:ci_job_artifact)
+ artifact2 = create(:ci_job_artifact)
+
+ expect(described_class.id_after(artifact1.id)).to match_array([artifact2])
+ end
+ end
+
+ describe '.ordered_by_id' do
+ it 'returns artifacts in asc order' do
+ artifact1 = create(:ci_job_artifact)
+ artifact2 = create(:ci_job_artifact)
+
+ expect(described_class.ordered_by_id).to eq([artifact1, artifact2])
+ end
+ end
+
describe 'callbacks' do
describe '#schedule_background_upload' do
subject { create(:ci_job_artifact, :archive) }
diff --git a/spec/models/ci/namespace_mirror_spec.rb b/spec/models/ci/namespace_mirror_spec.rb
index 9b4e86916b8..3e77c349ccb 100644
--- a/spec/models/ci/namespace_mirror_spec.rb
+++ b/spec/models/ci/namespace_mirror_spec.rb
@@ -151,10 +151,9 @@ RSpec.describe Ci::NamespaceMirror do
it_behaves_like 'changing the middle namespace'
- context 'when the FFs sync_traversal_ids, use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
+ context 'when the FFs use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
before do
- stub_feature_flags(sync_traversal_ids: false,
- use_traversal_ids: false,
+ stub_feature_flags(use_traversal_ids: false,
use_traversal_ids_for_ancestors: false)
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 8dc041814fa..31752f300f4 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -73,6 +73,17 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#latest_successful_builds' do
+    it 'has a one-to-many relationship with its latest successful builds' do
+ _old_build = create(:ci_build, :retried, pipeline: pipeline)
+ _expired_build = create(:ci_build, :expired, pipeline: pipeline)
+ _failed_builds = create_list(:ci_build, 2, :failed, pipeline: pipeline)
+ successful_builds = create_list(:ci_build, 2, :success, pipeline: pipeline)
+
+ expect(pipeline.latest_successful_builds).to contain_exactly(successful_builds.first, successful_builds.second)
+ end
+ end
+
describe '#downloadable_artifacts' do
let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
let_it_be(:downloadable_artifact) { create(:ci_job_artifact, :codequality, job: build) }
@@ -3045,7 +3056,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'hooks triggering' do
- let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
+ let_it_be_with_reload(:pipeline) { create(:ci_empty_pipeline, :created) }
%i[
enqueue
@@ -3065,7 +3076,19 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it 'schedules a new PipelineHooksWorker job' do
expect(PipelineHooksWorker).to receive(:perform_async).with(pipeline.id)
- pipeline.reload.public_send(pipeline_action)
+ pipeline.public_send(pipeline_action)
+ end
+
+ context 'with blocked users' do
+ before do
+ allow(pipeline).to receive(:user) { build(:user, :blocked) }
+ end
+
+ it 'does not schedule a new PipelineHooksWorker job' do
+ expect(PipelineHooksWorker).not_to receive(:perform_async)
+
+ pipeline.public_send(pipeline_action)
+ end
end
end
end
@@ -3625,6 +3648,18 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
pipeline.succeed!
end
end
+
+ context 'when the user is blocked' do
+ before do
+ pipeline.user.block!
+ end
+
+ it 'does not enqueue PipelineNotificationWorker' do
+ expect(PipelineNotificationWorker).not_to receive(:perform_async)
+
+ pipeline.succeed
+ end
+ end
end
context 'with failed pipeline' do
@@ -3645,6 +3680,18 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
pipeline.drop
end
+
+ context 'when the user is blocked' do
+ before do
+ pipeline.user.block!
+ end
+
+ it 'does not enqueue PipelineNotificationWorker' do
+ expect(PipelineNotificationWorker).not_to receive(:perform_async)
+
+ pipeline.drop
+ end
+ end
end
context 'with skipped pipeline' do
@@ -3842,6 +3889,34 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#latest_report_builds_in_self_and_descendants' do
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let_it_be(:grandchild_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
+
+    it 'returns builds with report artifacts from pipelines in the hierarchy' do
+ parent_build = create(:ci_build, :test_reports, pipeline: pipeline)
+ child_build = create(:ci_build, :coverage_reports, pipeline: child_pipeline)
+ grandchild_build = create(:ci_build, :codequality_reports, pipeline: grandchild_pipeline)
+
+ expect(pipeline.latest_report_builds_in_self_and_descendants).to contain_exactly(parent_build, child_build, grandchild_build)
+ end
+
+ it 'filters builds by scope' do
+ create(:ci_build, :test_reports, pipeline: pipeline)
+ grandchild_build = create(:ci_build, :codequality_reports, pipeline: grandchild_pipeline)
+
+ expect(pipeline.latest_report_builds_in_self_and_descendants(Ci::JobArtifact.codequality_reports)).to contain_exactly(grandchild_build)
+ end
+
+ it 'only returns builds that are not retried' do
+ create(:ci_build, :codequality_reports, :retried, pipeline: grandchild_pipeline)
+ grandchild_build = create(:ci_build, :codequality_reports, pipeline: grandchild_pipeline)
+
+ expect(pipeline.latest_report_builds_in_self_and_descendants).to contain_exactly(grandchild_build)
+ end
+ end
+
describe '#has_reports?' do
subject { pipeline.has_reports?(Ci::JobArtifact.test_reports) }
@@ -3900,38 +3975,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#can_generate_coverage_reports?' do
- subject { pipeline.can_generate_coverage_reports? }
-
- context 'when pipeline has builds with coverage reports' do
- before do
- create(:ci_build, :coverage_reports, pipeline: pipeline)
- end
-
- context 'when pipeline status is running' do
- let(:pipeline) { create(:ci_pipeline, :running) }
-
- it { expect(subject).to be_falsey }
- end
-
- context 'when pipeline status is success' do
- let(:pipeline) { create(:ci_pipeline, :success) }
-
- it { expect(subject).to be_truthy }
- end
- end
-
- context 'when pipeline does not have builds with coverage reports' do
- before do
- create(:ci_build, :artifacts, pipeline: pipeline)
- end
-
- let(:pipeline) { create(:ci_pipeline, :success) }
-
- it { expect(subject).to be_falsey }
- end
- end
-
describe '#has_codequality_mr_diff_report?' do
subject { pipeline.has_codequality_mr_diff_report? }
@@ -4082,55 +4125,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#coverage_reports' do
- subject { pipeline.coverage_reports }
-
- let_it_be(:pipeline) { create(:ci_pipeline) }
-
- context 'when pipeline has multiple builds with coverage reports' do
- let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline) }
- let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline) }
-
- before do
- create(:ci_job_artifact, :cobertura, job: build_rspec)
- create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
- end
-
- it 'returns coverage reports with collected data' do
- expect(subject.files.keys).to match_array([
- "auth/token.go",
- "auth/rpccredentials.go",
- "app/controllers/abuse_reports_controller.rb"
- ])
- end
-
- it 'does not execute N+1 queries' do
- single_build_pipeline = create(:ci_empty_pipeline, :created)
- single_rspec = create(:ci_build, :success, name: 'rspec', pipeline: single_build_pipeline)
- create(:ci_job_artifact, :cobertura, job: single_rspec, project: project)
-
- control = ActiveRecord::QueryRecorder.new { single_build_pipeline.coverage_reports }
-
- expect { subject }.not_to exceed_query_limit(control)
- end
-
- context 'when builds are retried' do
- let!(:build_rspec) { create(:ci_build, :retried, :success, name: 'rspec', pipeline: pipeline) }
- let!(:build_golang) { create(:ci_build, :retried, :success, name: 'golang', pipeline: pipeline) }
-
- it 'does not take retried builds into account' do
- expect(subject.files).to eql({})
- end
- end
- end
-
- context 'when pipeline does not have any builds with coverage reports' do
- it 'returns empty coverage reports' do
- expect(subject.files).to eql({})
- end
- end
- end
-
describe '#codequality_reports' do
subject(:codequality_reports) { pipeline.codequality_reports }
@@ -4839,9 +4833,9 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#has_expired_test_reports?' do
- subject { pipeline_with_test_report.has_expired_test_reports? }
+ subject { pipeline.has_expired_test_reports? }
- let(:pipeline_with_test_report) { create(:ci_pipeline, :with_test_reports) }
+ let(:pipeline) { create(:ci_pipeline, :success, :with_test_reports) }
context 'when artifacts are not expired' do
it { is_expected.to be_falsey }
@@ -4849,11 +4843,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when artifacts are expired' do
before do
- pipeline_with_test_report.job_artifacts.first.update!(expire_at: Date.yesterday)
+ pipeline.job_artifacts.first.update!(expire_at: Date.yesterday)
end
it { is_expected.to be_truthy }
end
+
+ context 'when the pipeline is still running' do
+ let(:pipeline) { create(:ci_pipeline, :running) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the pipeline is completed without test reports' do
+ let(:pipeline) { create(:ci_pipeline, :success) }
+
+ it { is_expected.to be_falsey }
+ end
end
it_behaves_like 'it has loose foreign keys' do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 8a1dcbfbdeb..74d8b012b29 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1002,8 +1002,11 @@ RSpec.describe Ci::Runner do
describe '#heartbeat' do
let(:runner) { create(:ci_runner, :project) }
let(:executor) { 'shell' }
+ let(:version) { '15.0.1' }
- subject { runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }, executor: executor) }
+ subject(:heartbeat) do
+ runner.heartbeat(architecture: '18-bit', config: { gpus: "all" }, executor: executor, version: version)
+ end
context 'when database was updated recently' do
before do
@@ -1013,7 +1016,7 @@ RSpec.describe Ci::Runner do
it 'updates cache' do
expect_redis_update
- subject
+ heartbeat
end
end
@@ -1047,7 +1050,7 @@ RSpec.describe Ci::Runner do
it 'updates with expected executor type' do
expect_redis_update
- subject
+ heartbeat
expect(runner.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
end
@@ -1059,6 +1062,18 @@ RSpec.describe Ci::Runner do
end
end
end
+
+ context 'with updated version' do
+ before do
+ runner.version = '1.2.3'
+ end
+
+ it 'updates version components with new version' do
+ heartbeat
+
+ expect(runner.reload.read_attribute(:semver)).to eq '15.0.1'
+ end
+ end
end
def expect_redis_update
@@ -1069,10 +1084,11 @@ RSpec.describe Ci::Runner do
end
def does_db_update
- expect { subject }.to change { runner.reload.read_attribute(:contacted_at) }
+ expect { heartbeat }.to change { runner.reload.read_attribute(:contacted_at) }
.and change { runner.reload.read_attribute(:architecture) }
.and change { runner.reload.read_attribute(:config) }
.and change { runner.reload.read_attribute(:executor_type) }
+ .and change { runner.reload.read_attribute(:semver) }
end
end
@@ -1683,4 +1699,42 @@ RSpec.describe Ci::Runner do
end
end
end
+
+ describe '.save' do
+ context 'with initial value' do
+ let(:runner) { create(:ci_runner, version: 'v1.2.3') }
+
+ it 'updates semver column' do
+ expect(runner.semver).to eq '1.2.3'
+ end
+ end
+
+ context 'with no initial version value' do
+ let(:runner) { build(:ci_runner) }
+
+ context 'with version change' do
+ subject(:update_version) { runner.update!(version: new_version) }
+
+ context 'to invalid version' do
+ let(:new_version) { 'invalid version' }
+
+ it 'updates semver column to nil' do
+ update_version
+
+ expect(runner.reload.semver).to be_nil
+ end
+ end
+
+ context 'to v14.10.1' do
+ let(:new_version) { 'v14.10.1' }
+
+ it 'updates semver column' do
+ update_version
+
+ expect(runner.reload.semver).to eq '14.10.1'
+ end
+ end
+ end
+ end
+ end
end
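
The new `.save` and `#heartbeat` examples above only pin down behaviour: a version such as 'v14.10.1' yields a semver column value of '14.10.1', and an unparseable string yields nil. A minimal sketch of a callback that would satisfy those expectations, assuming a plain regex approach (hypothetical; the actual Ci::Runner implementation is not part of this diff):

# Hypothetical sketch - not the real Ci::Runner code.
module SemverFromVersion
  extend ActiveSupport::Concern

  # Accepts 'v1.2.3' or '1.2.3'; anything else maps to nil.
  SEMVER_PATTERN = /\Av?(\d+\.\d+\.\d+)/.freeze

  included do
    before_save :set_semver, if: :version_changed?
  end

  private

  def set_semver
    self.semver = version&.match(SEMVER_PATTERN)&.captures&.first
  end
end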
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index 40ddafad013..a3f1c7b7ef7 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -48,6 +48,21 @@ RSpec.describe Ci::SecureFile do
end
end
+ describe 'ordered scope' do
+ it 'returns the newest item first' do
+ project = create(:project)
+ file1 = create(:ci_secure_file, created_at: 1.week.ago, project: project)
+ file2 = create(:ci_secure_file, created_at: 2.days.ago, project: project)
+ file3 = create(:ci_secure_file, created_at: 1.day.ago, project: project)
+
+ files = project.secure_files.order_by_created_at
+
+ expect(files[0]).to eq(file3)
+ expect(files[1]).to eq(file2)
+ expect(files[2]).to eq(file1)
+ end
+ end
+
describe '#checksum' do
it 'computes SHA256 checksum on the file before encrypted' do
expect(subject.checksum).to eq(Digest::SHA256.hexdigest(sample_file))
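
The 'ordered scope' example above asserts newest-first ordering via `order_by_created_at`; a scope along these lines would satisfy it (a sketch, not necessarily the actual Ci::SecureFile definition):

# Newest records first, matching the expectation in the spec above.
scope :order_by_created_at, -> { order(created_at: :desc) }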
diff --git a/spec/models/ci/sources/pipeline_spec.rb b/spec/models/ci/sources/pipeline_spec.rb
index 73f7cfa739f..732dd5c3df3 100644
--- a/spec/models/ci/sources/pipeline_spec.rb
+++ b/spec/models/ci/sources/pipeline_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Ci::Sources::Pipeline do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:pipeline) }
- it { is_expected.to belong_to(:source_project) }
+ it { is_expected.to belong_to(:source_project).class_name('::Project') }
it { is_expected.to belong_to(:source_job) }
it { is_expected.to belong_to(:source_bridge) }
it { is_expected.to belong_to(:source_pipeline) }
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
index f10e0cc8fa7..de67bdb32aa 100644
--- a/spec/models/clusters/agent_spec.rb
+++ b/spec/models/clusters/agent_spec.rb
@@ -7,8 +7,7 @@ RSpec.describe Clusters::Agent do
it { is_expected.to belong_to(:created_by_user).class_name('User').optional }
it { is_expected.to belong_to(:project).class_name('::Project') }
- it { is_expected.to have_many(:agent_tokens).class_name('Clusters::AgentToken') }
- it { is_expected.to have_many(:last_used_agent_tokens).class_name('Clusters::AgentToken') }
+ it { is_expected.to have_many(:agent_tokens).class_name('Clusters::AgentToken').order(Clusters::AgentToken.arel_table[:last_used_at].desc.nulls_last) }
it { is_expected.to have_many(:group_authorizations).class_name('Clusters::Agents::GroupAuthorization') }
it { is_expected.to have_many(:authorized_groups).through(:group_authorizations) }
it { is_expected.to have_many(:project_authorizations).class_name('Clusters::Agents::ProjectAuthorization') }
@@ -41,6 +40,39 @@ RSpec.describe Clusters::Agent do
it { is_expected.to contain_exactly(matching_name) }
end
+
+ describe '.has_vulnerabilities' do
+ let_it_be(:without_vulnerabilities) { create(:cluster_agent, has_vulnerabilities: false) }
+ let_it_be(:with_vulnerabilities) { create(:cluster_agent, has_vulnerabilities: true) }
+
+ context 'when value is not provided' do
+ subject { described_class.has_vulnerabilities }
+
+ it 'returns agents which have vulnerabilities' do
+ is_expected.to contain_exactly(with_vulnerabilities)
+ end
+ end
+
+ context 'when value is provided' do
+ subject { described_class.has_vulnerabilities(value) }
+
+ context 'as true' do
+ let(:value) { true }
+
+ it 'returns agents which have vulnerabilities' do
+ is_expected.to contain_exactly(with_vulnerabilities)
+ end
+ end
+
+ context 'as false' do
+ let(:value) { false }
+
+ it 'returns agents which do not have vulnerabilities' do
+ is_expected.to contain_exactly(without_vulnerabilities)
+ end
+ end
+ end
+ end
end
describe 'validation' do
@@ -117,23 +149,6 @@ RSpec.describe Clusters::Agent do
end
end
- describe '#last_used_agent_tokens' do
- let_it_be(:agent) { create(:cluster_agent) }
-
- subject { agent.last_used_agent_tokens }
-
- context 'agent has no tokens' do
- it { is_expected.to be_empty }
- end
-
- context 'agent has active and inactive tokens' do
- let!(:active_token) { create(:cluster_agent_token, agent: agent, last_used_at: 1.minute.ago) }
- let!(:inactive_token) { create(:cluster_agent_token, agent: agent, last_used_at: 2.hours.ago) }
-
- it { is_expected.to contain_exactly(active_token, inactive_token) }
- end
- end
-
describe '#activity_event_deletion_cutoff' do
let_it_be(:agent) { create(:cluster_agent) }
let_it_be(:event1) { create(:agent_activity_event, agent: agent, recorded_at: 1.hour.ago) }
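
The `.has_vulnerabilities` examples above imply a scope that defaults to true and also accepts an explicit boolean; a minimal sketch under that assumption (not the actual Clusters::Agent code):

# No argument => only agents flagged with vulnerabilities; pass false to invert.
scope :has_vulnerabilities, ->(value = true) { where(has_vulnerabilities: value) }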
diff --git a/spec/models/clusters/cluster_enabled_grant_spec.rb b/spec/models/clusters/cluster_enabled_grant_spec.rb
new file mode 100644
index 00000000000..1418d854b41
--- /dev/null
+++ b/spec/models/clusters/cluster_enabled_grant_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::ClusterEnabledGrant do
+ it { is_expected.to belong_to :namespace }
+end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index d61bed80aaa..30591a3ff5d 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -50,6 +50,10 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to respond_to :project }
it { is_expected.to be_namespace_per_environment }
+ it_behaves_like 'it has loose foreign keys' do
+ let(:factory_name) { :cluster }
+ end
+
describe 'applications have inverse_of: :cluster option' do
let(:cluster) { create(:cluster) }
let!(:helm) { create(:clusters_applications_helm, cluster: cluster) }
diff --git a/spec/models/clusters/integrations/prometheus_spec.rb b/spec/models/clusters/integrations/prometheus_spec.rb
index e529c751889..d1e40fffee0 100644
--- a/spec/models/clusters/integrations/prometheus_spec.rb
+++ b/spec/models/clusters/integrations/prometheus_spec.rb
@@ -21,11 +21,24 @@ RSpec.describe Clusters::Integrations::Prometheus do
let(:cluster) { create(:cluster, :with_installed_helm) }
it 'deactivates prometheus_integration' do
- expect(Clusters::Applications::DeactivateServiceWorker)
+ expect(Clusters::Applications::DeactivateIntegrationWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
integration.destroy!
end
+
+ context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+ it 'uses the old worker' do
+ expect(Clusters::Applications::DeactivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ integration.destroy!
+ end
+ end
end
describe 'after_save' do
@@ -38,10 +51,10 @@ RSpec.describe Clusters::Integrations::Prometheus do
it 'does not touch project integrations' do
integration # ensure integration exists before we set the expectations
- expect(Clusters::Applications::DeactivateServiceWorker)
+ expect(Clusters::Applications::DeactivateIntegrationWorker)
.not_to receive(:perform_async)
- expect(Clusters::Applications::ActivateServiceWorker)
+ expect(Clusters::Applications::ActivateIntegrationWorker)
.not_to receive(:perform_async)
integration.update!(enabled: enabled)
@@ -51,19 +64,32 @@ RSpec.describe Clusters::Integrations::Prometheus do
context 'when enabling' do
let(:enabled) { false }
- it 'deactivates prometheus_integration' do
- expect(Clusters::Applications::ActivateServiceWorker)
+ it 'activates prometheus_integration' do
+ expect(Clusters::Applications::ActivateIntegrationWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
integration.update!(enabled: true)
end
+
+ context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+ it 'uses the old worker' do
+ expect(Clusters::Applications::ActivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ integration.update!(enabled: true)
+ end
+ end
end
context 'when disabling' do
let(:enabled) { true }
it 'activates prometheus_integration' do
- expect(Clusters::Applications::DeactivateServiceWorker)
+ expect(Clusters::Applications::DeactivateIntegrationWorker)
.to receive(:perform_async).with(cluster.id, 'prometheus')
integration.update!(enabled: false)
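
The feature-flag contexts above suggest the model picks its worker class based on `:rename_integrations_workers`; a hedged sketch of that selection (assumed shape only, not the actual integration code):

# Assumed shape: keep the legacy *ServiceWorker while the flag is disabled.
def deactivation_worker_class
  if Feature.enabled?(:rename_integrations_workers)
    Clusters::Applications::DeactivateIntegrationWorker
  else
    Clusters::Applications::DeactivateServiceWorker
  end
end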
diff --git a/spec/models/commit_signatures/gpg_signature_spec.rb b/spec/models/commit_signatures/gpg_signature_spec.rb
index 9646e974f40..6ae2a202b72 100644
--- a/spec/models/commit_signatures/gpg_signature_spec.rb
+++ b/spec/models/commit_signatures/gpg_signature_spec.rb
@@ -3,17 +3,26 @@
require 'spec_helper'
RSpec.describe CommitSignatures::GpgSignature do
+ # This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
+ # For instructions on how to add more seed data, see the project README
let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create(:project, :repository, path: 'sample-project') }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
- let(:gpg_signature) { create(:gpg_signature, commit_sha: commit_sha) }
+ let(:signature) { create(:gpg_signature, commit_sha: commit_sha) }
let(:gpg_key) { create(:gpg_key) }
let(:gpg_key_subkey) { create(:gpg_key_subkey) }
+ let(:attributes) do
+ {
+ commit_sha: commit_sha,
+ project: project,
+ gpg_key_primary_keyid: gpg_key.keyid
+ }
+ end
it_behaves_like 'having unique enum values'
+ it_behaves_like 'commit signature'
describe 'associations' do
- it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:gpg_key) }
it { is_expected.to belong_to(:gpg_key_subkey) }
end
@@ -22,104 +31,56 @@ RSpec.describe CommitSignatures::GpgSignature do
subject { described_class.new }
it { is_expected.to validate_presence_of(:commit_sha) }
- it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:gpg_key_primary_keyid) }
end
- describe '.safe_create!' do
- let(:attributes) do
- {
- commit_sha: commit_sha,
- project: project,
- gpg_key_primary_keyid: gpg_key.keyid
- }
- end
-
- it 'finds a signature by commit sha if it existed' do
- gpg_signature
-
- expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(gpg_signature)
- end
-
- it 'creates a new signature if it was not found' do
- expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
- end
-
- it 'assigns the correct attributes when creating' do
- signature = described_class.safe_create!(attributes)
-
- expect(signature.project).to eq(project)
- expect(signature.commit_sha).to eq(commit_sha)
- expect(signature.gpg_key_primary_keyid).to eq(gpg_key.keyid)
- end
-
- it 'does not raise an error in case of a race condition' do
- expect(described_class).to receive(:find_by).and_return(nil, double(described_class, persisted?: true))
-
- expect(described_class).to receive(:create).and_raise(ActiveRecord::RecordNotUnique)
- allow(described_class).to receive(:create).and_call_original
-
- described_class.safe_create!(attributes)
- end
- end
-
describe '.by_commit_sha scope' do
let(:gpg_key) { create(:gpg_key, key: GpgHelpers::User2.public_key) }
let!(:another_gpg_signature) { create(:gpg_signature, gpg_key: gpg_key) }
it 'returns all gpg signatures by sha' do
- expect(described_class.by_commit_sha(commit_sha)).to eq([gpg_signature])
+ expect(described_class.by_commit_sha(commit_sha)).to match_array([signature])
expect(
described_class.by_commit_sha([commit_sha, another_gpg_signature.commit_sha])
- ).to contain_exactly(gpg_signature, another_gpg_signature)
- end
- end
-
- describe '#commit' do
- it 'fetches the commit through the project' do
- expect_next_instance_of(Project) do |instance|
- expect(instance).to receive(:commit).with(commit_sha).and_return(commit)
- end
-
- gpg_signature.commit
+ ).to contain_exactly(signature, another_gpg_signature)
end
end
describe '#gpg_key=' do
it 'supports the assignment of a GpgKey' do
- gpg_signature = create(:gpg_signature, gpg_key: gpg_key)
+ signature = create(:gpg_signature, gpg_key: gpg_key)
- expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKey)
+ expect(signature.gpg_key).to be_an_instance_of(GpgKey)
end
it 'supports the assignment of a GpgKeySubkey' do
- gpg_signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
+ signature = create(:gpg_signature, gpg_key: gpg_key_subkey)
- expect(gpg_signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
+ expect(signature.gpg_key).to be_an_instance_of(GpgKeySubkey)
end
it 'clears gpg_key and gpg_key_subkey_id when passing nil' do
- gpg_signature.update_attribute(:gpg_key, nil)
+ signature.update_attribute(:gpg_key, nil)
- expect(gpg_signature.gpg_key_id).to be_nil
- expect(gpg_signature.gpg_key_subkey_id).to be_nil
+ expect(signature.gpg_key_id).to be_nil
+ expect(signature.gpg_key_subkey_id).to be_nil
end
end
describe '#gpg_commit' do
context 'when commit does not exist' do
it 'returns nil' do
- allow(gpg_signature).to receive(:commit).and_return(nil)
+ allow(signature).to receive(:commit).and_return(nil)
- expect(gpg_signature.gpg_commit).to be_nil
+ expect(signature.gpg_commit).to be_nil
end
end
context 'when commit exists' do
it 'returns an instance of Gitlab::Gpg::Commit' do
- allow(gpg_signature).to receive(:commit).and_return(commit)
+ allow(signature).to receive(:commit).and_return(commit)
- expect(gpg_signature.gpg_commit).to be_an_instance_of(Gitlab::Gpg::Commit)
+ expect(signature.gpg_commit).to be_an_instance_of(Gitlab::Gpg::Commit)
end
end
end
diff --git a/spec/models/commit_signatures/ssh_signature_spec.rb b/spec/models/commit_signatures/ssh_signature_spec.rb
new file mode 100644
index 00000000000..ac4496e9d8c
--- /dev/null
+++ b/spec/models/commit_signatures/ssh_signature_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CommitSignatures::SshSignature do
+ # This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
+ # For instructions on how to add more seed data, see the project README
+ let(:commit_sha) { '7b5160f9bb23a3d58a0accdbe89da13b96b1ece9' }
+ let!(:project) { create(:project, :repository, path: 'sample-project') }
+ let!(:commit) { create(:commit, project: project, sha: commit_sha) }
+ let(:signature) { create(:ssh_signature, commit_sha: commit_sha) }
+ let(:ssh_key) { create(:ed25519_key_256) }
+ let(:attributes) do
+ {
+ commit_sha: commit_sha,
+ project: project,
+ key: ssh_key
+ }
+ end
+
+ it_behaves_like 'having unique enum values'
+ it_behaves_like 'commit signature'
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:key).required }
+ end
+
+ describe '.by_commit_sha scope' do
+ let!(:another_signature) { create(:ssh_signature, commit_sha: '0000000000000000000000000000000000000001') }
+
+ it 'returns all signatures by sha' do
+ expect(described_class.by_commit_sha(commit_sha)).to match_array([signature])
+ expect(
+ described_class.by_commit_sha([commit_sha, another_signature.commit_sha])
+ ).to contain_exactly(signature, another_signature)
+ end
+ end
+end
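
Both the GPG and SSH specs now rely on the shared 'commit signature' examples plus the `signature` and `attributes` lets. Reconstructed from the `.safe_create!` examples removed from the GPG and X509 specs, the shared group plausibly contains something like the following (an assumption, not the actual shared_examples file):

# Plausible reconstruction based on the removed per-model examples.
RSpec.shared_examples 'commit signature' do
  describe '.safe_create!' do
    it 'finds a signature by commit sha if it existed' do
      signature

      expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(signature)
    end

    it 'creates a new signature if it was not found' do
      expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
    end
  end
end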
diff --git a/spec/models/commit_signatures/x509_commit_signature_spec.rb b/spec/models/commit_signatures/x509_commit_signature_spec.rb
index 076f209e1b7..beb101cdd89 100644
--- a/spec/models/commit_signatures/x509_commit_signature_spec.rb
+++ b/spec/models/commit_signatures/x509_commit_signature_spec.rb
@@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe CommitSignatures::X509CommitSignature do
+ # This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
+ # For instructions on how to add more seed data, see the project README
let(:commit_sha) { '189a6c924013fc3fe40d6f1ec1dc20214183bc97' }
let(:project) { create(:project, :public, :repository) }
let!(:commit) { create(:commit, project: project, sha: commit_sha) }
let(:x509_certificate) { create(:x509_certificate) }
- let(:x509_signature) { create(:x509_commit_signature, commit_sha: commit_sha) }
+ let(:signature) { create(:x509_commit_signature, commit_sha: commit_sha) }
let(:attributes) do
{
@@ -19,38 +21,16 @@ RSpec.describe CommitSignatures::X509CommitSignature do
end
it_behaves_like 'having unique enum values'
+ it_behaves_like 'commit signature'
describe 'validation' do
- it { is_expected.to validate_presence_of(:commit_sha) }
- it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_presence_of(:x509_certificate_id) }
end
describe 'associations' do
- it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:x509_certificate).required }
end
- describe '.safe_create!' do
- it 'finds a signature by commit sha if it existed' do
- x509_signature
-
- expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(x509_signature)
- end
-
- it 'creates a new signature if it was not found' do
- expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
- end
-
- it 'assigns the correct attributes when creating' do
- signature = described_class.safe_create!(attributes)
-
- expect(signature.project).to eq(project)
- expect(signature.commit_sha).to eq(commit_sha)
- expect(signature.x509_certificate_id).to eq(x509_certificate.id)
- end
- end
-
describe '#user' do
context 'if email is assigned to a user' do
let!(:user) { create(:user, email: X509Helpers::User1.certificate_email) }
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 7c67b9a3d63..187be557064 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -746,7 +746,7 @@ eos
end
end
- describe '#work_in_progress?' do
+ describe '#draft?' do
[
'squash! ', 'fixup! ',
'draft: ', '[Draft] ', '(draft) ', 'Draft: '
@@ -754,21 +754,21 @@ eos
it "detects the '#{draft_prefix}' prefix" do
commit.message = "#{draft_prefix}#{commit.message}"
- expect(commit).to be_work_in_progress
+ expect(commit).to be_draft
end
end
- it "does not detect WIP for a commit just saying 'draft'" do
+ it "does not detect a commit just saying 'draft' as draft? == true" do
commit.message = "draft"
- expect(commit).not_to be_work_in_progress
+ expect(commit).not_to be_draft
end
["FIXUP!", "Draft - ", "Wipeout", "WIP: ", "[WIP] ", "wip: "].each do |draft_prefix|
it "doesn't detect '#{draft_prefix}' at the start of the title as a draft" do
commit.message = "#{draft_prefix} #{commit.message}"
- expect(commit).not_to be_work_in_progress
+ expect(commit).not_to be_draft
end
end
end
diff --git a/spec/models/compare_spec.rb b/spec/models/compare_spec.rb
index 86bab569ab0..0035fb8468a 100644
--- a/spec/models/compare_spec.rb
+++ b/spec/models/compare_spec.rb
@@ -35,6 +35,21 @@ RSpec.describe Compare do
end
end
+ describe '#commits' do
+ subject { compare.commits }
+
+ it 'returns a CommitCollection' do
+ is_expected.to be_kind_of(CommitCollection)
+ end
+
+ it 'returns a list of commits' do
+ commit_ids = subject.map(&:id)
+
+ expect(commit_ids).to include(head_commit.id)
+ expect(commit_ids.length).to eq(6)
+ end
+ end
+
describe '#commit' do
it 'returns raw compare head commit' do
expect(subject.commit.id).to eq(head_commit.id)
diff --git a/spec/models/concerns/as_cte_spec.rb b/spec/models/concerns/as_cte_spec.rb
new file mode 100644
index 00000000000..06d9650ec46
--- /dev/null
+++ b/spec/models/concerns/as_cte_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AsCte do
+ let(:klass) do
+ Class.new(ApplicationRecord) do
+ include AsCte
+
+ self.table_name = 'users'
+ end
+ end
+
+ let(:query) { klass.where(id: [1, 2, 3]) }
+ let(:name) { :klass_cte }
+
+ describe '.as_cte' do
+ subject { query.as_cte(name) }
+
+ it { expect(subject).to be_a(Gitlab::SQL::CTE) }
+ it { expect(subject.query).to eq(query) }
+ it { expect(subject.table.name).to eq(name.to_s) }
+
+ context 'with materialized parameter', if: Gitlab::Database::AsWithMaterialized.materialized_supported? do
+ subject { query.as_cte(name, materialized: materialized).to_arel.to_sql }
+
+ context 'as true' do
+ let(:materialized) { true }
+
+ it { expect(subject).to match /MATERIALIZE/ }
+ end
+
+ context 'as false' do
+ let(:materialized) { false }
+
+ it { expect(subject).not_to match /MATERIALIZE/ }
+ end
+ end
+ end
+end
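
For readers new to the concern, a usage sketch that mirrors only what the examples above assert:

cte = klass.where(id: [1, 2, 3]).as_cte(:klass_cte)

cte            # => a Gitlab::SQL::CTE
cte.query      # => the original relation
cte.table.name # => "klass_cte"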
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index d46f22b2216..a00129b3fdf 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -404,6 +404,16 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
it 'returns false when there are no changes' do
expect(thing.attribute_invalidated?(:description_html)).to eq(false)
end
+
+ it 'returns false if skip_markdown_cache_validation is true' do
+ # invalidates the attribute
+ thing.cached_markdown_version += 1
+ thing.description = updated_markdown
+
+ thing.skip_markdown_cache_validation = true
+
+ expect(thing.attribute_invalidated?(:description_html)).to eq(false)
+ end
end
context 'when cache version is updated' do
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
index 62fc689a9ca..b27a4d0dcc1 100644
--- a/spec/models/concerns/ci/artifactable_spec.rb
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -68,8 +68,8 @@ RSpec.describe Ci::Artifactable do
end
describe '.expired' do
- it 'returns a limited number of expired artifacts' do
- expect(Ci::JobArtifact.expired(1).order_id_asc).to eq([recently_expired_artifact])
+ it 'returns all expired artifacts' do
+ expect(Ci::JobArtifact.expired).to contain_exactly(recently_expired_artifact, later_expired_artifact)
end
end
diff --git a/spec/models/concerns/integrations/has_data_fields_spec.rb b/spec/models/concerns/integrations/has_data_fields_spec.rb
index b28fef571c6..374c5c33b50 100644
--- a/spec/models/concerns/integrations/has_data_fields_spec.rb
+++ b/spec/models/concerns/integrations/has_data_fields_spec.rb
@@ -6,84 +6,84 @@ RSpec.describe Integrations::HasDataFields do
let(:url) { 'http://url.com' }
let(:username) { 'username_one' }
let(:properties) do
- { url: url, username: username }
+ { url: url, username: username, jira_issue_transition_automatic: false }
end
shared_examples 'data fields' do
describe '#arg' do
- it 'returns an argument correctly' do
- expect(service.url).to eq(url)
+ it 'returns the expected values' do
+ expect(integration).to have_attributes(properties)
end
end
describe '{arg}_changed?' do
it 'returns false when the property has not been assigned a new value' do
- service.username = 'new_username'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.username = 'new_username'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
it 'returns true when the property has been assigned a different value' do
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns true when the property has been assigned a different value twice' do
- service.url = "http://example.com"
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns false when the property has been re-assigned the same value' do
- service.url = 'http://url.com'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.url = 'http://url.com'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
end
describe '{arg}_touched?' do
it 'returns false when the property has not been assigned a new value' do
- service.username = 'new_username'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.username = 'new_username'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
it 'returns true when the property has been assigned a different value' do
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns true when the property has been assigned a different value twice' do
- service.url = "http://example.com"
- service.url = "http://example.com"
- service.validate
- expect(service.url_changed?).to be_truthy
+ integration.url = "http://example.com"
+ integration.url = "http://example.com"
+ integration.validate
+ expect(integration.url_changed?).to be_truthy
end
it 'returns true when the property has been re-assigned the same value' do
- service.url = 'http://url.com'
- expect(service.url_touched?).to be_truthy
+ integration.url = 'http://url.com'
+ expect(integration.url_touched?).to be_truthy
end
it 'returns false when the property has been re-assigned the same value' do
- service.url = 'http://url.com'
- service.validate
- expect(service.url_changed?).to be_falsy
+ integration.url = 'http://url.com'
+ integration.validate
+ expect(integration.url_changed?).to be_falsy
end
end
describe 'data_fields_present?' do
- it 'returns true from the issue tracker service' do
- expect(service.data_fields_present?).to be true
+ it 'returns true from the issue tracker integration' do
+ expect(integration.data_fields_present?).to be true
end
end
end
context 'when data are stored in data_fields' do
- let(:service) do
+ let(:integration) do
create(:jira_integration, url: url, username: username)
end
@@ -91,21 +91,21 @@ RSpec.describe Integrations::HasDataFields do
describe '{arg}_was?' do
it 'returns nil' do
- service.url = 'http://example.com'
- service.validate
- expect(service.url_was).to be_nil
+ integration.url = 'http://example.com'
+ integration.validate
+ expect(integration.url_was).to be_nil
end
end
end
- context 'when service and data_fields are not persisted' do
- let(:service) do
+ context 'when integration and data_fields are not persisted' do
+ let(:integration) do
Integrations::Jira.new
end
describe 'data_fields_present?' do
it 'returns true' do
- expect(service.data_fields_present?).to be true
+ expect(integration.data_fields_present?).to be true
end
end
end
@@ -113,9 +113,7 @@ RSpec.describe Integrations::HasDataFields do
context 'when data are stored in properties' do
let(:integration) { create(:jira_integration, :without_properties_callback, properties: properties) }
- it_behaves_like 'data fields' do
- let(:service) { integration }
- end
+ it_behaves_like 'data fields'
describe '{arg}_was?' do
it 'returns nil when the property has not been assigned a new value' do
@@ -148,9 +146,7 @@ RSpec.describe Integrations::HasDataFields do
end
end
- it_behaves_like 'data fields' do
- let(:service) { integration }
- end
+ it_behaves_like 'data fields'
describe '{arg}_was?' do
it 'returns nil' do
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index e8e9c263d23..87821de3cf5 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -982,14 +982,6 @@ RSpec.describe Issuable do
subject { issuable.supports_escalation? }
it { is_expected.to eq(supports_escalation) }
-
- context 'with feature disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
end
diff --git a/spec/models/concerns/limitable_spec.rb b/spec/models/concerns/limitable_spec.rb
index 850282d54c7..c0a6aea2075 100644
--- a/spec/models/concerns/limitable_spec.rb
+++ b/spec/models/concerns/limitable_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Limitable do
it 'triggers scoped validations' do
instance = MinimalTestClass.new
- expect(instance).to receive(:validate_scoped_plan_limit_not_exceeded)
+ expect(instance).to receive(:scoped_plan_limits)
instance.valid?(:create)
end
@@ -94,7 +94,7 @@ RSpec.describe Limitable do
it 'triggers scoped validations' do
instance = MinimalTestClass.new
- expect(instance).to receive(:validate_global_plan_limit_not_exceeded)
+ expect(instance).to receive(:global_plan_limits)
instance.valid?(:create)
end
diff --git a/spec/models/concerns/pg_full_text_searchable_spec.rb b/spec/models/concerns/pg_full_text_searchable_spec.rb
index b6da481024a..84209999ab2 100644
--- a/spec/models/concerns/pg_full_text_searchable_spec.rb
+++ b/spec/models/concerns/pg_full_text_searchable_spec.rb
@@ -99,6 +99,17 @@ RSpec.describe PgFullTextSearchable do
it 'does not support searching by non-Latin characters' do
expect(model_class.pg_full_text_search('日本')).to be_empty
end
+
+ context 'when search term has a URL' do
+ let(:with_url) { model_class.create!(project: project, title: 'issue with url', description: 'sample url,https://gitlab.com/gitlab-org/gitlab') }
+
+ it 'allows searching by full URL, ignoring the scheme' do
+ with_url.update_search_data!
+
+ expect(model_class.pg_full_text_search('https://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
+ expect(model_class.pg_full_text_search('gopher://gitlab.com/gitlab-org/gitlab')).to contain_exactly(with_url)
+ end
+ end
end
describe '#update_search_data!' do
diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb
index 62c9a041a85..f2dc8464e86 100644
--- a/spec/models/concerns/project_features_compatibility_spec.rb
+++ b/spec/models/concerns/project_features_compatibility_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe ProjectFeaturesCompatibility do
let(:project) { create(:project) }
let(:features_enabled) { %w(issues wiki builds merge_requests snippets security_and_compliance) }
- let(:features) { features_enabled + %w(repository pages operations container_registry) }
+ let(:features) { features_enabled + %w(repository pages operations container_registry package_registry) }
# We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and wiki_enabled fields on projects table
# All those fields got moved to a new table called project_feature and are now integers instead of booleans
diff --git a/spec/models/concerns/sensitive_serializable_hash_spec.rb b/spec/models/concerns/sensitive_serializable_hash_spec.rb
index c864ecb4eec..3c9199ce18f 100644
--- a/spec/models/concerns/sensitive_serializable_hash_spec.rb
+++ b/spec/models/concerns/sensitive_serializable_hash_spec.rb
@@ -4,11 +4,15 @@ require 'spec_helper'
RSpec.describe SensitiveSerializableHash do
describe '.prevent_from_serialization' do
- let(:test_class) do
+ let(:base_class) do
Class.new do
include ActiveModel::Serialization
include SensitiveSerializableHash
+ end
+ end
+ let(:test_class) do
+ Class.new(base_class) do
attr_accessor :name, :super_secret
prevent_from_serialization :super_secret
@@ -19,6 +23,12 @@ RSpec.describe SensitiveSerializableHash do
end
end
+ let(:another_class) do
+ Class.new(base_class) do
+ prevent_from_serialization :sub_secret
+ end
+ end
+
let(:model) { test_class.new }
it 'does not include the field in serializable_hash' do
@@ -30,6 +40,11 @@ RSpec.describe SensitiveSerializableHash do
expect(model.serializable_hash(unsafe_serialization_hash: true)).to include('super_secret')
end
end
+
+ it 'does not change parent class attributes_exempt_from_serializable_hash' do
+ expect(test_class.attributes_exempt_from_serializable_hash).to contain_exactly(:super_secret)
+ expect(another_class.attributes_exempt_from_serializable_hash).to contain_exactly(:sub_secret)
+ end
end
describe '#serializable_hash' do
@@ -56,6 +71,9 @@ RSpec.describe SensitiveSerializableHash do
attributes.each do |attribute|
expect(model.attributes).to include(attribute) # double-check the attribute does exist
+ # Do not expect binary columns to appear in JSON
+ next if klass.columns_hash[attribute]&.type == :binary
+
expect(model.serializable_hash(unsafe_serialization_hash: true)).to include(attribute)
expect(model.to_json(unsafe_serialization_hash: true)).to include(attribute)
expect(model.as_json(unsafe_serialization_hash: true)).to include(attribute)
@@ -65,8 +83,12 @@ RSpec.describe SensitiveSerializableHash do
end
end
- it_behaves_like 'attr_encrypted attribute', WebHook, 'token' do
+ context 'for a web hook' do
let_it_be(:model) { create(:system_hook) }
+
+ it_behaves_like 'attr_encrypted attribute', WebHook, 'token'
+ it_behaves_like 'attr_encrypted attribute', WebHook, 'url'
+ it_behaves_like 'attr_encrypted attribute', WebHook, 'url_variables'
end
it_behaves_like 'attr_encrypted attribute', Ci::InstanceVariable, 'value' do
diff --git a/spec/models/container_registry/event_spec.rb b/spec/models/container_registry/event_spec.rb
index 6b544c95cc8..e0194a07f46 100644
--- a/spec/models/container_registry/event_spec.rb
+++ b/spec/models/container_registry/event_spec.rb
@@ -46,6 +46,12 @@ RSpec.describe ContainerRegistry::Event do
handle!
end
+ it 'clears the cache for the namespace container repositories size' do
+ expect(Rails.cache).to receive(:delete).with(group.container_repositories_size_cache_key)
+
+ handle!
+ end
+
shared_examples 'event without project statistics update' do
it 'does not queue a project statistics update' do
expect(ProjectCacheWorker).not_to receive(:perform_async)
@@ -54,14 +60,6 @@ RSpec.describe ContainerRegistry::Event do
end
end
- context 'with :container_registry_project_statistics feature flag disabled' do
- before do
- stub_feature_flags(container_registry_project_statistics: false)
- end
-
- it_behaves_like 'event without project statistics update'
- end
-
context 'with no target tag' do
let(:target) { super().without('tag') }
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index af4e40cecb7..7d0dfad91b2 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -208,23 +208,9 @@ RSpec.describe ContainerRepository, :aggregate_failures do
shared_examples 'queueing the next import' do
it 'starts the worker' do
expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
- expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_in)
subject
end
-
- context 'enqueue_twice feature flag disabled' do
- before do
- stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
- end
-
- it 'starts the worker only once' do
- expect(::ContainerRegistry::Migration::EnqueuerWorker).to receive(:perform_async)
- expect(::ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_in)
-
- subject
- end
- end
end
describe '#start_pre_import' do
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index 86f868b269e..f91546f5240 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -142,4 +142,99 @@ RSpec.describe CustomerRelations::Contact, type: :model do
expect(issue_contact2.reload.contact_id).to eq(dupe_contact1.id)
end
end
+
+ describe '.search' do
+ let_it_be(:contact_a) do
+ create(
+ :contact,
+ group: group,
+ first_name: "ABC",
+ last_name: "DEF",
+ email: "ghi@test.com",
+ description: "LMNO",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:contact_b) do
+ create(
+ :contact,
+ group: group,
+ first_name: "PQR",
+ last_name: "STU",
+ email: "vwx@test.com",
+ description: "YZ",
+ state: "active"
+ )
+ end
+
+ subject(:found_contacts) { group.contacts.search(search_term) }
+
+ context 'when search term is empty' do
+ let(:search_term) { "" }
+
+ it 'returns all group contacts' do
+ expect(found_contacts).to contain_exactly(contact_a, contact_b)
+ end
+ end
+
+ context 'when search term is not empty' do
+ context 'when searching for first name ignoring casing' do
+ let(:search_term) { "aBc" }
+
+ it { is_expected.to contain_exactly(contact_a) }
+ end
+
+ context 'when searching for last name ignoring casing' do
+ let(:search_term) { "StU" }
+
+ it { is_expected.to contain_exactly(contact_b) }
+ end
+
+ context 'when searching for email' do
+ let(:search_term) { "ghi" }
+
+ it { is_expected.to contain_exactly(contact_a) }
+ end
+
+ context 'when searching description ignoring casing' do
+ let(:search_term) { "Yz" }
+
+ it { is_expected.to contain_exactly(contact_b) }
+ end
+
+ context 'when fuzzy searching for email and last name' do
+ let(:search_term) { "s" }
+
+ it { is_expected.to contain_exactly(contact_a, contact_b) }
+ end
+ end
+ end
+
+ describe '.search_by_state' do
+ let_it_be(:contact_a) { create(:contact, group: group, state: "inactive") }
+ let_it_be(:contact_b) { create(:contact, group: group, state: "active") }
+
+ context 'when searching contacts by state' do
+ it 'returns only inactive contacts' do
+ expect(group.contacts.search_by_state(:inactive)).to contain_exactly(contact_a)
+ end
+
+ it 'returns only active contacts' do
+ expect(group.contacts.search_by_state(:active)).to contain_exactly(contact_b)
+ end
+ end
+ end
+
+ describe '.sort_by_name' do
+ let_it_be(:contact_a) { create(:contact, group: group, first_name: "c", last_name: "d") }
+ let_it_be(:contact_b) { create(:contact, group: group, first_name: "a", last_name: "b") }
+ let_it_be(:contact_c) { create(:contact, group: group, first_name: "e", last_name: "d") }
+
+ context 'when sorting the contacts' do
+ it 'sorts them by last name then first name in ascending order' do
+ expect(group.contacts.sort_by_name).to eq([contact_b, contact_a, contact_c])
+ end
+ end
+ end
end
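
The `.search_by_state` and `.sort_by_name` expectations above could be backed by scopes along these lines (a sketch under the spec's stated behaviour, not the actual CustomerRelations::Contact code):

# Order by last name, then first name - the ordering asserted above.
scope :sort_by_name, -> { order(:last_name, :first_name) }

# The specs use 'active' / 'inactive' state values.
scope :search_by_state, ->(state) { where(state: state) }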
diff --git a/spec/models/customer_relations/organization_spec.rb b/spec/models/customer_relations/organization_spec.rb
index 06ba9c5b7ad..1833fcf5385 100644
--- a/spec/models/customer_relations/organization_spec.rb
+++ b/spec/models/customer_relations/organization_spec.rb
@@ -78,4 +78,83 @@ RSpec.describe CustomerRelations::Organization, type: :model do
expect(contact2.reload.organization_id).to eq(dupe_organization1.id)
end
end
+
+ describe '.search' do
+ let_it_be(:organization_a) do
+ create(
+ :organization,
+ group: group,
+ name: "DEF",
+ description: "ghi_st",
+ state: "inactive"
+ )
+ end
+
+ let_it_be(:organization_b) do
+ create(
+ :organization,
+ group: group,
+ name: "ABC_st",
+ description: "JKL",
+ state: "active"
+ )
+ end
+
+ subject(:found_organizations) { group.organizations.search(search_term) }
+
+ context 'when search term is empty' do
+ let(:search_term) { "" }
+
+ it 'returns all group organizations' do
+ expect(found_organizations).to contain_exactly(organization_a, organization_b)
+ end
+ end
+
+ context 'when search term is not empty' do
+ context 'when searching for name' do
+ let(:search_term) { "aBc" }
+
+ it { is_expected.to contain_exactly(organization_b) }
+ end
+
+ context 'when searching for description' do
+ let(:search_term) { "ghI" }
+
+ it { is_expected.to contain_exactly(organization_a) }
+ end
+
+ context 'when searching for name and description' do
+ let(:search_term) { "_st" }
+
+ it { is_expected.to contain_exactly(organization_a, organization_b) }
+ end
+ end
+ end
+
+ describe '.search_by_state' do
+ let_it_be(:organization_a) { create(:organization, group: group, state: "inactive") }
+ let_it_be(:organization_b) { create(:organization, group: group, state: "active") }
+
+ context 'when searching organizations by state' do
+ it 'returns only inactive organizations' do
+ expect(group.organizations.search_by_state(:inactive)).to contain_exactly(organization_a)
+ end
+
+ it 'returns only active organizations' do
+ expect(group.organizations.search_by_state(:active)).to contain_exactly(organization_b)
+ end
+ end
+ end
+
+ describe '.sort_by_name' do
+ let_it_be(:organization_a) { create(:organization, group: group, name: "c") }
+ let_it_be(:organization_b) { create(:organization, group: group, name: "a") }
+ let_it_be(:organization_c) { create(:organization, group: group, name: "b") }
+
+ context 'when sorting the organizations' do
+ it 'sorts them by name in ascending order' do
+ expect(group.organizations.sort_by_name).to eq([organization_b, organization_c, organization_a])
+ end
+ end
+ end
end
diff --git a/spec/models/data_list_spec.rb b/spec/models/data_list_spec.rb
index d2f15386808..67db2730a78 100644
--- a/spec/models/data_list_spec.rb
+++ b/spec/models/data_list_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe DataList do
end
def data_list(integration)
- DataList.new([integration], integration.to_data_fields_hash, integration.data_fields.class).to_array
+ DataList.new([integration], integration.to_database_hash, integration.data_fields.class).to_array
end
it 'returns current data' do
diff --git a/spec/models/deployment_cluster_spec.rb b/spec/models/deployment_cluster_spec.rb
index dc9cbe4b082..b0564def946 100644
--- a/spec/models/deployment_cluster_spec.rb
+++ b/spec/models/deployment_cluster_spec.rb
@@ -19,4 +19,11 @@ RSpec.describe DeploymentCluster do
kubernetes_namespace: kubernetes_namespace
)
end
+
+ context 'loose foreign key on deployment_clusters.cluster_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:cluster) }
+ let!(:model) { create(:deployment_cluster, cluster: parent) }
+ end
+ end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 409353bdbcf..a58d32dfe5d 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe Deployment do
it { is_expected.to delegate_method(:name).to(:environment).with_prefix }
it { is_expected.to delegate_method(:commit).to(:project) }
it { is_expected.to delegate_method(:commit_title).to(:commit).as(:try) }
- it { is_expected.to delegate_method(:manual_actions).to(:deployable).as(:try) }
it { is_expected.to delegate_method(:kubernetes_namespace).to(:deployment_cluster).as(:kubernetes_namespace) }
it { is_expected.to validate_presence_of(:ref) }
@@ -25,20 +24,23 @@ RSpec.describe Deployment do
it_behaves_like 'having unique enum values'
- describe '#scheduled_actions' do
- subject { deployment.scheduled_actions }
+ describe '#manual_actions' do
+ let(:deployment) { create(:deployment) }
- let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build) { create(:ci_build, :success, pipeline: pipeline) }
- let(:deployment) { create(:deployment, deployable: build) }
+ it 'delegates to environment_manual_actions' do
+ expect(deployment.deployable).to receive(:environment_manual_actions).and_call_original
- it 'delegates to other_scheduled_actions' do
- expect_next_instance_of(Ci::Build) do |instance|
- expect(instance).to receive(:other_scheduled_actions)
- end
+ deployment.manual_actions
+ end
+ end
- subject
+ describe '#scheduled_actions' do
+ let(:deployment) { create(:deployment) }
+
+ it 'delegates to environment_scheduled_actions' do
+ expect(deployment.deployable).to receive(:environment_scheduled_actions).and_call_original
+
+ deployment.scheduled_actions
end
end
@@ -137,15 +139,29 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.run!
end
end
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.run!
+ end
+ end
+ end
+
it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
expect(Deployments::DropOlderDeploymentsWorker)
.to receive(:perform_async).once.with(deployment.id)
@@ -173,14 +189,28 @@ RSpec.describe Deployment do
deployment.succeed!
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.succeed!
end
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.succeed!
+ end
+ end
+ end
end
context 'when deployment failed' do
@@ -202,14 +232,28 @@ RSpec.describe Deployment do
deployment.drop!
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.drop!
end
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.drop!
+ end
+ end
+ end
end
context 'when deployment was canceled' do
@@ -231,14 +275,28 @@ RSpec.describe Deployment do
deployment.cancel!
end
- it 'executes Deployments::HooksWorker asynchronously' do
+ it 'executes deployment hooks' do
freeze_time do
- expect(Deployments::HooksWorker)
- .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+ expect(deployment).to receive(:execute_hooks).with(Time.current)
deployment.cancel!
end
end
+
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
+
+ deployment.cancel!
+ end
+ end
+ end
end
context 'when deployment was skipped' do
@@ -266,6 +324,12 @@ RSpec.describe Deployment do
deployment.skip!
end
end
+
+ it 'does not execute deployment hooks' do
+ expect(deployment).not_to receive(:execute_hooks)
+
+ deployment.skip!
+ end
end
context 'when deployment is blocked' do
@@ -289,6 +353,12 @@ RSpec.describe Deployment do
deployment.block!
end
+
+ it 'does not execute deployment hooks' do
+ expect(deployment).not_to receive(:execute_hooks)
+
+ deployment.block!
+ end
end
describe 'synching status to Jira' do
@@ -550,6 +620,143 @@ RSpec.describe Deployment do
is_expected.to contain_exactly(deployment1, deployment2)
end
end
+
+ describe 'last_deployment_group_for_environment' do
+ def subject_method(environment)
+ described_class.last_deployment_group_for_environment(environment)
+ end
+
+ let!(:project) { create(:project, :repository) }
+ let!(:environment) { create(:environment, project: project) }
+
+ context 'when there are no deployments and builds' do
+ it do
+ expect(subject_method(environment)).to eq(Deployment.none)
+ end
+ end
+
+ context 'when there are no successful builds' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:ci_build) { create(:ci_build, :running, project: project, pipeline: pipeline) }
+
+ before do
+ create(:deployment, :success, environment: environment, project: project, deployable: ci_build)
+ end
+
+ it do
+ expect(subject_method(environment)).to eq(Deployment.none)
+ end
+ end
+
+ context 'when there are deployments for multiple pipelines' do
+ let(:pipeline_a) { create(:ci_pipeline, project: project) }
+ let(:pipeline_b) { create(:ci_pipeline, project: project) }
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
+ let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
+
+ # Successful deployments for pipeline_a
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
+ end
+
+ let!(:deployment_b) do
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_c)
+ end
+
+ before do
+ # Failed deployment for pipeline_a
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_d)
+
+ # Failed deployment for pipeline_b
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
+ end
+
+ it 'returns the successful deployment jobs for the last deployment pipeline' do
+ expect(subject_method(environment).pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
+ end
+ end
+
+ context 'when there are many environments' do
+ let(:environment_b) { create(:environment, project: project) }
+
+ let(:pipeline_a) { create(:ci_pipeline, project: project) }
+ let(:pipeline_b) { create(:ci_pipeline, project: project) }
+ let(:pipeline_c) { create(:ci_pipeline, project: project) }
+ let(:pipeline_d) { create(:ci_pipeline, project: project) }
+
+ # Builds for first environment: 'environment' with pipeline_a and pipeline_b
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
+ let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
+ let!(:stop_env_a) { create(:ci_build, :manual, project: project, pipeline: pipeline_a, name: 'stop_env_a') }
+
+ # Builds for second environment: 'environment_b' with pipeline_c and pipeline_d
+ let(:ci_build_e) { create(:ci_build, :success, project: project, pipeline: pipeline_c) }
+ let(:ci_build_f) { create(:ci_build, :failed, project: project, pipeline: pipeline_d) }
+ let(:ci_build_g) { create(:ci_build, :success, project: project, pipeline: pipeline_c) }
+ let(:ci_build_h) { create(:ci_build, :failed, project: project, pipeline: pipeline_c) }
+ let!(:stop_env_b) { create(:ci_build, :manual, project: project, pipeline: pipeline_c, name: 'stop_env_b') }
+
+ # Successful deployments for 'environment' from pipeline_a
+ let!(:deployment_a) do
+ create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
+ end
+
+ let!(:deployment_b) do
+ create(:deployment, :success,
+ project: project, environment: environment, deployable: ci_build_c, on_stop: 'stop_env_a')
+ end
+
+ # Successful deployments for 'environment_b' from pipeline_c
+ let!(:deployment_c) do
+ create(:deployment, :success, project: project, environment: environment_b, deployable: ci_build_e)
+ end
+
+ let!(:deployment_d) do
+ create(:deployment, :success,
+ project: project, environment: environment_b, deployable: ci_build_g, on_stop: 'stop_env_b')
+ end
+
+ before do
+ # Failed deployments for 'environment' from pipeline_a and pipeline_b
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_d)
+ create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
+
+ # Failed deployments for 'environment_b' from pipeline_c and pipeline_d
+ create(:deployment, :failed, project: project, environment: environment_b, deployable: ci_build_h)
+ create(:deployment, :failed, project: project, environment: environment_b, deployable: ci_build_f)
+ end
+
+ it 'batch loads for environments' do
+ environments = [environment, environment_b]
+
+ # Prime the batch loader for each environment
+ environments.each do |env|
+ subject_method(env)
+ end
+
+ expect(subject_method(environments.first).pluck(:id))
+ .to contain_exactly(deployment_a.id, deployment_b.id)
+
+ expect { subject_method(environments.second).pluck(:id) }.not_to exceed_query_limit(0)
+
+ expect(subject_method(environments.second).pluck(:id))
+ .to contain_exactly(deployment_c.id, deployment_d.id)
+
+ expect(subject_method(environments.first).map(&:stop_action).compact)
+ .to contain_exactly(stop_env_a)
+
+ expect { subject_method(environments.second).map(&:stop_action) }
+ .not_to exceed_query_limit(0)
+
+ expect(subject_method(environments.second).map(&:stop_action).compact)
+ .to contain_exactly(stop_env_b)
+ end
+ end
+ end
end
describe 'latest_for_sha' do
@@ -845,11 +1052,30 @@ RSpec.describe Deployment do
expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
expect(Deployments::ArchiveInProjectWorker).to receive(:perform_async)
- expect(Deployments::HooksWorker).to receive(:perform_async)
expect(deploy.update_status('success')).to eq(true)
end
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'schedules `Deployments::HooksWorker` when finishing a deploy' do
+ expect(Deployments::HooksWorker).to receive(:perform_async)
+
+ deploy.update_status('success')
+ end
+ end
+
+ it 'executes deployment hooks when finishing a deploy' do
+ freeze_time do
+ expect(deploy).to receive(:execute_hooks).with(Time.current)
+
+ deploy.update_status('success')
+ end
+ end
+
it 'updates finished_at when transitioning to a finished status' do
freeze_time do
deploy.update_status('success')
@@ -1173,4 +1399,11 @@ RSpec.describe Deployment do
end
end
end
+
+ context 'loose foreign key on deployments.cluster_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:cluster) }
+ let!(:model) { create(:deployment, cluster: parent) }
+ end
+ end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 34dfc7a1fce..fd89a3a2e22 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -479,7 +479,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when matching action is defined' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, :success) }
let!(:deployment) do
create(:deployment, :success,
@@ -549,7 +549,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when matching action is defined' do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build_a) { create(:ci_build, pipeline: pipeline) }
+ let(:build_a) { create(:ci_build, :success, pipeline: pipeline) }
before do
create(:deployment, :success,
@@ -586,6 +586,12 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(action).to eq(close_action)
expect(action.user).to eq(user)
end
+
+ it 'does not stop the environment' do
+ subject
+
+ expect(environment).not_to be_stopped
+ end
end
context 'if action did finish' do
@@ -632,8 +638,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when there are more then one stop action for the environment' do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:build_a) { create(:ci_build, pipeline: pipeline) }
- let(:build_b) { create(:ci_build, pipeline: pipeline) }
+ let(:build_a) { create(:ci_build, :success, pipeline: pipeline) }
+ let(:build_b) { create(:ci_build, :success, pipeline: pipeline) }
let!(:close_actions) do
[
@@ -666,9 +672,9 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(actions.pluck(:user)).to match_array(close_actions.pluck(:user))
end
- context 'when there are failed deployment jobs' do
+ context 'when there are failed builds' do
before do
- create(:ci_build, pipeline: pipeline, name: 'close_app_c')
+ create(:ci_build, :failed, pipeline: pipeline, name: 'close_app_c')
create(:deployment, :failed,
environment: environment,
@@ -676,11 +682,11 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
on_stop: 'close_app_c')
end
- it 'returns only stop actions from successful deployment jobs' do
+ it 'returns only stop actions from successful builds' do
actions = subject
expect(actions).to match_array(close_actions)
- expect(actions.count).to eq(environment.successful_deployments.count)
+ expect(actions.count).to eq(pipeline.latest_successful_builds.count)
end
end
end
@@ -697,8 +703,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when there are multiple deployments with actions' do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline) }
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline) }
+ let(:ci_build_b) { create(:ci_build, :success, project: project, pipeline: pipeline) }
let!(:ci_build_c) { create(:ci_build, :manual, project: project, pipeline: pipeline, name: 'close_app_a') }
let!(:ci_build_d) { create(:ci_build, :manual, project: project, pipeline: pipeline, name: 'close_app_b') }
@@ -714,7 +720,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
before do
# Create failed deployment without stop_action.
- build = create(:ci_build, project: project, pipeline: pipeline)
+ build = create(:ci_build, :failed, project: project, pipeline: pipeline)
create(:deployment, :failed, project: project, environment: environment, deployable: build)
end
@@ -736,10 +742,10 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when there are deployments for multiple pipelines' do
let(:pipeline_a) { create(:ci_pipeline, project: project) }
let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline_b) }
- let(:ci_build_c) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_d) { create(:ci_build, project: project, pipeline: pipeline_a) }
+ let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
+ let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
+ let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
# Successful deployments for pipeline_a
let!(:deployment_a) { create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a) }
@@ -756,6 +762,16 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it 'returns the successful deployment jobs for the last deployment pipeline' do
expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
end
+
+ context 'when the batch_load_environment_last_deployment_group feature flag is disabled' do
+ before do
+ stub_feature_flags(batch_load_environment_last_deployment_group: false)
+ end
+
+ it 'returns the successful deployment jobs for the last deployment pipeline' do
+ expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
+ end
+ end
end
end
@@ -1730,17 +1746,17 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
let!(:environment3) { create(:environment, project: project, state: 'stopped') }
it 'returns the environments count grouped by state' do
- expect(project.environments.count_by_state).to eq({ stopped: 2, available: 1 })
+ expect(project.environments.count_by_state).to eq({ stopped: 2, available: 1, stopping: 0 })
end
it 'returns the environments count grouped by state with zero value' do
environment2.update!(state: 'stopped')
- expect(project.environments.count_by_state).to eq({ stopped: 3, available: 0 })
+ expect(project.environments.count_by_state).to eq({ stopped: 3, available: 0, stopping: 0 })
end
end
it 'returns zero state counts when environments are empty' do
- expect(project.environments.count_by_state).to eq({ stopped: 0, available: 0 })
+ expect(project.environments.count_by_state).to eq({ stopped: 0, available: 0, stopping: 0 })
end
end
diff --git a/spec/models/error_tracking/error_event_spec.rb b/spec/models/error_tracking/error_event_spec.rb
index 9cf5a405e74..6dab8fbf757 100644
--- a/spec/models/error_tracking/error_event_spec.rb
+++ b/spec/models/error_tracking/error_event_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe ErrorTracking::ErrorEvent, type: :model do
+RSpec.describe ErrorTracking::ErrorEvent do
+ include AfterNextHelpers
+
let_it_be(:event) { create(:error_tracking_error_event) }
describe 'relationships' do
@@ -18,44 +20,12 @@ RSpec.describe ErrorTracking::ErrorEvent, type: :model do
end
describe '#stacktrace' do
- it 'generates a correct stacktrace in expected format' do
- expected_context = [
- [132, " end\n"],
- [133, "\n"],
- [134, " begin\n"],
- [135, " block.call(work, *extra)\n"],
- [136, " rescue Exception => e\n"],
- [137, " STDERR.puts \"Error reached top of thread-pool: #\{e.message\} (#\{e.class\})\"\n"],
- [138, " end\n"]
- ]
-
- expected_entry = {
- 'lineNo' => 135,
- 'context' => expected_context,
- 'filename' => 'puma/thread_pool.rb',
- 'function' => 'block in spawn_thread',
- 'colNo' => 0
- }
+ it 'builds a stacktrace' do
+ expect_next(ErrorTracking::StacktraceBuilder, event.payload)
+ .to receive(:stacktrace).and_call_original
expect(event.stacktrace).to be_kind_of(Array)
- expect(event.stacktrace.first).to eq(expected_entry)
- end
-
- context 'error context is missing' do
- let(:event) { create(:error_tracking_error_event, :browser) }
-
- it 'generates a stacktrace without context' do
- expected_entry = {
- 'lineNo' => 6395,
- 'context' => [],
- 'filename' => 'webpack-internal:///./node_modules/vue/dist/vue.runtime.esm.js',
- 'function' => 'hydrate',
- 'colNo' => 0
- }
-
- expect(event.stacktrace).to be_kind_of(Array)
- expect(event.stacktrace.first).to eq(expected_entry)
- end
+ expect(event.stacktrace).not_to be_empty
end
end
diff --git a/spec/factories_spec.rb b/spec/models/factories_spec.rb
index 08286f57b3b..45c3f93e6cf 100644
--- a/spec/factories_spec.rb
+++ b/spec/models/factories_spec.rb
@@ -51,7 +51,9 @@ RSpec.describe 'factories' do
factory.definition.defined_traits.map(&:name).each do |trait_name|
describe "linting :#{trait_name} trait" do
it 'does not raise error when created' do
- pending("Trait skipped linting due to legacy error") if skipped_traits.include?([factory.name, trait_name.to_sym])
+ if skipped_traits.include?([factory.name, trait_name.to_sym])
+ pending("Trait skipped linting due to legacy error")
+ end
expect { create(factory.name, trait_name) }.not_to raise_error
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 0ca1fe1c8a6..d47f43a630d 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -991,6 +991,14 @@ RSpec.describe Group do
it { expect(group.last_owner?(@members[:owner])).to be_truthy }
+ context 'when there is also a project_bot owner' do
+ before do
+ group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ end
+
+ it { expect(group.last_owner?(@members[:owner])).to be_truthy }
+ end
+
context 'with two owners' do
before do
create(:group_member, :owner, group: group)
@@ -1116,35 +1124,58 @@ RSpec.describe Group do
end
end
- describe '#single_owner?' do
+ describe '#all_owners_excluding_project_bots' do
let_it_be(:user) { create(:user) }
context 'when there is only one owner' do
- before do
+ let!(:owner) do
group.add_user(user, GroupMember::OWNER)
end
- it 'returns true' do
- expect(group.single_owner?).to eq(true)
+ it 'returns the owner' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner)
+ end
+
+ context 'and there is also a project_bot owner' do
+ before do
+ group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ end
+
+ it 'returns only the human owner' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner)
+ end
end
end
context 'when there are multiple owners' do
let_it_be(:user_2) { create(:user) }
- before do
+ let!(:owner) do
group.add_user(user, GroupMember::OWNER)
+ end
+
+ let!(:owner2) do
group.add_user(user_2, GroupMember::OWNER)
end
- it 'returns true' do
- expect(group.single_owner?).to eq(false)
+ it 'returns both owners' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner, owner2)
+ end
+
+ context 'and there is also a project_bot owner' do
+ before do
+ group.add_user(create(:user, :project_bot), GroupMember::OWNER)
+ end
+
+ it 'returns only the human owners' do
+ expect(group.all_owners_excluding_project_bots).to contain_exactly(owner, owner2)
+ end
end
end
context 'when there are no owners' do
it 'returns an empty result' do
- expect(group.single_owner?).to eq(false)
+ expect(group.all_owners_excluding_project_bots).to be_empty
end
end
end
@@ -2393,19 +2424,6 @@ RSpec.describe Group do
fetch_config
end
-
- context 'when traversal ID feature flags are disabled' do
- before do
- stub_feature_flags(sync_traversal_ids: false)
- end
-
- it 'caches the parent config when group auto_devops_enabled is nil' do
- cache_key = "namespaces:{first_auto_devops_config}:#{group.id}"
- define_cache_expectations(cache_key)
-
- fetch_config
- end
- end
end
context 'cache expiration' do
@@ -2433,14 +2451,6 @@ RSpec.describe Group do
group.update!(auto_devops_enabled: true)
end
-
- it 'does not clear cache when the feature is disabled' do
- stub_feature_flags(namespaces_cache_first_auto_devops_config: false)
-
- expect(Rails.cache).not_to receive(:delete_multi)
-
- parent.update!(auto_devops_enabled: true)
- end
end
end
end
@@ -3417,4 +3427,42 @@ RSpec.describe Group do
end
end
end
+
+ describe '#gitlab_deploy_token' do
+ subject(:gitlab_deploy_token) { group.gitlab_deploy_token }
+
+ context 'when there is a gitlab deploy token associated' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, groups: [group]) }
+
+ it { is_expected.to eq(deploy_token) }
+ end
+
+ context 'when there is no gitlab deploy token associated' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a gitlab deploy token associated but it has been revoked' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, :revoked, groups: [group]) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a gitlab deploy token associated but it is expired' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, :expired, groups: [group]) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a deploy token associated with a different name' do
+ let!(:deploy_token) { create(:deploy_token, :group, groups: [group]) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when there is a gitlab deploy token associated with a different group' do
+ let!(:deploy_token) { create(:deploy_token, :group, :gitlab_deploy_token, groups: [create(:group)]) }
+
+ it { is_expected.to be_nil }
+ end
+ end
end
diff --git a/spec/models/hooks/project_hook_spec.rb b/spec/models/hooks/project_hook_spec.rb
index ec2eca96755..4253686b843 100644
--- a/spec/models/hooks/project_hook_spec.rb
+++ b/spec/models/hooks/project_hook_spec.rb
@@ -31,15 +31,6 @@ RSpec.describe ProjectHook do
end
end
- describe '#rate_limit' do
- let_it_be(:plan_limits) { create(:plan_limits, :default_plan, web_hook_calls: 100) }
- let_it_be(:hook) { create(:project_hook) }
-
- it 'returns the default limit' do
- expect(hook.rate_limit).to be(100)
- end
- end
-
describe '#parent' do
it 'returns the associated project' do
project = build(:project)
diff --git a/spec/models/hooks/service_hook_spec.rb b/spec/models/hooks/service_hook_spec.rb
index 0d65fe302e1..68c284a913c 100644
--- a/spec/models/hooks/service_hook_spec.rb
+++ b/spec/models/hooks/service_hook_spec.rb
@@ -23,14 +23,6 @@ RSpec.describe ServiceHook do
end
end
- describe '#rate_limit' do
- let(:hook) { build(:service_hook) }
-
- it 'returns nil' do
- expect(hook.rate_limit).to be_nil
- end
- end
-
describe '#parent' do
let(:hook) { build(:service_hook, integration: integration) }
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index bf69c7219a8..9f5f81dd6c0 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -185,14 +185,6 @@ RSpec.describe SystemHook do
end
end
- describe '#rate_limit' do
- let(:hook) { build(:system_hook) }
-
- it 'returns nil' do
- expect(hook.rate_limit).to be_nil
- end
- end
-
describe '#application_context' do
let(:hook) { build(:system_hook) }
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 9cfbb14e087..e1fea3318f6 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -48,6 +48,62 @@ RSpec.describe WebHookLog do
end
end
+ describe '.delete_batch_for' do
+ let(:hook) { create(:project_hook) }
+
+ before do
+ create_list(:web_hook_log, 3, web_hook: hook)
+ create_list(:web_hook_log, 3)
+ end
+
+ subject { described_class.delete_batch_for(hook, batch_size: batch_size) }
+
+ shared_examples 'deletes batch of web hook logs' do
+ it { is_expected.to be(batch_size <= 3) }
+
+ it 'deletes min(batch_size, total) records' do
+ deleted = [batch_size, 3].min
+
+ expect { subject }.to change(described_class, :count).by(-deleted)
+ end
+ end
+
+ context 'when the batch size is less than one' do
+ let(:batch_size) { 0 }
+
+ it 'raises an argument error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the batch size is smaller than the total' do
+ let(:batch_size) { 2 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ context 'when the batch size is equal to the total' do
+ let(:batch_size) { 3 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ context 'when the batch size is greater than the total' do
+ let(:batch_size) { 1000 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ it 'does not loop forever' do
+ batches = 0
+ batches += 1 while described_class.delete_batch_for(hook, batch_size: 1)
+
+ expect(hook.web_hook_logs).to be_none
+ expect(described_class.count).to eq 3
+ expect(batches).to eq 3 # true three times, stops at first false
+ end
+ end
+
describe '#success?' do
let(:web_hook_log) { build(:web_hook_log, response_status: status) }
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index dd954e08156..fb4d1cee606 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -24,6 +24,29 @@ RSpec.describe WebHook do
describe 'validations' do
it { is_expected.to validate_presence_of(:url) }
+ describe 'url_variables' do
+ it { is_expected.to allow_value({}).for(:url_variables) }
+ it { is_expected.to allow_value({ 'foo' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'FOO' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'MY_TOKEN' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'foo2' => 'bar' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'x' => 'y' }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'x' => ('a' * 100) }).for(:url_variables) }
+ it { is_expected.to allow_value({ 'foo' => 'bar', 'bar' => 'baz' }).for(:url_variables) }
+ it { is_expected.to allow_value((1..20).to_h { ["k#{_1}", 'value'] }).for(:url_variables) }
+
+ it { is_expected.not_to allow_value([]).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'foo' => 1 }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'bar' => :baz }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'bar' => nil }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'foo' => '' }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'foo' => ('a' * 101) }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ 'has spaces' => 'foo' }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ '' => 'foo' }).for(:url_variables) }
+ it { is_expected.not_to allow_value({ '1foo' => 'foo' }).for(:url_variables) }
+ it { is_expected.not_to allow_value((1..21).to_h { ["k#{_1}", 'value'] }).for(:url_variables) }
+ end
+
describe 'url' do
it { is_expected.to allow_value('http://example.com').for(:url) }
it { is_expected.to allow_value('https://example.com').for(:url) }
@@ -87,7 +110,7 @@ RSpec.describe WebHook do
describe 'encrypted attributes' do
subject { described_class.encrypted_attributes.keys }
- it { is_expected.to contain_exactly(:token, :url) }
+ it { is_expected.to contain_exactly(:token, :url, :url_variables) }
end
describe 'execute' do
@@ -130,11 +153,11 @@ RSpec.describe WebHook do
end
describe '#destroy' do
- it 'cascades to web_hook_logs' do
+ it 'does not cascade to web_hook_logs' do
web_hook = create(:project_hook)
create_list(:web_hook_log, 3, web_hook: web_hook)
- expect { web_hook.destroy! }.to change(web_hook.web_hook_logs, :count).by(-3)
+ expect { web_hook.destroy! }.not_to change(web_hook.web_hook_logs, :count)
end
end
@@ -470,31 +493,70 @@ RSpec.describe WebHook do
end
describe '#rate_limited?' do
- context 'when there are rate limits' do
- before do
- allow(hook).to receive(:rate_limit).and_return(3)
+ it 'is false when hook has not been rate limited' do
+ expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
+ expect(rate_limiter).to receive(:rate_limited?).and_return(false)
end
- it 'is false when hook has not been rate limited' do
- expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(false)
- expect(hook).not_to be_rate_limited
+ expect(hook).not_to be_rate_limited
+ end
+
+ it 'is true when hook has been rate limited' do
+ expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
+ expect(rate_limiter).to receive(:rate_limited?).and_return(true)
end
- it 'is true when hook has been rate limited' do
- expect(Gitlab::ApplicationRateLimiter).to receive(:peek).and_return(true)
- expect(hook).to be_rate_limited
+ expect(hook).to be_rate_limited
+ end
+ end
+
+ describe '#rate_limit' do
+ it 'returns the hook rate limit' do
+ expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
+ expect(rate_limiter).to receive(:limit).and_return(10)
end
+
+ expect(hook.rate_limit).to eq(10)
end
+ end
+
+ describe '#alert_status' do
+ subject(:status) { hook.alert_status }
- context 'when there are no rate limits' do
+ it { is_expected.to eq :executable }
+
+ context 'when hook has been disabled' do
before do
- allow(hook).to receive(:rate_limit).and_return(nil)
+ hook.disable!
end
- it 'does not call Gitlab::ApplicationRateLimiter, and is false' do
- expect(Gitlab::ApplicationRateLimiter).not_to receive(:peek)
- expect(hook).not_to be_rate_limited
+ it { is_expected.to eq :disabled }
+ end
+
+ context 'when hook has been backed off' do
+ before do
+ hook.disabled_until = 1.hour.from_now
end
+
+ it { is_expected.to eq :temporarily_disabled }
+ end
+ end
+
+ describe '#to_json' do
+ it 'does not error' do
+ expect { hook.to_json }.not_to raise_error
+ end
+
+ it 'does not error when serializing unsafe attributes' do
+ expect { hook.to_json(unsafe_serialization_hash: true) }.not_to raise_error
+ end
+
+ it 'does not contain binary attributes' do
+ expect(hook.to_json).not_to include('encrypted_url_variables')
+ end
+
+ it 'does not contain binary attributes, even when serializing unsafe attributes' do
+ expect(hook.to_json(unsafe_serialization_hash: true)).not_to include('encrypted_url_variables')
end
end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 0567a8bd386..038018fbd0c 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -250,7 +250,7 @@ RSpec.describe Integration do
context 'with all existing instances' do
def integration_hash(type)
- Integration.new(instance: true, type: type).to_integration_hash
+ Integration.new(instance: true, type: type).to_database_hash
end
before do
@@ -812,14 +812,14 @@ RSpec.describe Integration do
Class.new(Integration) do
def fields
[
- { name: 'token' },
- { name: 'api_token' },
- { name: 'token_api' },
- { name: 'safe_token' },
- { name: 'key' },
- { name: 'api_key' },
- { name: 'password' },
- { name: 'password_field' },
+ { name: 'token', type: 'password' },
+ { name: 'api_token', type: 'password' },
+ { name: 'token_api', type: 'password' },
+ { name: 'safe_token', type: 'password' },
+ { name: 'key', type: 'password' },
+ { name: 'api_key', type: 'password' },
+ { name: 'password', type: 'password' },
+ { name: 'password_field', type: 'password' },
{ name: 'some_safe_field' },
{ name: 'safe_field' },
{ name: 'url' },
@@ -837,15 +837,14 @@ RSpec.describe Integration do
context 'when the class uses the field DSL' do
let(:fake_integration) do
Class.new(described_class) do
- field :token
- field :token
- field :api_token
- field :token_api
- field :safe_token
- field :key
- field :api_key
- field :password
- field :password_field
+ field :token, type: 'password'
+ field :api_token, type: 'password'
+ field :token_api, type: 'password'
+ field :safe_token, type: 'password'
+ field :key, type: 'password'
+ field :api_key, type: 'password'
+ field :password, type: 'password'
+ field :password_field, type: 'password'
field :some_safe_field
field :safe_field
field :url
@@ -977,19 +976,25 @@ RSpec.describe Integration do
end
end
- describe '#to_integration_hash' do
+ describe '#to_database_hash' do
let(:properties) { { foo: 1, bar: true } }
let(:db_props) { properties.stringify_keys }
let(:record) { create(:integration, :instance, properties: properties) }
it 'does not include the properties key' do
- hash = record.to_integration_hash
+ hash = record.to_database_hash
expect(hash).not_to have_key('properties')
end
+ it 'does not include id, ownership, or timestamp attributes' do
+ hash = record.to_database_hash
+
+ expect(hash.keys).not_to include('id', 'instance', 'project_id', 'group_id', 'created_at', 'updated_at')
+ end
+
it 'saves correctly using insert_all' do
- hash = record.to_integration_hash
+ hash = record.to_database_hash
hash[:project_id] = project.id
expect do
@@ -999,8 +1004,8 @@ RSpec.describe Integration do
expect(described_class.last).to have_attributes(properties: db_props)
end
- it 'is part of the to_integration_hash' do
- hash = record.to_integration_hash
+ it 'decrypts encrypted properties correctly' do
+ hash = record.to_database_hash
expect(hash).to include('encrypted_properties' => be_present, 'encrypted_properties_iv' => be_present)
expect(hash['encrypted_properties']).not_to eq(record.encrypted_properties)
@@ -1016,14 +1021,14 @@ RSpec.describe Integration do
context 'when the properties are empty' do
let(:properties) { {} }
- it 'is part of the to_integration_hash' do
- hash = record.to_integration_hash
+ it 'is part of the to_database_hash' do
+ hash = record.to_database_hash
expect(hash).to include('encrypted_properties' => be_nil, 'encrypted_properties_iv' => be_nil)
end
it 'saves correctly using insert_all' do
- hash = record.to_integration_hash
+ hash = record.to_database_hash
hash[:project_id] = project
expect do
@@ -1199,4 +1204,46 @@ RSpec.describe Integration do
end
end
end
+
+ describe '#async_execute' do
+ let(:integration) { described_class.new(id: 123) }
+ let(:data) { { object_kind: 'push' } }
+ let(:supported_events) { %w[push] }
+
+ subject(:async_execute) { integration.async_execute(data) }
+
+ before do
+ allow(integration).to receive(:supported_events).and_return(supported_events)
+ end
+
+ it 'queues an Integrations::ExecuteWorker' do
+ expect(Integrations::ExecuteWorker).to receive(:perform_async).with(integration.id, data)
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ async_execute
+ end
+
+ context 'when the event is not supported' do
+ let(:supported_events) { %w[issue] }
+
+ it 'does not queue a worker' do
+ expect(Integrations::ExecuteWorker).not_to receive(:perform_async)
+
+ async_execute
+ end
+ end
+
+ context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+ it 'queues a ProjectServiceWorker' do
+ expect(ProjectServiceWorker).to receive(:perform_async).with(integration.id, data)
+ expect(Integrations::ExecuteWorker).not_to receive(:perform_async)
+
+ async_execute
+ end
+ end
+ end
end
diff --git a/spec/models/integrations/campfire_spec.rb b/spec/models/integrations/campfire_spec.rb
index 0044e6fae21..405a9ff4b3f 100644
--- a/spec/models/integrations/campfire_spec.rb
+++ b/spec/models/integrations/campfire_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Integrations::Campfire do
it "calls Campfire API to get a list of rooms and speak in a room" do
# make sure a valid list of rooms is returned
- body = File.read(Rails.root + 'spec/fixtures/project_services/campfire/rooms.json')
+ body = File.read(Rails.root + 'spec/fixtures/integrations/campfire/rooms.json')
stub_full_request(@rooms_url).with(basic_auth: @auth).to_return(
body: body,
@@ -65,7 +65,7 @@ RSpec.describe Integrations::Campfire do
it "calls Campfire API to get a list of rooms but shouldn't speak in a room" do
# return a list of rooms that do not contain a room named 'test-room'
- body = File.read(Rails.root + 'spec/fixtures/project_services/campfire/rooms2.json')
+ body = File.read(Rails.root + 'spec/fixtures/integrations/campfire/rooms2.json')
stub_full_request(@rooms_url).with(basic_auth: @auth).to_return(
body: body,
status: 200,
diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb
index c8caf831191..6b1ce7fcbde 100644
--- a/spec/models/integrations/field_spec.rb
+++ b/spec/models/integrations/field_spec.rb
@@ -5,7 +5,14 @@ require 'spec_helper'
RSpec.describe ::Integrations::Field do
subject(:field) { described_class.new(**attrs) }
- let(:attrs) { { name: nil } }
+ let(:attrs) { { name: nil, integration_class: test_integration } }
+ let(:test_integration) do
+ Class.new(Integration) do
+ def self.default_placeholder
+ 'my placeholder'
+ end
+ end
+ end
describe '#name' do
before do
@@ -68,11 +75,8 @@ RSpec.describe ::Integrations::Field do
end
context 'when set to a dynamic value' do
- before do
- attrs[name] = -> { Time.current }
- end
-
it 'is computed' do
+ attrs[name] = -> { Time.current }
start = Time.current
travel_to(start + 1.minute) do
@@ -80,6 +84,13 @@ RSpec.describe ::Integrations::Field do
expect(field.send(name)).to be_after(start)
end
end
+
+ it 'is executed in the class scope' do
+ attrs[name] = -> { default_placeholder }
+
+ expect(field[name]).to eq('my placeholder')
+ expect(field.send(name)).to eq('my placeholder')
+ end
end
end
end
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
index 4a6eb27d63a..9e3d4b524a6 100644
--- a/spec/models/integrations/harbor_spec.rb
+++ b/spec/models/integrations/harbor_spec.rb
@@ -67,6 +67,16 @@ RSpec.describe Integrations::Harbor do
harbor_integration.update!(active: false)
expect(harbor_integration.ci_variables).to match_array([])
end
+
+ context 'with robot username' do
+ it 'returns username variable with $$' do
+ harbor_integration.username = 'robot$project+user'
+
+ expect(harbor_integration.ci_variables).to include(
+ { key: 'HARBOR_USERNAME', value: 'robot$$project+user' }
+ )
+ end
+ end
end
describe 'before_validation :reset_username_and_password' do
diff --git a/spec/models/integrations/irker_spec.rb b/spec/models/integrations/irker_spec.rb
index 8aea2c26dc5..16487aa36e7 100644
--- a/spec/models/integrations/irker_spec.rb
+++ b/spec/models/integrations/irker_spec.rb
@@ -25,9 +25,11 @@ RSpec.describe Integrations::Irker do
end
describe 'Execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:irker) { described_class.new }
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let(:irker_server) { TCPServer.new('localhost', 0) }
let(:sample_data) do
Gitlab::DataBuilder::Push.build_sample(project, user)
end
@@ -36,15 +38,13 @@ RSpec.describe Integrations::Irker do
let(:colorize_messages) { '1' }
before do
- @irker_server = TCPServer.new 'localhost', 0
-
allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
allow(irker).to receive_messages(
active: true,
project: project,
project_id: project.id,
- server_host: @irker_server.addr[2],
- server_port: @irker_server.addr[1],
+ server_host: irker_server.addr[2],
+ server_port: irker_server.addr[1],
default_irc_uri: 'irc://chat.freenode.net/',
recipients: recipients,
colorize_messages: colorize_messages)
@@ -53,18 +53,22 @@ RSpec.describe Integrations::Irker do
end
after do
- @irker_server.close
+ irker_server.close
end
it 'sends valid JSON messages to an Irker listener', :sidekiq_might_not_need_inline do
+ expect(Integrations::IrkerWorker).to receive(:perform_async)
+ .with(project.id, irker.channels, colorize_messages, sample_data, irker.settings)
+ .and_call_original
+
irker.execute(sample_data)
- conn = @irker_server.accept
+ conn = irker_server.accept
Timeout.timeout(5) do
conn.each_line do |line|
msg = Gitlab::Json.parse(line.chomp("\n"))
- expect(msg.keys).to match_array(%w(to privmsg))
+ expect(msg.keys).to match_array(%w[to privmsg])
expect(msg['to']).to match_array(["irc://chat.freenode.net/#commits",
"irc://test.net/#test"])
end
@@ -72,5 +76,19 @@ RSpec.describe Integrations::Irker do
ensure
conn.close if conn
end
+
+ context 'when the FF :rename_integrations_workers is disabled' do
+ before do
+ stub_feature_flags(rename_integrations_workers: false)
+ end
+
+ it 'queues an IrkerWorker' do
+ expect(::IrkerWorker).to receive(:perform_async)
+ .with(project.id, irker.channels, colorize_messages, sample_data, irker.settings)
+ expect(Integrations::IrkerWorker).not_to receive(:perform_async)
+
+ irker.execute(sample_data)
+ end
+ end
end
end
diff --git a/spec/models/integrations/issue_tracker_data_spec.rb b/spec/models/integrations/issue_tracker_data_spec.rb
index 597df237c67..233ed7b8475 100644
--- a/spec/models/integrations/issue_tracker_data_spec.rb
+++ b/spec/models/integrations/issue_tracker_data_spec.rb
@@ -3,7 +3,11 @@
require 'spec_helper'
RSpec.describe Integrations::IssueTrackerData do
- describe 'associations' do
- it { is_expected.to belong_to :integration }
+ it_behaves_like Integrations::BaseDataFields
+
+ describe 'encrypted attributes' do
+ subject { described_class.encrypted_attributes.keys }
+
+ it { is_expected.to contain_exactly(:issues_url, :new_issue_url, :project_url) }
end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 061c770a61a..28d97b74adb 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -31,6 +31,61 @@ RSpec.describe Integrations::Jira do
let(:integration) { jira_integration }
end
+ describe 'validations' do
+ subject { jira_integration }
+
+ context 'when integration is active' do
+ before do
+ jira_integration.active = true
+
+ # Don't auto-fill URLs from gitlab.yml
+ stub_config(issues_tracker: {})
+ end
+
+ it { is_expected.to be_valid }
+ it { is_expected.to validate_presence_of(:url) }
+ it { is_expected.to validate_presence_of(:username) }
+ it { is_expected.to validate_presence_of(:password) }
+
+ it_behaves_like 'issue tracker integration URL attribute', :url
+ it_behaves_like 'issue tracker integration URL attribute', :api_url
+ end
+
+ context 'when integration is inactive' do
+ before do
+ jira_integration.active = false
+ end
+
+ it { is_expected.to be_valid }
+ it { is_expected.not_to validate_presence_of(:url) }
+ it { is_expected.not_to validate_presence_of(:username) }
+ it { is_expected.not_to validate_presence_of(:password) }
+ end
+
+ describe 'jira_issue_transition_id' do
+ it 'accepts a blank value' do
+ jira_integration.jira_issue_transition_id = ' '
+
+ expect(jira_integration).to be_valid
+ end
+
+ it 'accepts any string containing numbers' do
+ jira_integration.jira_issue_transition_id = 'foo 23 bar'
+
+ expect(jira_integration).to be_valid
+ end
+
+ it 'does not accept a string without numbers' do
+ jira_integration.jira_issue_transition_id = 'foo bar'
+
+ expect(jira_integration).not_to be_valid
+ expect(jira_integration.errors.full_messages).to eq([
+ 'Jira issue transition IDs must be a list of numbers that can be split with , or ;'
+ ])
+ end
+ end
+ end
+
describe '#options' do
let(:options) do
{
diff --git a/spec/models/integrations/jira_tracker_data_spec.rb b/spec/models/integrations/jira_tracker_data_spec.rb
index 5430dd2eb52..d9f91527fbb 100644
--- a/spec/models/integrations/jira_tracker_data_spec.rb
+++ b/spec/models/integrations/jira_tracker_data_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe Integrations::JiraTrackerData do
- describe 'associations' do
- it { is_expected.to belong_to(:integration) }
- end
+ it_behaves_like Integrations::BaseDataFields
describe 'deployment_type' do
- it { is_expected.to define_enum_for(:deployment_type).with_values([:unknown, :server, :cloud]).with_prefix(:deployment) }
+ specify do
+ is_expected.to define_enum_for(:deployment_type).with_values([:unknown, :server, :cloud]).with_prefix(:deployment)
+ end
end
describe 'encrypted attributes' do
diff --git a/spec/models/integrations/prometheus_spec.rb b/spec/models/integrations/prometheus_spec.rb
index a7495cb9574..fbeaebfd807 100644
--- a/spec/models/integrations/prometheus_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -145,6 +145,17 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
expect(req_stub).to have_been_requested
end
end
+
+ context 'when configuration is not valid' do
+ before do
+ integration.api_url = nil
+ end
+
+ it 'returns failure message' do
+ expect(integration.test[:success]).to be_falsy
+ expect(integration.test[:result]).to eq('Prometheus configuration error')
+ end
+ end
end
describe '#prometheus_client' do
diff --git a/spec/models/integrations/zentao_tracker_data_spec.rb b/spec/models/integrations/zentao_tracker_data_spec.rb
index b078c57830b..dca5c4d79ae 100644
--- a/spec/models/integrations/zentao_tracker_data_spec.rb
+++ b/spec/models/integrations/zentao_tracker_data_spec.rb
@@ -3,16 +3,14 @@
require 'spec_helper'
RSpec.describe Integrations::ZentaoTrackerData do
+ it_behaves_like Integrations::BaseDataFields
+
describe 'factory available' do
let(:zentao_tracker_data) { create(:zentao_tracker_data) }
it { expect(zentao_tracker_data.valid?).to eq true }
end
- describe 'associations' do
- it { is_expected.to belong_to(:integration) }
- end
-
describe 'encrypted attributes' do
subject { described_class.encrypted_attributes.keys }
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index c77c0a5504a..d45a23a7ef8 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -455,7 +455,7 @@ RSpec.describe Issue do
end
end
- describe '#related_issues' do
+ describe '#related_issues to relate incidents and issues' do
let_it_be(:authorized_project) { create(:project) }
let_it_be(:authorized_project2) { create(:project) }
let_it_be(:unauthorized_project) { create(:project) }
@@ -463,12 +463,14 @@ RSpec.describe Issue do
let_it_be(:authorized_issue_a) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_b) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_c) { create(:issue, project: authorized_project2) }
+ let_it_be(:authorized_incident_a) { create(:incident, project: authorized_project) }
let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
let_it_be(:issue_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_b) }
let_it_be(:issue_link_b) { create(:issue_link, source: authorized_issue_a, target: unauthorized_issue) }
let_it_be(:issue_link_c) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_c) }
+ let_it_be(:issue_incident_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_incident_a) }
before_all do
authorized_project.add_developer(user)
@@ -477,7 +479,7 @@ RSpec.describe Issue do
it 'returns only authorized related issues for given user' do
expect(authorized_issue_a.related_issues(user))
- .to contain_exactly(authorized_issue_b, authorized_issue_c)
+ .to contain_exactly(authorized_issue_b, authorized_issue_c, authorized_incident_a)
end
it 'returns issues with valid issue_link_type' do
@@ -507,7 +509,7 @@ RSpec.describe Issue do
expect(Ability).to receive(:allowed?).with(user, :read_cross_project).and_return(false)
expect(authorized_issue_a.related_issues(user))
- .to contain_exactly(authorized_issue_b)
+ .to contain_exactly(authorized_issue_b, authorized_incident_a)
end
end
end
@@ -1565,4 +1567,64 @@ RSpec.describe Issue do
expect(issue.escalation_status).to eq(escalation_status)
end
end
+
+ describe '#expire_etag_cache' do
+ let_it_be(:issue) { create(:issue) }
+
+ subject(:expire_cache) { issue.expire_etag_cache }
+
+ it 'touches the etag cache store' do
+ key = Gitlab::Routing.url_helpers.realtime_changes_project_issue_path(issue.project, issue)
+
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |cache_store|
+ expect(cache_store).to receive(:touch).with(key)
+ end
+
+ expire_cache
+ end
+ end
+
+ describe '#link_reference_pattern' do
+ let(:match_data) { described_class.link_reference_pattern.match(link_reference_url) }
+
+ context 'with issue url' do
+ let(:link_reference_url) { 'http://localhost/namespace/project/-/issues/1' }
+
+ it 'matches with expected attributes' do
+ expect(match_data['namespace']).to eq('namespace')
+ expect(match_data['project']).to eq('project')
+ expect(match_data['issue']).to eq('1')
+ end
+ end
+
+ context 'with incident url' do
+ let(:link_reference_url) { 'http://localhost/namespace1/project1/-/issues/incident/2' }
+
+ it 'matches with expected attributes' do
+ expect(match_data['namespace']).to eq('namespace1')
+ expect(match_data['project']).to eq('project1')
+ expect(match_data['issue']).to eq('2')
+ end
+ end
+ end
+
+ context 'order by closed_at' do
+ let!(:issue_a) { create(:issue, closed_at: 1.day.ago) }
+ let!(:issue_b) { create(:issue, closed_at: 5.days.ago) }
+ let!(:issue_c_nil) { create(:issue, closed_at: nil) }
+ let!(:issue_d) { create(:issue, closed_at: 3.days.ago) }
+ let!(:issue_e_nil) { create(:issue, closed_at: nil) }
+
+ describe '.order_closed_at_asc' do
+ it 'orders on closed at' do
+ expect(described_class.order_closed_at_asc.to_a).to eq([issue_b, issue_d, issue_a, issue_c_nil, issue_e_nil])
+ end
+ end
+
+ describe '.order_closed_at_desc' do
+ it 'orders on closed at' do
+ expect(described_class.order_closed_at_desc.to_a).to eq([issue_a, issue_d, issue_b, issue_c_nil, issue_e_nil])
+ end
+ end
+ end
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 225c9714187..a9d1a8a5ef2 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -45,6 +45,157 @@ RSpec.describe Key, :mailer do
end
end
end
+
+ describe 'validation of banned keys' do
+ let_it_be(:user) { create(:user) }
+
+ let(:key) { build(:key) }
+ let(:banned_keys) do
+ [
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2y' \
+ 'x5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW' \
+ '26s2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIBnZQ+6nhlPX/JnX5i5hXpljJ89bSnnrsSs51' \
+ 'hSPuoJGmoKowBddISK7s10AIpO0xAWGcr8PUr2FOjEBbDHqlRxoXF0Ocms9xv3ql9EYUQ5' \
+ '+U+M6BymWhNTFPOs6gFHUl8Bw3t6c+SRKBpfRFB0yzBj9d093gSdfTAFoz+yLo4vRw==',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAvIhC5skTzxyHif/7iy3yhxuK6/OB13hjPq' \
+ 'rskogkYFrcW8OK4VJT+5+Fx7wd4sQCnVn8rNqahw/x6sfcOMDI/Xvn4yKU4t8TnYf2MpUV' \
+ 'r4ndz39L5Ds1n7Si1m2suUNxWbKv58I8+NMhlt2ITraSuTU0NGymWOc8+LNi+MHXdLk= SCCP Superuser',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr' \
+ '+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6' \
+ 'IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5' \
+ 'y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0j' \
+ 'MZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98' \
+ 'OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2yx' \
+ '5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW26' \
+ 's2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAn8LoId2N5i28cNKuEWWea3yt0I/LdT/NRO' \
+ 'rF44WZewtxch+DIwteQhM1qL6EKUSqz3Q2geX1crpOsNnyh67xy5lNo086u/QewOCSRAUG' \
+ 'rQCXqFQ4JU8ny/qugWALQHjbIaPHj/3zMK09r4cpTSeAU7CW5nQyTKGmh7v9CAfWfcs= adam@localhost.localdomain',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAJTDsX+8olPZeyr58g9XE0L8PKT5030NZBPlE7np4h' \
+ 'Bqx36HoWarWq1Csn8M57dWN9StKbs03k2ggY6sYJK5AW2EWar70um3pYjKQHiZq7mITmit' \
+ 'sozFN/K7wu2e2iKRgquUwH5SuYoOJ29n7uhaILXiKZP4/H/dDudqPRSY6tJPAAAAFQDtuW' \
+ 'H90mDbU2L/Ms2lfl/cja/wHwAAAIAMBwSHZt2ysOHCFe1WLUvdwVDHUqk3QHTskuuAnMlw' \
+ 'MtSvCaUxSatdHahsMZ9VCHjoQUx6j+TcgRLDbMlRLnwUlb6wpniehLBFk+qakGcREqks5N' \
+ 'xYzFTJXwROzP72jPvVgQyOZHWq81gCild/ljL7hmrduCqYwxDIz4o7U92UKQAAAIBmhSl9' \
+ 'CVPgVMv1xO8DAHVhM1huIIK8mNFrzMJz+JXzBx81ms1kWSeQOC/nraaXFTBlqiQsvB8tzr' \
+ '4xZdbaI/QzVLKNAF5C8BJ4ScNlTIx1aZJwyMil8Nzb+0YAsw5Ja+bEZZvEVlAYnd10qRWr' \
+ 'PeEY1txLMmX3wDa+JvJL7fmuBg==',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAMq5EcIFdfCjJakyQnP/BBp9oc6mpaZVguf0Znp5C4' \
+ '0twiG1lASQJZlM1qOB/hkBWYeBCHUkcOLEnVXSZzB62L+W/LGKodqnsiQPRr57AA6jPc6m' \
+ 'NBnejHai8cSdAl9n/0s2IQjdcrxM8CPq2uEyfm0J3AV6Lrbbxr5NgE5xxM+DAAAAFQCmFk' \
+ '/M7Rx2jexsJ9COpHkHwUjcNQAAAIAdg18oByp/tjjDKhWhmmv+HbVIROkRqSxBvuEZEmcW' \
+ 'lg38mLIT1bydfpSou/V4rI5ctxwCfJ1rRr66pw6GwCrz4fXmyVlhrj7TrktyQ9+zRXhynF' \
+ '4wdNPWErhNHb8tGlSOFiOBcUTlouX3V/ka6Dkd6ZQrZLQFaH+gjfyTZZ82HQAAAIEArsJg' \
+ 'p7RLPOsCeLqoia/eljseBFVDazO5Q0ysUotTw9wgXGGVWREwm8wNggFNb9eCiBAAUfVZVf' \
+ 'hVAtFT0pBf/eIVLPXyaMw3prBt7LqeBrbagODc3WAAdMTPIdYYcOKgv+YvTXa51zG64v6p' \
+ 'QOfS8WXgKCzDl44puXfYeDk5lVQ=',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAKwKBw7D4OA1H/uD4htdh04TBIHdbSjeXUSnWJsce8' \
+ 'C0tvoB01Yarjv9TFj+tfeDYVWtUK1DA1JkyqSuoAtDANJzF4I6Isyd0KPrW3dHFTcg6Xlz' \
+ '8d3KEaHokY93NOmB/xWEkhme8b7Q0U2iZie2pgWbTLXV0FA+lhskTtPHW3+VAAAAFQDRya' \
+ 'yUlVZKXEweF3bUe03zt9e8VQAAAIAEPK1k3Y6ErAbIl96dnUCnZjuWQ7xXy062pf63QuRW' \
+ 'I6LYSscm3f1pEknWUNFr/erQ02pkfi2eP9uHl1TI1ql+UmJX3g3frfssLNZwWXAW0m8PbY' \
+ '3HZSs+f5hevM3ua32pnKDmbQ2WpvKNyycKHi81hSI14xMcdblJolhN5iY8/wAAAIAjEe5+' \
+ '0m/TlBtVkqQbUit+s/g+eB+PFQ+raaQdL1uztW3etntXAPH1MjxsAC/vthWYSTYXORkDFM' \
+ 'hrO5ssE2rfg9io0NDyTIZt+VRQMGdi++dH8ptU+ldl2ZejLFdTJFwFgcfXz+iQ1mx6h9TP' \
+ 'X1crE1KoMAVOj3yKVfKpLB1EkA== root@lbslave',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAN3AITryJMQyOKZjAky+mQ/8pOHIlu4q8pzmR0qotK' \
+ 'aLm2yye5a0PY2rOaQRAzi7EPheBXbqTb8a8TrHhGXI5P7GUHaJho5HhEnw+5TwAvP72L7L' \
+ 'cPwxMxj/rLcR/jV+uLMsVeJVWjwJcUv83yzPXoVjK0hrIm+RLLeuTM+gTylHAAAAFQD5gB' \
+ 'dXsXAiTz1atzMg3xDFF1zlowAAAIAlLy6TCMlOBM0IcPsvP/9bEjDj0M8YZazdqt4amO2I' \
+ 'aNUPYt9/sIsLOQfxIj8myDK1TOp8NyRJep7V5aICG4f3Q+XktlmLzdWn3sjvbWuIAXe1op' \
+ 'jG2T69YhxfHZr8Wn7P4tpCgyqM4uHmUKrfnBzQQ9vkUUWsZoUXM2Z7vUXVfQAAAIAU6eNl' \
+ 'phQWDwx0KOBiiYhF9BM6kDbQlyw8333rAG3G4CcjI2G8eYGtpBNliaD185UjCEsjPiudhG' \
+ 'il/j4Zt/+VY3aGOLoi8kqXBBc8ZAML9bbkXpyhQhMgwiywx3ciFmvSn2UAin8yurStYPQx' \
+ 'tXauZN5PYbdwCHPS7ApIStdpMA== wood@endec1',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAISAE3CAX4hsxTw0dRc0gx8nQ41r3Vkj9OmG6LGeKW' \
+ 'Rmpy7C6vaExuupjxid76fd4aS56lCUEEoRlJ3zE93qoK9acI6EGqGQFLuDZ0fqMyRSX+il' \
+ 'f+1HDo/TRyuraggxp9Hj9LMpZVbpFATMm0+d9Xs7eLmaJjuMsowNlOf8NFdHAAAAFQCwdv' \
+ 'qOAkR6QhuiAapQ/9iVuR0UAQAAAIBpLMo4dhSeWkChfv659WLPftxRrX/HR8YMD/jqa3R4' \
+ 'PsVM2g6dQ1191nHugtdV7uaMeOqOJ/QRWeYM+UYwT0Zgx2LqvgVSjNDfdjk+ZRY8x3SmEx' \
+ 'Fi62mKFoTGSOCXfcAfuanjaoF+sepnaiLUd+SoJShGYHoqR2QWiysTRqknlwAAAIBLEgYm' \
+ 'r9XCSqjENFDVQPFELYKT7Zs9J87PjPS1AP0qF1OoRGZ5mefK6X/6VivPAUWmmmev/BuAs8' \
+ 'M1HtfGeGGzMzDIiU/WZQ3bScLB1Ykrcjk7TOFD6xrnk/inYAp5l29hjidoAONcXoHmUAMY' \
+ 'OKqn63Q2AsDpExVcmfj99/BlpQ=='
+ ]
+ end
+
+ context 'when ssh_banned_key feature flag is enabled for a user' do
+ before do
+ stub_feature_flags(ssh_banned_key: user)
+ end
+
+ where(:key_content) { banned_keys }
+
+ with_them do
+ it 'does not allow banned keys' do
+ key.key = key_content
+ key.user = user
+
+ expect(key).to be_invalid
+ expect(key.errors[:key]).to include(
+ _('cannot be used because it belongs to a compromised private key. Stop using this key and generate a new one.'))
+ end
+
+ it 'allows when the user is a ghost user' do
+ key.key = key_content
+ key.user = User.ghost
+
+ expect(key).to be_valid
+ end
+
+ it 'allows when the user is nil' do
+ key.key = key_content
+ key.user = nil
+
+ expect(key).to be_valid
+ end
+ end
+
+ it 'allows other keys' do
+ key.user = user
+
+ expect(key).to be_valid
+ end
+
+ it 'allows other users' do
+ key.user = User.ghost
+
+ expect(key).to be_valid
+ end
+ end
+
+ context 'when ssh_banned_key feature flag is disabled' do
+ before do
+ stub_feature_flags(ssh_banned_key: false)
+ end
+
+ where(:key_content) { banned_keys }
+
+ with_them do
+ it 'allows banned keys' do
+ key.key = key_content
+
+ expect(key).to be_valid
+ end
+ end
+
+ it 'allows other keys' do
+ expect(key).to be_valid
+ end
+ end
+ end
end
describe "Methods" do
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 92f9099d04d..f93c2d36966 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -47,6 +47,16 @@ RSpec.describe GroupMember do
end
end
+ describe '#permissible_access_level_roles' do
+ let_it_be(:group) { create(:group) }
+
+ it 'returns Gitlab::Access.options_with_owner' do
+ result = described_class.permissible_access_level_roles(group.first_owner, group)
+
+ expect(result).to eq(Gitlab::Access.options_with_owner)
+ end
+ end
+
it_behaves_like 'members notifications', :group
describe '#namespace_id' do
@@ -148,4 +158,135 @@ RSpec.describe GroupMember do
end
end
end
+
+ context 'authorization refresh on addition/update/deletion' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project_a) { create(:project, group: group) }
+ let_it_be(:project_b) { create(:project, group: group) }
+ let_it_be(:project_c) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:affected_project_ids) { Project.id_in([project_a, project_b, project_c]).ids }
+
+ before do
+ stub_const(
+ "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
+ affected_project_ids.size - 1)
+ end
+
+ shared_examples_for 'calls UserProjectAccessChangedService to recalculate authorizations' do
+ it 'calls UserProjectAccessChangedService to recalculate authorizations' do
+ expect_next_instance_of(UserProjectAccessChangedService, user.id) do |service|
+ expect(service).to receive(:execute).with(blocking: blocking)
+ end
+
+ action
+ end
+ end
+
+ shared_examples_for 'tries to update permissions via refreshing authorizations for the affected projects' do
+ context 'when the number of affected projects exceeds the set threshold' do
+ it 'updates permissions via refreshing authorizations for the affected projects asynchronously' do
+ expect_next_instance_of(
+ AuthorizedProjectUpdate::ProjectAccessChangedService, affected_project_ids
+ ) do |service|
+ expect(service).to receive(:execute).with(blocking: false)
+ end
+
+ action
+ end
+
+ it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay as a safety net' do
+ expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
+ receive(:bulk_perform_in)
+ .with(1.hour,
+ [[user.id]],
+ batch_delay: 30.seconds, batch_size: 100)
+ )
+
+ action
+ end
+ end
+
+ context 'when the number of affected projects does not exceed the set threshold' do
+ before do
+ stub_const(
+ "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
+ affected_project_ids.size + 1)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+
+ context 'on create' do
+ let(:action) { group.add_user(user, Gitlab::Access::GUEST) }
+ let(:blocking) { true }
+
+ it 'changes access level', :sidekiq_inline do
+ expect { action }.to change { user.can?(:guest_access, project_a) }.from(false).to(true)
+ .and change { user.can?(:guest_access, project_b) }.from(false).to(true)
+ .and change { user.can?(:guest_access, project_c) }.from(false).to(true)
+ end
+
+ it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+
+ context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
+ before do
+ stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+
+ context 'on update' do
+ before do
+ group.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ let(:action) { group.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
+ let(:blocking) { true }
+
+ it 'changes access level', :sidekiq_inline do
+ expect { action }.to change { user.can?(:developer_access, project_a) }.from(false).to(true)
+ .and change { user.can?(:developer_access, project_b) }.from(false).to(true)
+ .and change { user.can?(:developer_access, project_c) }.from(false).to(true)
+ end
+
+ it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+
+ context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
+ before do
+ stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+
+ context 'on destroy' do
+ before do
+ group.add_user(user, Gitlab::Access::GUEST)
+ end
+
+ let(:action) { group.members.find_by(user: user).destroy! }
+ let(:blocking) { false }
+
+ it 'changes access level', :sidekiq_inline do
+ expect { action }.to change { user.can?(:guest_access, project_a) }.from(true).to(false)
+ .and change { user.can?(:guest_access, project_b) }.from(true).to(false)
+ .and change { user.can?(:guest_access, project_c) }.from(true).to(false)
+ end
+
+ it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+
+ context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
+ before do
+ stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
+ end
+
+ it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ end
+ end
+ end
end
diff --git a/spec/models/members/last_group_owner_assigner_spec.rb b/spec/models/members/last_group_owner_assigner_spec.rb
index bb0f751e7d5..429cf4190cf 100644
--- a/spec/models/members/last_group_owner_assigner_spec.rb
+++ b/spec/models/members/last_group_owner_assigner_spec.rb
@@ -94,5 +94,18 @@ RSpec.describe LastGroupOwnerAssigner do
end
end
end
+
+ context 'when there are bot members' do
+ context 'with a bot owner' do
+ specify do
+ create(:group_member, :owner, source: group, user: create(:user, :project_bot))
+
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(true)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(false)
+ end
+ end
+ end
end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 3923f4161cc..8c989f5aaca 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -23,6 +23,30 @@ RSpec.describe ProjectMember do
end
end
+ describe '#permissible_access_level_roles' do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ before do
+ project.add_owner(owner)
+ project.add_maintainer(maintainer)
+ end
+
+ context 'when member can manage owners' do
+ it 'returns Gitlab::Access.options_with_owner' do
+ expect(described_class.permissible_access_level_roles(owner, project)).to eq(Gitlab::Access.options_with_owner)
+ end
+ end
+
+ context 'when member cannot manage owners' do
+ it 'returns Gitlab::Access.options' do
+ expect(described_class.permissible_access_level_roles(maintainer, project)).to eq(Gitlab::Access.options)
+ end
+ end
+ end
+
describe '#real_source_type' do
subject { create(:project_member).real_source_type }
diff --git a/spec/models/merge_request/cleanup_schedule_spec.rb b/spec/models/merge_request/cleanup_schedule_spec.rb
index 85208f901fd..9c50b64f2bd 100644
--- a/spec/models/merge_request/cleanup_schedule_spec.rb
+++ b/spec/models/merge_request/cleanup_schedule_spec.rb
@@ -120,6 +120,41 @@ RSpec.describe MergeRequest::CleanupSchedule do
end
end
+ describe '.stuck' do
+ let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, updated_at: 1.day.ago) }
+ let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, :running, updated_at: 5.hours.ago) }
+ let!(:cleanup_schedule_3) { create(:merge_request_cleanup_schedule, :running, updated_at: 7.hours.ago) }
+ let!(:cleanup_schedule_4) { create(:merge_request_cleanup_schedule, :completed, updated_at: 1.day.ago) }
+ let!(:cleanup_schedule_5) { create(:merge_request_cleanup_schedule, :failed, updated_at: 1.day.ago) }
+
+ it 'returns records that have been in the running state for more than 6 hours' do
+ expect(described_class.stuck).to match_array([cleanup_schedule_3])
+ end
+ end
+
+ describe '.stuck_retry!' do
+ let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, :running, updated_at: 5.hours.ago) }
+ let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, :running, updated_at: 7.hours.ago) }
+
+ it 'sets stuck records to unstarted' do
+ expect { described_class.stuck_retry! }.to change { cleanup_schedule_2.reload.unstarted? }.from(false).to(true)
+ end
+
+ context 'when there are more than 5 stuck schedules' do
+ before do
+ create_list(:merge_request_cleanup_schedule, 5, :running, updated_at: 1.day.ago)
+ end
+
+ it 'only retries 5 stuck schedules at once' do
+ expect(described_class.stuck.count).to eq 6
+
+ described_class.stuck_retry!
+
+ expect(described_class.stuck.count).to eq 1
+ end
+ end
+ end
+
describe '.start_next' do
let!(:cleanup_schedule_1) { create(:merge_request_cleanup_schedule, :completed, scheduled_at: 1.day.ago) }
let!(:cleanup_schedule_2) { create(:merge_request_cleanup_schedule, scheduled_at: 2.days.ago) }
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index 5a48438adab..c9bcb900eca 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -85,5 +85,13 @@ RSpec.describe MergeRequestDiffFile do
expect { subject.utf8_diff }.not_to raise_error
end
+
+ it 'calls #diff once' do
+ allow(subject).to receive(:diff).and_return('test')
+
+ expect(subject).to receive(:diff).once
+
+ subject.utf8_diff
+ end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index d40c78b5b60..381eccf2376 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1381,7 +1381,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe "#work_in_progress?" do
+ describe "#draft?" do
subject { build_stubbed(:merge_request) }
[
@@ -1390,102 +1390,89 @@ RSpec.describe MergeRequest, factory_default: :keep do
it "detects the '#{draft_prefix}' prefix" do
subject.title = "#{draft_prefix}#{subject.title}"
- expect(subject.work_in_progress?).to eq true
+ expect(subject.draft?).to eq true
end
end
- [
- 'WIP:', 'WIP: ', '[WIP]', '[WIP] ', ' [WIP] WIP: [WIP] WIP:',
- "WIP ", "(WIP)",
- "draft", "Draft", "Draft -", "draft - ", "Draft ", "draft "
- ].each do |draft_prefix|
- it "doesn't detect '#{draft_prefix}' at the start of the title as a draft" do
- subject.title = "#{draft_prefix}#{subject.title}"
+ context "returns false" do
+ # We have removed support for variations of "WIP", and additionally need
+ # to test unsupported variations of "Draft" that we have seen users
+ # attempt.
+ #
+ [
+ 'WIP:', 'WIP: ', '[WIP]', '[WIP] ', ' [WIP] WIP: [WIP] WIP:',
+ "WIP ", "(WIP)",
+ "draft", "Draft", "Draft -", "draft - ", "Draft ", "draft "
+ ].each do |trigger|
+ it "when '#{trigger}' prefixes the title" do
+ subject.title = "#{trigger}#{subject.title}"
- expect(subject.work_in_progress?).to eq false
+ expect(subject.draft?).to eq false
+ end
end
- end
-
- it "doesn't detect merge request title just saying 'wip'" do
- subject.title = "wip"
- expect(subject.work_in_progress?).to eq false
- end
+ ["WIP", "Draft"].each do |trigger| # rubocop:disable Style/WordArray
+ it "when merge request title is simply '#{trigger}'" do
+ subject.title = trigger
- it "does not detect merge request title just saying 'draft'" do
- subject.title = "draft"
-
- expect(subject.work_in_progress?).to eq false
- end
-
- it 'does not detect WIP in the middle of the title' do
- subject.title = 'Something with WIP in the middle'
-
- expect(subject.work_in_progress?).to eq false
- end
-
- it 'does not detect Draft in the middle of the title' do
- subject.title = 'Something with Draft in the middle'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when #{trigger} is in the middle of the title" do
+ subject.title = "Something with #{trigger} in the middle"
- it 'does not detect Draft: in the middle of the title' do
- subject.title = 'Something with Draft: in the middle'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when #{trigger} is at the end of the title" do
+ subject.title = "Something ends with #{trigger}"
- it 'does not detect WIP at the end of the title' do
- subject.title = 'Something ends with WIP'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when title contains words starting with #{trigger}" do
+ subject.title = "#{trigger}foo #{subject.title}"
- it 'does not detect Draft at the end of the title' do
- subject.title = 'Something ends with Draft'
+ expect(subject.draft?).to eq false
+ end
- expect(subject.work_in_progress?).to eq false
- end
+ it "when title contains words containing with #{trigger}" do
+ subject.title = "Foo#{trigger}Bar #{subject.title}"
- it "doesn't detect WIP for words starting with WIP" do
- subject.title = "Wipwap #{subject.title}"
- expect(subject.work_in_progress?).to eq false
- end
+ expect(subject.draft?).to eq false
+ end
+ end
- it "doesn't detect WIP for words containing with WIP" do
- subject.title = "WupWipwap #{subject.title}"
- expect(subject.work_in_progress?).to eq false
- end
+ it 'when Draft: is in the middle of the title' do
+ subject.title = 'Something with Draft: in the middle'
- it "doesn't detect draft for words containing with draft" do
- subject.title = "Drafting #{subject.title}"
- expect(subject.work_in_progress?).to eq false
- end
+ expect(subject.draft?).to eq false
+ end
- it "doesn't detect WIP by default" do
- expect(subject.work_in_progress?).to eq false
- end
+ it "when the title does not contain draft" do
+ expect(subject.draft?).to eq false
+ end
- it "is aliased to #draft?" do
- expect(subject.method(:work_in_progress?)).to eq(subject.method(:draft?))
+ it "is aliased to #draft?" do
+ expect(subject.method(:work_in_progress?)).to eq(subject.method(:draft?))
+ end
end
end
- describe "#wipless_title" do
+ describe "#draftless_title" do
subject { build_stubbed(:merge_request) }
['draft:', 'Draft: ', '[Draft]', '[DRAFT] '].each do |draft_prefix|
it "removes a '#{draft_prefix}' prefix" do
- wipless_title = subject.title
+ draftless_title = subject.title
subject.title = "#{draft_prefix}#{subject.title}"
- expect(subject.wipless_title).to eq wipless_title
+ expect(subject.draftless_title).to eq draftless_title
end
it "is satisfies the #work_in_progress? method" do
subject.title = "#{draft_prefix}#{subject.title}"
- subject.title = subject.wipless_title
+ subject.title = subject.draftless_title
expect(subject.work_in_progress?).to eq false
end
@@ -1497,58 +1484,58 @@ RSpec.describe MergeRequest, factory_default: :keep do
it "doesn't remove a '#{wip_prefix}' prefix" do
subject.title = "#{wip_prefix}#{subject.title}"
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
end
it 'removes only draft prefix from the MR title' do
subject.title = 'Draft: Implement feature called draft'
- expect(subject.wipless_title).to eq 'Implement feature called draft'
+ expect(subject.draftless_title).to eq 'Implement feature called draft'
end
it 'does not remove WIP in the middle of the title' do
subject.title = 'Something with WIP in the middle'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
it 'does not remove Draft in the middle of the title' do
subject.title = 'Something with Draft in the middle'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
it 'does not remove WIP at the end of the title' do
subject.title = 'Something ends with WIP'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
it 'does not remove Draft at the end of the title' do
subject.title = 'Something ends with Draft'
- expect(subject.wipless_title).to eq subject.title
+ expect(subject.draftless_title).to eq subject.title
end
end
- describe "#wip_title" do
+ describe "#draft_title" do
it "adds the Draft: prefix to the title" do
- wip_title = "Draft: #{subject.title}"
+ draft_title = "Draft: #{subject.title}"
- expect(subject.wip_title).to eq wip_title
+ expect(subject.draft_title).to eq draft_title
end
it "does not add the Draft: prefix multiple times" do
- wip_title = "Draft: #{subject.title}"
- subject.title = subject.wip_title
- subject.title = subject.wip_title
+ draft_title = "Draft: #{subject.title}"
+ subject.title = subject.draft_title
+ subject.title = subject.draft_title
- expect(subject.wip_title).to eq wip_title
+ expect(subject.draft_title).to eq draft_title
end
it "is satisfies the #work_in_progress? method" do
- subject.title = subject.wip_title
+ subject.title = subject.draft_title
expect(subject.work_in_progress?).to eq true
end
@@ -5077,4 +5064,75 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(assignees).to match_array([subject.merge_request_assignees[0]])
end
end
+
+ describe '#recent_diff_head_shas' do
+ let_it_be(:merge_request_with_diffs) do
+ params = {
+ target_project: project,
+ source_project: project,
+ target_branch: 'master',
+ source_branch: 'feature'
+ }
+
+ create(:merge_request, params).tap do |mr|
+ 4.times { mr.merge_request_diffs.create! }
+ mr.create_merge_head_diff
+ end
+ end
+
+ let(:shas) do
+ # re-find to avoid caching the association
+ described_class.find(merge_request_with_diffs.id).merge_request_diffs.order(id: :desc).pluck(:head_commit_sha)
+ end
+
+ shared_examples 'correctly sorted and limited diff_head_shas' do
+ it 'has up to MAX_RECENT_DIFF_HEAD_SHAS, ordered most recent first' do
+ stub_const('MergeRequest::MAX_RECENT_DIFF_HEAD_SHAS', 3)
+
+ expect(subject.recent_diff_head_shas).to eq(shas.first(3))
+ end
+
+ it 'supports limits' do
+ expect(subject.recent_diff_head_shas(2)).to eq(shas.first(2))
+ end
+ end
+
+ context 'when the association is not loaded' do
+ subject(:mr) { merge_request_with_diffs }
+
+ include_examples 'correctly sorted and limited diff_head_shas'
+ end
+
+ context 'when the association is loaded' do
+ subject(:mr) do
+ described_class.where(id: merge_request_with_diffs.id).preload(:merge_request_diffs).first
+ end
+
+ include_examples 'correctly sorted and limited diff_head_shas'
+
+ it 'does not issue any queries' do
+ expect(subject).to be_a(described_class) # preload here
+
+ expect { subject.recent_diff_head_shas }.not_to exceed_query_limit(0)
+ end
+ end
+ end
+
+ describe '#target_default_branch?' do
+ let_it_be(:merge_request) { build(:merge_request, project: project) }
+
+ it 'returns false' do
+ expect(merge_request.target_default_branch?).to be false
+ end
+
+ context 'with target_branch equal to the project default branch' do
+ before do
+ merge_request.target_branch = "master"
+ end
+
+ it 'returns true' do
+ expect(merge_request.target_default_branch?).to be true
+ end
+ end
+ end
end
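The rewritten #draft? examples treat only an explicit draft marker at the start of the title as a draft; bare 'Draft'/'WIP' words, suffixes and mid-title markers no longer count. A small regex consistent with the cases exercised above (an approximation for illustration, not necessarily GitLab's exact pattern):

    # Matches "[Draft]" or "Draft:" (case-insensitive) at the start of a title,
    # plus any trailing whitespace; nothing else counts as a draft marker.
    DRAFT_PREFIX = /\A(?:\[draft\]|draft:)\s*/i

    def draft_title?(title)
      title.match?(DRAFT_PREFIX)
    end

    def draftless_title(title)
      title.sub(DRAFT_PREFIX, '')
    end

    puts draft_title?('Draft: Add feature')          # => true
    puts draft_title?('[Draft] Add feature')         # => true
    puts draft_title?('Something with Draft in it')  # => false
    puts draftless_title('Draft: Implement feature called draft')
    # => "Implement feature called draft"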
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 06044cf53cc..72a57b6076a 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -82,14 +82,16 @@ RSpec.describe Milestone do
context 'when it is tied to a release for another project' do
it 'creates a validation error' do
other_project = create(:project)
- milestone.releases << build(:release, project: other_project)
+ milestone.releases << build(:release,
+ project: other_project, author_id: other_project.members.first.user_id)
expect(milestone).not_to be_valid
end
end
context 'when it is tied to a release for the same project' do
it 'is valid' do
- milestone.releases << build(:release, project: project)
+ milestone.releases << build(:release,
+ project: project, author_id: project.members.first.user_id)
expect(milestone).to be_valid
end
end
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index c399a0084fb..d2ee0b40ed6 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -58,6 +58,19 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
expect(root_storage_statistics.uploads_size).to eq(total_uploads_size)
end
+ it 'aggregates container_repositories_size and storage_size' do
+ allow(namespace).to receive(:container_repositories_size).and_return(999)
+
+ root_storage_statistics.recalculate!
+
+ root_storage_statistics.reload
+
+ total_storage_size = project_stat1.storage_size + project_stat2.storage_size + 999
+
+ expect(root_storage_statistics.container_registry_size).to eq(999)
+ expect(root_storage_statistics.storage_size).to eq(total_storage_size)
+ end
+
it 'works when there are no projects' do
Project.delete_all
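The new recalculate! example reduces to simple arithmetic: the namespace's container repository size is stored as container_registry_size and added on top of the per-project storage totals. With illustrative project sizes standing in for project_stat1 and project_stat2:

    project_storage = [40, 60]     # illustrative storage_size of the two project statistics rows
    container_registry_size = 999  # namespace.container_repositories_size, as stubbed in the spec
    storage_size = project_storage.sum + container_registry_size
    puts storage_size # => 1099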
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index c9f8a1bcdc2..25234db5734 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe NamespaceSetting, type: :model do
end
it { is_expected.to define_enum_for(:jobs_to_be_done).with_values([:basics, :move_repository, :code_storage, :exploring, :ci, :other]).with_suffix }
+ it { is_expected.to define_enum_for(:enabled_git_access_protocol).with_values([:all, :ssh, :http]).with_suffix }
describe "validations" do
describe "#default_branch_name_content" do
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 4373d9a0b24..96e06e617d5 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe Namespace do
it { is_expected.to have_many :pending_builds }
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
+ it { is_expected.to have_one :cluster_enabled_grant }
it do
is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)
@@ -374,6 +375,14 @@ RSpec.describe Namespace do
context 'linear' do
it_behaves_like 'namespace traversal scopes'
+
+ context 'without inner join ancestors query' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestor_scopes_with_inner_join: false)
+ end
+
+ it_behaves_like 'namespace traversal scopes'
+ end
end
shared_examples 'makes recursive queries' do
@@ -574,6 +583,107 @@ RSpec.describe Namespace do
end
end
+ describe '#container_repositories_size_cache_key' do
+ it 'returns the correct cache key' do
+ expect(namespace.container_repositories_size_cache_key).to eq "namespaces:#{namespace.id}:container_repositories_size"
+ end
+ end
+
+ describe '#container_repositories_size', :clean_gitlab_redis_cache do
+ let(:project_namespace) { create(:namespace) }
+
+ subject { project_namespace.container_repositories_size }
+
+ context 'on gitlab.com' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:gitlab_api_supported, :no_container_repositories, :all_migrated, :returned_size, :expected_result) do
+ nil | nil | nil | nil | nil
+ false | nil | nil | nil | nil
+ true | true | nil | nil | 0
+ true | false | false | nil | nil
+ true | false | true | 555 | 555
+ true | false | true | nil | nil
+ end
+
+ with_them do
+ before do
+ stub_container_registry_config(enabled: true, api_url: 'http://container-registry', key: 'spec/fixtures/x509_certificate_pk.key')
+ allow(Gitlab).to receive(:com?).and_return(true)
+ allow(ContainerRegistry::GitlabApiClient).to receive(:supports_gitlab_api?).and_return(gitlab_api_supported)
+ allow(project_namespace).to receive_message_chain(:all_container_repositories, :empty?).and_return(no_container_repositories)
+ allow(project_namespace).to receive_message_chain(:all_container_repositories, :all_migrated?).and_return(all_migrated)
+ allow(ContainerRegistry::GitlabApiClient).to receive(:deduplicated_size).with(project_namespace.full_path).and_return(returned_size)
+ end
+
+ it { is_expected.to eq(expected_result) }
+
+ it 'caches the result when all migrated' do
+ if all_migrated
+ expect(Rails.cache)
+ .to receive(:fetch)
+ .with(project_namespace.container_repositories_size_cache_key, expires_in: 7.days)
+
+ subject
+ end
+ end
+ end
+ end
+
+ context 'not on gitlab.com' do
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'for a sub-group' do
+ let(:parent_namespace) { create(:group) }
+ let(:project_namespace) { create(:group, parent: parent_namespace) }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
+ describe '#all_container_repositories' do
+ context 'with personal namespace' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_namespace) { user.namespace }
+
+ context 'with no project' do
+ it { expect(project_namespace.all_container_repositories).to match_array([]) }
+ end
+
+ context 'with projects' do
+ it "returns container repositories" do
+ project = create(:project, namespace: project_namespace)
+ rep = create(:container_repository, project: project)
+
+ expect(project_namespace.all_container_repositories).to match_array([rep])
+ end
+ end
+ end
+
+ context 'with subgroups' do
+ let_it_be(:project_namespace) { create(:group) }
+ let_it_be(:subgroup1) { create(:group, parent: project_namespace) }
+ let_it_be(:subgroup2) { create(:group, parent: subgroup1) }
+
+ context 'with no project' do
+ it { expect(project_namespace.all_container_repositories).to match_array([]) }
+ end
+
+ context 'with projects' do
+ it "returns container repositories" do
+ subgrp1_project = create(:project, namespace: subgroup1)
+ rep1 = create(:container_repository, project: subgrp1_project)
+
+ subgrp2_project = create(:project, namespace: subgroup2)
+ rep2 = create(:container_repository, project: subgrp2_project)
+
+ expect(project_namespace.all_container_repositories).to match_array([rep1, rep2])
+ end
+ end
+ end
+ end
+
describe '.search' do
let_it_be(:first_group) { create(:group, name: 'my first namespace', path: 'old-path') }
let_it_be(:parent_group) { create(:group, name: 'my parent namespace', path: 'parent-path') }
@@ -894,24 +1004,6 @@ RSpec.describe Namespace do
expect_project_directories_at('parent/renamed', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- child.update!(path: 'renamed')
-
- expect_project_directories_at('parent/renamed')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('rename_namespace', ['parent/child', 'parent/renamed'])
-
- child.update!(path: 'renamed')
- end
- end
end
context 'renaming parent' do
@@ -923,24 +1015,6 @@ RSpec.describe Namespace do
expect_project_directories_at('renamed/child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- parent.update!(path: 'renamed')
-
- expect_project_directories_at('renamed/child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('rename_namespace', %w(parent renamed))
-
- parent.update!(path: 'renamed')
- end
- end
end
context 'moving from one parent to another' do
@@ -952,24 +1026,6 @@ RSpec.describe Namespace do
expect_project_directories_at('new_parent/child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- child.update!(parent: new_parent)
-
- expect_project_directories_at('new_parent/child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('move_namespace', %w(child parent new_parent))
-
- child.update!(parent: new_parent)
- end
- end
end
context 'moving from having a parent to root' do
@@ -981,24 +1037,6 @@ RSpec.describe Namespace do
expect_project_directories_at('child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- child.update!(parent: nil)
-
- expect_project_directories_at('child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('move_namespace', ['child', 'parent', nil])
-
- child.update!(parent: nil)
- end
- end
end
context 'moving from root to having a parent' do
@@ -1010,24 +1048,6 @@ RSpec.describe Namespace do
expect_project_directories_at('new_parent/parent/child', with_pages: false)
end
end
-
- context 'when the project has pages deployed' do
- before do
- project.pages_metadatum.update!(deployed: true)
- end
-
- it 'correctly moves the repository, uploads and pages', :sidekiq_inline do
- parent.update!(parent: new_parent)
-
- expect_project_directories_at('new_parent/parent/child')
- end
-
- it 'performs the move async of pages async' do
- expect(PagesTransferWorker).to receive(:perform_async).with('move_namespace', ['parent', nil, 'new_parent'])
-
- parent.update!(parent: new_parent)
- end
- end
end
end
end
@@ -2238,10 +2258,24 @@ RSpec.describe Namespace do
describe 'storage_enforcement_date' do
let_it_be(:namespace) { create(:group) }
+ before do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
+ end
+
# Date TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
- it 'returns false' do
+ it 'returns nil' do
expect(namespace.storage_enforcement_date).to be(nil)
end
+
+ context 'when :namespace_storage_limit_bypass_date_check is enabled' do
+ before do
+ stub_feature_flags(namespace_storage_limit_bypass_date_check: true)
+ end
+
+ it 'returns the current date', :freeze_time do
+ expect(namespace.storage_enforcement_date).to eq(Date.current)
+ end
+ end
end
describe 'serialization' do
@@ -2251,27 +2285,23 @@ RSpec.describe Namespace do
end
describe '#certificate_based_clusters_enabled?' do
- it 'does not call Feature.enabled? twice with request_store', :request_store do
- expect(Feature).to receive(:enabled?).once
-
- namespace.certificate_based_clusters_enabled?
- namespace.certificate_based_clusters_enabled?
- end
-
- it 'call Feature.enabled? twice without request_store' do
- expect(Feature).to receive(:enabled?).twice
-
- namespace.certificate_based_clusters_enabled?
- namespace.certificate_based_clusters_enabled?
- end
-
context 'with ff disabled' do
before do
stub_feature_flags(certificate_based_clusters: false)
end
- it 'is truthy' do
- expect(namespace.certificate_based_clusters_enabled?).to be_falsy
+ context 'with a cluster_enabled_grant' do
+ it 'is truthy' do
+ create(:cluster_enabled_grant, namespace: namespace)
+
+ expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ end
+ end
+
+ context 'without a cluster_enabled_grant' do
+ it 'is falsy' do
+ expect(namespace.certificate_based_clusters_enabled?).to be_falsy
+ end
end
end
@@ -2280,8 +2310,18 @@ RSpec.describe Namespace do
stub_feature_flags(certificate_based_clusters: true)
end
- it 'is truthy' do
- expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ context 'with a cluster_enabled_grant' do
+ it 'is truthy' do
+ create(:cluster_enabled_grant, namespace: namespace)
+
+ expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ end
+ end
+
+ context 'without a cluster_enabled_grant' do
+ it 'is truthy' do
+ expect(namespace.certificate_based_clusters_enabled?).to be_truthy
+ end
end
end
end
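The table driving #container_repositories_size above encodes a short decision chain. A plain-Ruby sketch of that chain, with collaborators passed in as values so it stays runnable; the real method queries ContainerRegistry::GitlabApiClient, caches the result, and also returns nil for sub-groups, all of which is omitted here:

    def container_repositories_size(gitlab_com:, api_supported:, repositories:, all_migrated:, deduplicated_size:)
      return nil unless gitlab_com     # only computed on GitLab.com
      return nil unless api_supported  # registry must expose the GitLab API
      return 0 if repositories.empty?  # nothing to measure
      return nil unless all_migrated   # partial migrations give no reliable size

      deduplicated_size                # may itself be nil, as in the last table row
    end

    puts container_repositories_size(
      gitlab_com: true, api_supported: true,
      repositories: [:repo], all_migrated: true, deduplicated_size: 555
    ).inspect # => 555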
diff --git a/spec/models/packages/cleanup/policy_spec.rb b/spec/models/packages/cleanup/policy_spec.rb
index 972071aa0ad..c08ae4aa7e7 100644
--- a/spec/models/packages/cleanup/policy_spec.rb
+++ b/spec/models/packages/cleanup/policy_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Packages::Cleanup::Policy, type: :model do
is_expected
.to validate_inclusion_of(:keep_n_duplicated_package_files)
.in_array(described_class::KEEP_N_DUPLICATED_PACKAGE_FILES_VALUES)
- .with_message('keep_n_duplicated_package_files is invalid')
+ .with_message('is invalid')
end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index a9ed811e77d..06f02f021cf 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -1336,4 +1336,24 @@ RSpec.describe Packages::Package, type: :model do
end
end
end
+
+ describe '#normalized_pypi_name' do
+ let_it_be(:package) { create(:pypi_package) }
+
+ subject { package.normalized_pypi_name }
+
+ where(:package_name, :normalized_name) do
+ 'ASDF' | 'asdf'
+ 'a.B_c-d' | 'a-b-c-d'
+ 'a-------b....c___d' | 'a-b-c-d'
+ end
+
+ with_them do
+ before do
+ package.update_column(:name, package_name)
+ end
+
+ it { is_expected.to eq(normalized_name) }
+ end
+ end
end
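The #normalized_pypi_name table matches the PEP 503 normalization rule: lowercase the package name and collapse every run of '.', '-' or '_' into a single '-'. A one-line plain-Ruby equivalent:

    def normalized_pypi_name(name)
      name.downcase.gsub(/[-_.]+/, '-')
    end

    puts normalized_pypi_name('ASDF')                # => "asdf"
    puts normalized_pypi_name('a.B_c-d')             # => "a-b-c-d"
    puts normalized_pypi_name('a-------b....c___d')  # => "a-b-c-d"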
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index 381e42978f4..f9c458b2c80 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -213,8 +213,11 @@ RSpec.describe PlanLimits do
storage_size_limit
daily_invites
web_hook_calls
+ web_hook_calls_mid
+ web_hook_calls_low
ci_daily_pipeline_schedule_triggers
repository_size
+ security_policy_scan_execution_schedules
] + disabled_max_artifact_size_columns
end
diff --git a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
index eefe5bfc6c4..7d4268f74e9 100644
--- a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
@@ -7,8 +7,10 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
let_it_be(:project_1) { create(:project) }
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_4) { create(:project) }
+ let_it_be(:project_5) { create(:project) }
- let(:projects) { [project_1, project_2, project_3] }
+ let(:projects) { [project_1, project_2, project_3, project_4, project_5] }
let(:query) { projects.each { |project| user.can?(:read_project, project) } }
before do
@@ -17,8 +19,11 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
end
context 'without preloader' do
- it 'runs N queries' do
- expect { query }.to make_queries(projects.size)
+ it 'runs some queries' do
+ # we have an existing N+1: one extra query for each project the user is not a member of
+ # in this spec, project_3, project_4, project_5
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/362890
+ expect { query }.to make_queries(projects.size + 3)
end
end
@@ -34,7 +39,7 @@ RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
end
context 'when projects is an array of IDs' do
- let(:projects_arg) { [project_1.id, project_2.id, project_3.id] }
+ let(:projects_arg) { projects.map(&:id) }
it 'avoids N+1 queries' do
expect { query }.not_to make_queries
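The adjusted expectation encodes the N+1 documented in the comment above: without the preloader, the spec issues one permission query per project plus one extra query for each project the user is not a member of (project_3, project_4 and project_5 here):

    projects = 5
    non_member_projects = 3
    puts projects + non_member_projects # => 8 expected queries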
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index 941f6c0a49d..dae0f84eda3 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe ProjectFeature do
end
end
- it_behaves_like 'access level validation', ProjectFeature::FEATURES - %i(pages) do
+ it_behaves_like 'access level validation', ProjectFeature::FEATURES - %i(pages package_registry) do
let(:container_features) { project.project_feature }
end
@@ -170,6 +170,10 @@ RSpec.describe ProjectFeature do
expect(described_class.required_minimum_access_level(:repository)).to eq(Gitlab::Access::GUEST)
end
+ it 'handles package registry' do
+ expect(described_class.required_minimum_access_level(:package_registry)).to eq(Gitlab::Access::REPORTER)
+ end
+
it 'raises error if feature is invalid' do
expect do
described_class.required_minimum_access_level(:foos)
@@ -243,6 +247,50 @@ RSpec.describe ProjectFeature do
end
end
+ describe 'package_registry_access_level' do
+ context 'with default value' do
+ where(:config_packages_enabled, :expected_result) do
+ false | ProjectFeature::DISABLED
+ true | ProjectFeature::ENABLED
+ nil | ProjectFeature::DISABLED
+ end
+
+ with_them do
+ it 'creates project_feature with correct package_registry_access_level' do
+ stub_packages_setting(enabled: config_packages_enabled)
+ project = Project.new
+
+ expect(project.project_feature.package_registry_access_level).to eq(expected_result)
+ end
+ end
+ end
+
+ context 'sync packages_enabled' do
+ # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
+ where(:initial_value, :new_value, :expected_result) do
+ ProjectFeature::DISABLED | ProjectFeature::DISABLED | false
+ ProjectFeature::DISABLED | ProjectFeature::ENABLED | true
+ ProjectFeature::DISABLED | ProjectFeature::PUBLIC | true
+ ProjectFeature::ENABLED | ProjectFeature::DISABLED | false
+ ProjectFeature::ENABLED | ProjectFeature::ENABLED | true
+ ProjectFeature::ENABLED | ProjectFeature::PUBLIC | true
+ ProjectFeature::PUBLIC | ProjectFeature::DISABLED | false
+ ProjectFeature::PUBLIC | ProjectFeature::ENABLED | true
+ ProjectFeature::PUBLIC | ProjectFeature::PUBLIC | true
+ end
+ # rubocop:enable Lint/BinaryOperatorWithIdenticalOperands
+
+ with_them do
+ it 'sets the correct value' do
+ project = create(:project, package_registry_access_level: initial_value)
+
+ project.project_feature.update!(package_registry_access_level: new_value)
+
+ expect(project.packages_enabled).to eq(expected_result)
+ end
+ end
+ end
+ end
# rubocop:disable Gitlab/FeatureAvailableUsage
describe '#feature_available?' do
let(:features) { ProjectFeature::FEATURES }
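The sync table above reduces to one rule: packages_enabled mirrors whether package_registry_access_level is anything other than DISABLED, regardless of the previous level. A tiny sketch with illustrative integer constants in the spirit of ProjectFeature's:

    DISABLED = 0   # illustrative values, not pulled from the application code
    ENABLED  = 20
    PUBLIC   = 30

    def packages_enabled_for(access_level)
      access_level != DISABLED
    end

    [DISABLED, ENABLED, PUBLIC].each do |level|
      puts "#{level} => #{packages_enabled_for(level)}"
    end
    # => 0 => false, 20 => true, 30 => true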
diff --git a/spec/models/project_group_link_spec.rb b/spec/models/project_group_link_spec.rb
index c925d87170c..8b95b86b14b 100644
--- a/spec/models/project_group_link_spec.rb
+++ b/spec/models/project_group_link_spec.rb
@@ -30,6 +30,12 @@ RSpec.describe ProjectGroupLink do
expect(project_group_link).not_to be_valid
end
+
+ it 'does not allow a project to be shared with `OWNER` access level' do
+ project_group_link.group_access = Gitlab::Access::OWNER
+
+ expect(project_group_link).not_to be_valid
+ end
end
describe 'scopes' do
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index ed5b3d4e0be..2d84c1b843e 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -147,6 +147,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:job_artifacts).dependent(:restrict_with_error) }
it { is_expected.to have_many(:build_trace_chunks).through(:builds).dependent(:restrict_with_error) }
it { is_expected.to have_many(:secure_files).class_name('Ci::SecureFile').dependent(:restrict_with_error) }
+ it { is_expected.to have_one(:build_artifacts_size_refresh).class_name('Projects::BuildArtifactsSizeRefresh') }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -287,41 +288,35 @@ RSpec.describe Project, factory_default: :keep do
end
context 'updating a project' do
- shared_examples 'project update' do
- let_it_be(:project_namespace) { create(:project_namespace) }
- let_it_be(:project) { project_namespace.project }
+ let_it_be(:project_namespace) { create(:project_namespace) }
+ let_it_be(:project) { project_namespace.project }
- context 'when project namespace is not set' do
- before do
- project.update_column(:project_namespace_id, nil)
- project.reload
- end
+ context 'when project has an associated project namespace' do
+ # when FF is disabled creating a project does not create a project_namespace, so we create one
+ it 'project is INVALID when trying to remove project namespace' do
+ project.reload
+ # check that project actually has an associated project namespace
+ expect(project.project_namespace_id).to eq(project_namespace.id)
- it 'updates the project successfully' do
- # pre-check that project does not have a project namespace
- expect(project.project_namespace).to be_nil
+ expect do
+ project.update!(project_namespace_id: nil, path: 'hopefully-valid-path1')
+ end.to raise_error(ActiveRecord::RecordInvalid)
+ expect(project).to be_invalid
+ expect(project.errors.full_messages).to include("Project namespace can't be blank")
+ expect(project.reload.project_namespace).to be_in_sync_with_project(project)
+ end
- project.update!(path: 'hopefully-valid-path2')
+ context 'when same project is being updated in 2 instances' do
+ it 'syncs only changed attributes' do
+ project1 = Project.last
+ project2 = Project.last
- expect(project).to be_persisted
- expect(project).to be_valid
- expect(project.path).to eq('hopefully-valid-path2')
- expect(project.project_namespace).to be_nil
- end
- end
+ project_name = project1.name
+ project_path = project1.path
- context 'when project has an associated project namespace' do
- # when FF is disabled creating a project does not create a project_namespace, so we create one
- it 'project is INVALID when trying to remove project namespace' do
- project.reload
- # check that project actually has an associated project namespace
- expect(project.project_namespace_id).to eq(project_namespace.id)
+ project1.update!(name: project_name + "-1")
+ project2.update!(path: project_path + "-1")
- expect do
- project.update!(project_namespace_id: nil, path: 'hopefully-valid-path1')
- end.to raise_error(ActiveRecord::RecordInvalid)
- expect(project).to be_invalid
- expect(project.errors.full_messages).to include("Project namespace can't be blank")
expect(project.reload.project_namespace).to be_in_sync_with_project(project)
end
end
@@ -4744,8 +4739,7 @@ RSpec.describe Project, factory_default: :keep do
shared_examples 'filter respects visibility' do
it 'respects visibility' do
enable_admin_mode!(user) if admin_mode
- project.update!(visibility_level: Gitlab::VisibilityLevel.level_value(project_level.to_s))
- update_feature_access_level(project, feature_access_level)
+ update_feature_access_level(project, feature_access_level, visibility_level: Gitlab::VisibilityLevel.level_value(project_level.to_s))
expected_objects = expected_count == 1 ? [project] : []
@@ -6225,7 +6219,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#gitlab_deploy_token' do
let(:project) { create(:project) }
- subject { project.gitlab_deploy_token }
+ subject(:gitlab_deploy_token) { project.gitlab_deploy_token }
context 'when there is a gitlab deploy token associated' do
let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project]) }
@@ -6257,10 +6251,43 @@ RSpec.describe Project, factory_default: :keep do
context 'when there is a deploy token associated to a different project' do
let(:project_2) { create(:project) }
- let!(:deploy_token) { create(:deploy_token, projects: [project_2]) }
+ let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project_2]) }
it { is_expected.to be_nil }
end
+
+ context 'when the project group has a gitlab deploy token associated' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let!(:deploy_token) { create(:deploy_token, :gitlab_deploy_token, :group, groups: [group]) }
+
+ it { is_expected.to eq(deploy_token) }
+
+ context 'when the FF ci_variable_for_group_gitlab_deploy_token is disabled' do
+ before do
+ stub_feature_flags(ci_variable_for_group_gitlab_deploy_token: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'when the project and its group has a gitlab deploy token associated' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let!(:project_deploy_token) { create(:deploy_token, :gitlab_deploy_token, projects: [project]) }
+ let!(:group_deploy_token) { create(:deploy_token, :gitlab_deploy_token, :group, groups: [group]) }
+
+ it { is_expected.to eq(project_deploy_token) }
+
+ context 'when the FF ci_variable_for_group_gitlab_deploy_token is disabled' do
+ before do
+ stub_feature_flags(ci_variable_for_group_gitlab_deploy_token: false)
+ end
+
+ it { is_expected.to eq(project_deploy_token) }
+ end
+ end
end
context 'with uploads' do
@@ -6824,50 +6851,46 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#access_request_approvers_to_be_notified' do
- context 'for a personal project' do
- let_it_be(:project) { create(:project) }
- let_it_be(:maintainer) { create(:user) }
+ shared_examples 'returns active, non_invited, non_requested owners/maintainers of the project' do
+ specify do
+ maintainer = create(:project_member, :maintainer, source: project)
- let(:owner_membership) { project.members.owners.find_by(user_id: project.namespace.owner_id) }
+ create(:project_member, :developer, project: project)
+ create(:project_member, :maintainer, :invited, project: project)
+ create(:project_member, :maintainer, :access_request, project: project)
+ create(:project_member, :maintainer, :blocked, project: project)
+ create(:project_member, :owner, :blocked, project: project)
- it 'includes only the owner of the personal project' do
- expect(project.access_request_approvers_to_be_notified.to_a).to eq([owner_membership])
+ expect(project.access_request_approvers_to_be_notified.to_a).to match_array([maintainer, owner])
end
+ end
- it 'includes the maintainers of the personal project, if any' do
- project.add_maintainer(maintainer)
- maintainer_membership = project.members.maintainers.find_by(user_id: maintainer.id)
+ context 'for a personal project' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:owner) { project.members.find_by(user_id: project.first_owner.id) }
- expect(project.access_request_approvers_to_be_notified.to_a).to match_array([owner_membership, maintainer_membership])
- end
+ it_behaves_like 'returns active, non_invited, non_requested owners/maintainers of the project'
end
- let_it_be(:project) { create(:project, group: create(:group, :public)) }
+ context 'for a project in a group' do
+ let_it_be(:project) { create(:project, group: create(:group, :public)) }
+ let_it_be(:owner) { create(:project_member, :owner, source: project) }
- it 'returns a maximum of ten maintainers of the project in recent_sign_in descending order' do
- limit = 2
- stub_const("Member::ACCESS_REQUEST_APPROVERS_TO_BE_NOTIFIED_LIMIT", limit)
- users = create_list(:user, limit + 1, :with_sign_ins)
- active_maintainers = users.map do |user|
- create(:project_member, :maintainer, user: user, project: project)
- end
-
- active_maintainers_in_recent_sign_in_desc_order = project.members_and_requesters
- .id_in(active_maintainers)
- .order_recent_sign_in.limit(limit)
+ it 'returns a maximum of ten maintainers/owners of the project in recent_sign_in descending order' do
+ users = create_list(:user, 11, :with_sign_ins)
- expect(project.access_request_approvers_to_be_notified).to eq(active_maintainers_in_recent_sign_in_desc_order)
- end
+ active_maintainers_and_owners = users.map do |user|
+ create(:project_member, [:maintainer, :owner].sample, user: user, project: project)
+ end
- it 'returns active, non_invited, non_requested maintainers of the project' do
- maintainer = create(:project_member, :maintainer, source: project)
+ active_maintainers_and_owners_in_recent_sign_in_desc_order = project.members
+ .id_in(active_maintainers_and_owners)
+ .order_recent_sign_in.limit(10)
- create(:project_member, :developer, project: project)
- create(:project_member, :maintainer, :invited, project: project)
- create(:project_member, :maintainer, :access_request, project: project)
- create(:project_member, :maintainer, :blocked, project: project)
+ expect(project.access_request_approvers_to_be_notified).to eq(active_maintainers_and_owners_in_recent_sign_in_desc_order)
+ end
- expect(project.access_request_approvers_to_be_notified.to_a).to eq([maintainer])
+ it_behaves_like 'returns active, non_invited, non_requested owners/maintainers of the project'
end
end
@@ -7353,6 +7376,44 @@ RSpec.describe Project, factory_default: :keep do
subject { create(:project).packages_enabled }
it { is_expected.to be true }
+
+ context 'when packages_enabled is enabled' do
+ where(:project_visibility, :expected_result) do
+ Gitlab::VisibilityLevel::PRIVATE | ProjectFeature::PRIVATE
+ Gitlab::VisibilityLevel::INTERNAL | ProjectFeature::ENABLED
+ Gitlab::VisibilityLevel::PUBLIC | ProjectFeature::PUBLIC
+ end
+
+ with_them do
+ it 'sets package_registry_access_level to the correct value' do
+ project = create(:project,
+ visibility_level: project_visibility,
+ packages_enabled: false,
+ package_registry_access_level: ProjectFeature::DISABLED
+ )
+
+ project.update!(packages_enabled: true)
+
+ expect(project.package_registry_access_level).to eq(expected_result)
+ end
+ end
+ end
+
+ context 'when packages_enabled is disabled' do
+ Gitlab::VisibilityLevel.options.values.each do |project_visibility|
+ it 'sets package_registry_access_level to DISABLED' do
+ project = create(:project,
+ visibility_level: project_visibility,
+ packages_enabled: true,
+ package_registry_access_level: ProjectFeature::PUBLIC
+ )
+
+ project.update!(packages_enabled: false)
+
+ expect(project.package_registry_access_level).to eq(ProjectFeature::DISABLED)
+ end
+ end
+ end
end
describe '#related_group_ids' do
@@ -8290,6 +8351,46 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe "#refreshing_build_artifacts_size?" do
+ let_it_be(:project) { create(:project) }
+
+ subject { project.refreshing_build_artifacts_size? }
+
+ context 'when project has no existing refresh record' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when project has existing refresh record' do
+ context 'and refresh has not yet started' do
+ before do
+ allow(project)
+ .to receive_message_chain(:build_artifacts_size_refresh, :started?)
+ .and_return(false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'and refresh has started' do
+ before do
+ allow(project)
+ .to receive_message_chain(:build_artifacts_size_refresh, :started?)
+ .and_return(true)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
+ describe '#security_training_available?' do
+ subject { build(:project) }
+
+ it 'returns false' do
+ expect(subject.security_training_available?).to eq false
+ end
+ end
+
private
def finish_job(export_job)
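The packages_enabled examples above assert the inverse sync: enabling packages picks a package_registry_access_level matching the project's visibility, while disabling packages always forces DISABLED. A sketch with illustrative constants standing in for Gitlab::VisibilityLevel and ProjectFeature values:

    PRIVATE_VISIBILITY  = 0    # illustrative visibility levels
    INTERNAL_VISIBILITY = 10
    PUBLIC_VISIBILITY   = 20

    FEATURE_DISABLED = 0       # illustrative feature access levels
    FEATURE_PRIVATE  = 10
    FEATURE_ENABLED  = 20
    FEATURE_PUBLIC   = 30

    def package_registry_access_level_for(packages_enabled, visibility)
      return FEATURE_DISABLED unless packages_enabled

      case visibility
      when PRIVATE_VISIBILITY  then FEATURE_PRIVATE
      when INTERNAL_VISIBILITY then FEATURE_ENABLED
      else                          FEATURE_PUBLIC
      end
    end

    puts package_registry_access_level_for(true, INTERNAL_VISIBILITY)  # => 20
    puts package_registry_access_level_for(false, PUBLIC_VISIBILITY)   # => 0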
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 2c29d4c42f4..53175a2f840 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -54,6 +54,18 @@ RSpec.describe ProjectStatistics do
end
end
+ describe 'namespace relatable columns' do
+ it 'treats the correct columns as namespace relatable' do
+ expect(described_class::NAMESPACE_RELATABLE_COLUMNS).to match_array %i[
+ repository_size
+ wiki_size
+ lfs_objects_size
+ uploads_size
+ container_registry_size
+ ]
+ end
+ end
+
describe '#total_repository_size' do
it "sums repository and LFS object size" do
statistics.repository_size = 2
@@ -346,20 +358,6 @@ RSpec.describe ProjectStatistics do
expect(statistics.container_registry_size).to eq(0)
end
-
- context 'with container_registry_project_statistics FF disabled' do
- before do
- stub_feature_flags(container_registry_project_statistics: false)
- end
-
- it 'does not update the container_registry_size' do
- expect(project).not_to receive(:container_repositories_size)
-
- update_container_registry_size
-
- expect(statistics.container_registry_size).to eq(0)
- end
- end
end
describe '#update_storage_size' do
diff --git a/spec/models/projects/build_artifacts_size_refresh_spec.rb b/spec/models/projects/build_artifacts_size_refresh_spec.rb
index a55e4b31d21..052e654af76 100644
--- a/spec/models/projects/build_artifacts_size_refresh_spec.rb
+++ b/spec/models/projects/build_artifacts_size_refresh_spec.rb
@@ -30,6 +30,12 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
expect(described_class.remaining).to match_array([refresh_1, refresh_3, refresh_4])
end
end
+
+ describe 'processing_queue' do
+ it 'prioritizes pending -> stale -> created' do
+ expect(described_class.processing_queue).to eq([refresh_3, refresh_1, refresh_4])
+ end
+ end
end
describe 'state machine', :clean_gitlab_redis_shared_state do
@@ -49,7 +55,7 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
describe '#process!' do
context 'when refresh state is created' do
- let!(:refresh) do
+ let_it_be_with_reload(:refresh) do
create(
:project_build_artifacts_size_refresh,
:created,
@@ -59,25 +65,31 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
)
end
+ let!(:last_job_artifact_id_on_refresh_start) { create(:ci_job_artifact, project: refresh.project) }
+
before do
stats = create(:project_statistics, project: refresh.project, build_artifacts_size: 120)
stats.increment_counter(:build_artifacts_size, 30)
end
it 'transitions the state to running' do
- expect { refresh.process! }.to change { refresh.reload.state }.to(described_class::STATES[:running])
+ expect { refresh.process! }.to change { refresh.state }.to(described_class::STATES[:running])
end
it 'sets the refresh_started_at' do
- expect { refresh.process! }.to change { refresh.reload.refresh_started_at.to_i }.to(now.to_i)
+ expect { refresh.process! }.to change { refresh.refresh_started_at.to_i }.to(now.to_i)
+ end
+
+ it 'sets last_job_artifact_id_on_refresh_start' do
+ expect { refresh.process! }.to change { refresh.last_job_artifact_id_on_refresh_start.to_i }.to(last_job_artifact_id_on_refresh_start.id)
end
it 'bumps the updated_at' do
- expect { refresh.process! }.to change { refresh.reload.updated_at.to_i }.to(now.to_i)
+ expect { refresh.process! }.to change { refresh.updated_at.to_i }.to(now.to_i)
end
it 'resets the build artifacts size stats' do
- expect { refresh.process! }.to change { refresh.project.statistics.reload.build_artifacts_size }.to(0)
+ expect { refresh.process! }.to change { refresh.project.statistics.build_artifacts_size }.to(0)
end
it 'resets the counter attribute to zero' do
@@ -159,15 +171,13 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
end
describe '.process_next_refresh!' do
- let!(:refresh_running) { create(:project_build_artifacts_size_refresh, :running) }
let!(:refresh_created) { create(:project_build_artifacts_size_refresh, :created) }
- let!(:refresh_stale) { create(:project_build_artifacts_size_refresh, :stale) }
let!(:refresh_pending) { create(:project_build_artifacts_size_refresh, :pending) }
subject(:processed_refresh) { described_class.process_next_refresh! }
it 'picks the first record from the remaining work' do
- expect(processed_refresh).to eq(refresh_created)
+ expect(processed_refresh).to eq(refresh_pending)
expect(processed_refresh.reload).to be_running
end
end
@@ -214,7 +224,8 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
project: project,
updated_at: 2.days.ago,
refresh_started_at: 10.days.ago,
- last_job_artifact_id: artifact_1.id
+ last_job_artifact_id: artifact_1.id,
+ last_job_artifact_id_on_refresh_start: artifact_3.id
)
end
@@ -223,5 +234,35 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
it 'returns the job artifact records that were created not later than the refresh_started_at and IDs greater than the last_job_artifact_id' do
expect(batch).to eq([artifact_2, artifact_3])
end
+
+ context 'when created_at is set before artifact id is persisted' do
+ it 'returns ordered job artifacts' do
+ artifact_3.update!(created_at: artifact_2.created_at)
+
+ expect(batch).to eq([artifact_2, artifact_3])
+ end
+ end
+ end
+
+ describe '#started?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project) }
+
+ subject { refresh.started? }
+
+ where(:refresh_state, :result) do
+ :created | false
+ :pending | true
+ :running | true
+ end
+
+ with_them do
+ let(:refresh) do
+ create(:project_build_artifacts_size_refresh, refresh_state, project: project)
+ end
+
+ it { is_expected.to eq(result) }
+ end
end
end
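Two behaviours from the new refresh examples: a refresh counts as started once it has left the created state, and the processing queue orders remaining work pending, then stale, then created. A plain-Ruby sketch using an assumed priority table (the real model expresses this as scopes and SQL ordering):

    STATE_PRIORITY = { pending: 0, stale: 1, created: 2 }.freeze # assumed ordering table

    def started?(state)
      state != :created
    end

    def processing_queue(refreshes)
      refreshes.select { |r| STATE_PRIORITY.key?(r) }.sort_by { |r| STATE_PRIORITY[r] }
    end

    puts started?(:pending) # => true
    puts processing_queue([:created, :stale, :pending]).inspect
    # => [:pending, :stale, :created]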
diff --git a/spec/models/protected_tag_spec.rb b/spec/models/protected_tag_spec.rb
index e5cee6f18cd..b97954c055d 100644
--- a/spec/models/protected_tag_spec.rb
+++ b/spec/models/protected_tag_spec.rb
@@ -11,4 +11,56 @@ RSpec.describe ProtectedTag do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:name) }
end
+
+ describe '#protected?' do
+ let(:project) { create(:project, :repository) }
+
+ it 'returns true when the tag matches a protected tag via direct match' do
+ create(:protected_tag, project: project, name: 'foo')
+
+ expect(described_class.protected?(project, 'foo')).to eq(true)
+ end
+
+ it 'returns true when the tag matches a protected tag via wildcard match' do
+ create(:protected_tag, project: project, name: 'production/*')
+
+ expect(described_class.protected?(project, 'production/some-tag')).to eq(true)
+ end
+
+ it 'returns false when the tag does not match a protected tag via direct match' do
+ expect(described_class.protected?(project, 'foo')).to eq(false)
+ end
+
+ it 'returns false when the tag does not match a protected tag via wildcard match' do
+ create(:protected_tag, project: project, name: 'production/*')
+
+ expect(described_class.protected?(project, 'staging/some-tag')).to eq(false)
+ end
+
+ it 'returns false when tag name is nil' do
+ expect(described_class.protected?(project, nil)).to eq(false)
+ end
+
+ context 'with caching', :request_store do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:protected_tag) { create(:protected_tag, project: project, name: 'foo') }
+
+ it 'correctly invalidates a cache' do
+ expect(described_class.protected?(project, 'foo')).to eq(true)
+ expect(described_class.protected?(project, 'bar')).to eq(false)
+
+ create(:protected_tag, project: project, name: 'bar')
+
+ expect(described_class.protected?(project, 'bar')).to eq(true)
+ end
+
+ it 'correctly uses the cached version' do
+ expect(project).to receive(:protected_tags).once.and_call_original
+
+ 2.times do
+ expect(described_class.protected?(project, protected_tag.name)).to eq(true)
+ end
+ end
+ end
+ end
end
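The new .protected? examples cover direct and wildcard matches ('production/*' protects 'production/some-tag'). A sketch of the comparison, turning each protected-tag name into an anchored regex in which '*' matches anything; the per-request caching shown in the specs is omitted:

    def wildcard_regex(pattern)
      Regexp.new("\\A#{Regexp.escape(pattern).gsub('\*', '.*')}\\z")
    end

    def protected_tag?(protected_names, tag_name)
      return false if tag_name.nil?

      protected_names.any? { |name| tag_name.match?(wildcard_regex(name)) }
    end

    names = ['foo', 'production/*']
    puts protected_tag?(names, 'foo')                 # => true
    puts protected_tag?(names, 'production/some-tag') # => true
    puts protected_tag?(names, 'staging/some-tag')    # => false
    puts protected_tag?(names, nil)                   # => false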
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 4ae1927dcca..83d7596ff51 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -66,6 +66,32 @@ RSpec.describe Release do
expect { release.milestones << milestone }.to change { MilestoneRelease.count }.by(1)
end
end
+
+ context 'when creating new release' do
+ subject { build(:release, project: project, name: 'Release 1.0') }
+
+ it { is_expected.to validate_presence_of(:author_id) }
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(validate_release_with_author: false)
+ end
+
+ it { is_expected.not_to validate_presence_of(:author_id) }
+ end
+ end
+
+ # Mimic releases created before 11.7
+ # See: https://gitlab.com/gitlab-org/gitlab/-/blob/8e5a110b01f842d8b6a702197928757a40ce9009/app/models/release.rb#L14
+ context 'when updating existing release without author' do
+ let(:release) { create(:release, :legacy) }
+
+ it 'updates successfully' do
+ release.description += 'Update'
+
+ expect { release.save! }.not_to raise_error
+ end
+ end
end
describe '#assets_count' do
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index 9f1d1c84da3..d2d7859e726 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -254,8 +254,6 @@ RSpec.describe RemoteMirror, :mailer do
it 'does not remove the remote' do
mirror = create_mirror(url: 'http://foo:bar@test.com')
- expect(RepositoryRemoveRemoteWorker).not_to receive(:perform_async)
-
mirror.destroy!
end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 215f83adf5d..e1d903a40cf 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -418,46 +418,31 @@ RSpec.describe Repository do
end
describe '#new_commits' do
- shared_examples '#new_commits' do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:repository) { project.repository }
-
- subject { repository.new_commits(rev, allow_quarantine: allow_quarantine) }
-
- context 'when there are no new commits' do
- let(:rev) { repository.commit.id }
+ let_it_be(:project) { create(:project, :repository) }
- it 'returns an empty array' do
- expect(subject).to eq([])
- end
- end
+ let(:repository) { project.repository }
- context 'when new commits are found' do
- let(:branch) { 'orphaned-branch' }
- let!(:rev) { repository.commit(branch).id }
- let(:allow_quarantine) { false }
+ subject { repository.new_commits(rev) }
- it 'returns the commits' do
- repository.delete_branch(branch)
+ context 'when there are no new commits' do
+ let(:rev) { repository.commit.id }
- expect(subject).not_to be_empty
- expect(subject).to all( be_a(::Commit) )
- expect(subject.size).to eq(1)
- end
+ it 'returns an empty array' do
+ expect(subject).to eq([])
end
end
- context 'with quarantine' do
- let(:allow_quarantine) { true }
+ context 'when new commits are found' do
+ let(:branch) { 'orphaned-branch' }
+ let!(:rev) { repository.commit(branch).id }
- it_behaves_like '#new_commits'
- end
-
- context 'without quarantine' do
- let(:allow_quarantine) { false }
+ it 'returns the commits' do
+ repository.delete_branch(branch)
- it_behaves_like '#new_commits'
+ expect(subject).not_to be_empty
+ expect(subject).to all( be_a(::Commit) )
+ expect(subject.size).to eq(1)
+ end
end
end
diff --git a/spec/models/route_spec.rb b/spec/models/route_spec.rb
index 0489a4fb995..929eaca85f7 100644
--- a/spec/models/route_spec.rb
+++ b/spec/models/route_spec.rb
@@ -22,13 +22,6 @@ RSpec.describe Route do
end
describe 'callbacks' do
- context 'before validation' do
- it 'calls #delete_conflicting_orphaned_routes' do
- expect(route).to receive(:delete_conflicting_orphaned_routes)
- route.valid?
- end
- end
-
context 'after update' do
it 'calls #create_redirect_for_old_path' do
expect(route).to receive(:create_redirect_for_old_path)
@@ -44,7 +37,7 @@ RSpec.describe Route do
context 'after create' do
it 'calls #delete_conflicting_redirects' do
route.destroy!
- new_route = described_class.new(source: group, path: group.path)
+ new_route = described_class.new(source: group, path: group.path, namespace: group)
expect(new_route).to receive(:delete_conflicting_redirects)
new_route.save!
end
@@ -275,7 +268,7 @@ RSpec.describe Route do
end
end
- describe '#delete_conflicting_orphaned_routes' do
+ describe 'conflicting routes validation' do
context 'when there is a conflicting route' do
let!(:conflicting_group) { create(:group, path: 'foo') }
@@ -283,47 +276,31 @@ RSpec.describe Route do
route.path = conflicting_group.route.path
end
- context 'when the route is orphaned' do
+ context 'when deleting the conflicting route' do
let!(:offending_route) { conflicting_group.route }
- before do
- Group.delete(conflicting_group) # Orphan the route
- end
+ it 'does not delete the original route' do
+ # before deleting the route, check it's there
+ expect(Route.where(path: offending_route.path).count).to eq(1)
- it 'deletes the orphaned route' do
expect do
- route.valid?
- end.to change { described_class.count }.from(2).to(1)
- end
+ Group.delete(conflicting_group) # delete group with conflicting route
+ end.to change { described_class.count }.by(-1)
- it 'passes validation, as usual' do
+ # check the conflicting route is gone
+ expect(Route.where(path: offending_route.path).count).to eq(0)
+ expect(route.path).to eq(offending_route.path)
expect(route.valid?).to be_truthy
end
end
- context 'when the route is not orphaned' do
- it 'does not delete the conflicting route' do
- expect do
- route.valid?
- end.not_to change { described_class.count }
- end
-
- it 'fails validation, as usual' do
- expect(route.valid?).to be_falsey
- end
+ it 'fails validation' do
+ expect(route.valid?).to be_falsey
end
end
context 'when there are no conflicting routes' do
- it 'does not delete any routes' do
- route
-
- expect do
- route.valid?
- end.not_to change { described_class.count }
- end
-
- it 'passes validation, as usual' do
+ it 'passes validation' do
expect(route.valid?).to be_truthy
end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index a113ae37203..a484952bfe9 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -11,8 +11,6 @@ RSpec.describe Terraform::State do
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:project_id) }
- it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
-
describe 'scopes' do
describe '.ordered_by_name' do
let_it_be(:project) { create(:project) }
@@ -40,22 +38,6 @@ RSpec.describe Terraform::State do
end
end
- describe '#destroy' do
- let(:terraform_state) { create(:terraform_state) }
- let(:user) { terraform_state.project.creator }
-
- it 'deletes when the state is unlocked' do
- expect(terraform_state.destroy).to be_truthy
- end
-
- it 'fails to delete when the state is locked', :aggregate_failures do
- terraform_state.update!(lock_xid: SecureRandom.uuid, locked_by_user: user, locked_at: Time.current)
-
- expect(terraform_state.destroy).to be_falsey
- expect(terraform_state.errors.full_messages).to eq(["You cannot remove the State file because it's locked. Unlock the State file first before removing it."])
- end
- end
-
describe '#latest_file' do
let(:terraform_state) { create(:terraform_state, :with_version) }
let(:latest_version) { terraform_state.latest_version }
diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb
index 7af9b7897ff..22b1397f30a 100644
--- a/spec/models/terraform/state_version_spec.rb
+++ b/spec/models/terraform/state_version_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Terraform::StateVersion do
it { is_expected.to be_a FileStoreMounter }
+ it { is_expected.to be_a EachBatch }
it { is_expected.to belong_to(:terraform_state).required }
it { is_expected.to belong_to(:created_by_user).class_name('User').optional }
diff --git a/spec/models/time_tracking/timelog_category_spec.rb b/spec/models/time_tracking/timelog_category_spec.rb
new file mode 100644
index 00000000000..d8b938e9d68
--- /dev/null
+++ b/spec/models/time_tracking/timelog_category_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TimeTracking::TimelogCategory, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace).with_foreign_key('namespace_id') }
+ end
+
+ describe 'validations' do
+ subject { create(:timelog_category) }
+
+ it { is_expected.to validate_presence_of(:namespace) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_uniqueness_of(:name).case_insensitive.scoped_to([:namespace_id]) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_length_of(:description).is_at_most(1024) }
+ it { is_expected.to validate_length_of(:color).is_at_most(7) }
+ end
+
+ describe 'validations when billable' do
+ subject { create(:timelog_category, billable: true, billing_rate: 10.5) }
+
+ it { is_expected.to validate_presence_of(:billing_rate) }
+ it { is_expected.to validate_numericality_of(:billing_rate).is_greater_than(0) }
+ end
+
+ describe '#name' do
+ it 'strips name' do
+ timelog_category = described_class.new(name: ' TimelogCategoryTest ')
+ timelog_category.valid?
+
+ expect(timelog_category.name).to eq('TimelogCategoryTest')
+ end
+ end
+
+ describe '#color' do
+ it 'strips color' do
+ timelog_category = described_class.new(name: 'TimelogCategoryTest', color: ' #fafafa ')
+ timelog_category.valid?
+
+ expect(timelog_category.color).to eq(::Gitlab::Color.of('#fafafa'))
+ end
+ end
+
+ describe '#find_by_name' do
+ let_it_be(:namespace_a) { create(:namespace) }
+ let_it_be(:namespace_b) { create(:namespace) }
+ let_it_be(:timelog_category_a) { create(:timelog_category, namespace: namespace_a, name: 'TimelogCategoryTest') }
+
+ it 'finds the correct timelog category' do
+ expect(described_class.find_by_name(namespace_a.id, 'TIMELOGCATEGORYTest')).to match_array([timelog_category_a])
+ end
+
+ it 'returns empty if not found' do
+ expect(described_class.find_by_name(namespace_b.id, 'TIMELOGCATEGORYTest')).to be_empty
+ end
+ end
+end
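
The .find_by_name examples above expect a case-insensitive name lookup scoped to a single namespace. One scope shape that would satisfy them is sketched below; this is an assumption about the implementation, not code taken from the commit:

  class TimeTracking::TimelogCategory < ApplicationRecord
    belongs_to :namespace

    # Case-insensitive lookup within one namespace (assumed shape).
    scope :find_by_name, ->(namespace_id, name) do
      where(namespace_id: namespace_id).where('LOWER(name) = LOWER(?)', name)
    end
  end
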
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index f087fab1ef3..dcf6b224009 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -4374,24 +4374,6 @@ RSpec.describe User do
it_behaves_like '#ci_owned_runners'
end
-
- context 'when FF ci_owned_runners_cross_joins_fix is disabled' do
- before do
- skip_if_multiple_databases_are_setup
-
- stub_feature_flags(ci_owned_runners_cross_joins_fix: false)
- end
-
- it_behaves_like '#ci_owned_runners'
- end
-
- context 'when FF ci_owned_runners_unnest_index is disabled uses GIN index' do
- before do
- stub_feature_flags(ci_owned_runners_unnest_index: false)
- end
-
- it_behaves_like '#ci_owned_runners'
- end
end
describe '#projects_with_reporter_access_limited_to' do
@@ -6655,8 +6637,10 @@ RSpec.describe User do
describe '.with_no_activity' do
it 'returns users with no activity' do
freeze_time do
- not_that_long_ago = (described_class::MINIMUM_INACTIVE_DAYS - 1).days.ago.to_date
- too_long_ago = described_class::MINIMUM_INACTIVE_DAYS.days.ago.to_date
+ active_not_that_long_ago = (described_class::MINIMUM_INACTIVE_DAYS - 1).days.ago.to_date
+ active_too_long_ago = described_class::MINIMUM_INACTIVE_DAYS.days.ago.to_date
+ created_recently = (described_class::MINIMUM_DAYS_CREATED - 1).days.ago.to_date
+ created_not_recently = described_class::MINIMUM_DAYS_CREATED.days.ago.to_date
create(:user, :deactivated, last_activity_on: nil)
@@ -6664,12 +6648,13 @@ RSpec.describe User do
create(:user, state: :active, user_type: user_type, last_activity_on: nil)
end
- create(:user, last_activity_on: not_that_long_ago)
- create(:user, last_activity_on: too_long_ago)
+ create(:user, last_activity_on: active_not_that_long_ago)
+ create(:user, last_activity_on: active_too_long_ago)
+ create(:user, last_activity_on: nil, created_at: created_recently)
- user_with_no_activity = create(:user, last_activity_on: nil)
+ old_enough_user_with_no_activity = create(:user, last_activity_on: nil, created_at: created_not_recently)
- expect(described_class.with_no_activity).to contain_exactly(user_with_no_activity)
+ expect(described_class.with_no_activity).to contain_exactly(old_enough_user_with_no_activity)
end
end
end
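
The renamed variables make the intent of .with_no_activity explicit: a user only counts when there is no recorded activity and the account is older than MINIMUM_DAYS_CREATED. A rough scope sketch under that reading; the extra filters on state and user type that the surrounding examples imply are omitted here:

  # Assumed shape; only the two conditions exercised by the renamed
  # variables are shown.
  scope :with_no_activity, -> do
    where(last_activity_on: nil)
      .where('created_at <= ?', MINIMUM_DAYS_CREATED.days.ago)
  end
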
diff --git a/spec/models/users/callout_spec.rb b/spec/models/users/callout_spec.rb
index 293f0279e79..14f555863ec 100644
--- a/spec/models/users/callout_spec.rb
+++ b/spec/models/users/callout_spec.rb
@@ -11,4 +11,16 @@ RSpec.describe Users::Callout do
it { is_expected.to validate_presence_of(:feature_name) }
it { is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id).ignoring_case_sensitivity }
end
+
+ describe 'scopes' do
+ describe '.with_feature_name' do
+ let_it_be(:feature_name) { described_class.feature_names.keys.last }
+ let_it_be(:user_callouts_for_feature_name) { create_list(:callout, 2, feature_name: feature_name) }
+ let_it_be(:another_user_callout) { create(:callout, feature_name: described_class.feature_names.each_key.first) }
+
+ it 'returns user callouts for the given feature name only' do
+ expect(described_class.with_feature_name(feature_name)).to eq(user_callouts_for_feature_name)
+ end
+ end
+ end
end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index e92ae746911..5e757c11f99 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -3,6 +3,28 @@
require 'spec_helper'
RSpec.describe WorkItem do
+ describe 'associations' do
+ it { is_expected.to have_one(:work_item_parent).class_name('WorkItem') }
+
+ it 'has one `parent_link`' do
+ is_expected.to have_one(:parent_link)
+ .class_name('::WorkItems::ParentLink')
+ .with_foreign_key('work_item_id')
+ end
+
+ it 'has many `work_item_children`' do
+ is_expected.to have_many(:work_item_children)
+ .class_name('WorkItem')
+ .with_foreign_key('work_item_id')
+ end
+
+ it 'has many `child_links`' do
+ is_expected.to have_many(:child_links)
+ .class_name('::WorkItems::ParentLink')
+ .with_foreign_key('work_item_parent_id')
+ end
+ end
+
describe '#noteable_target_type_name' do
it 'returns `issue` as the target name' do
work_item = build(:work_item)
@@ -11,6 +33,15 @@ RSpec.describe WorkItem do
end
end
+ describe '#widgets' do
+ subject { build(:work_item).widgets }
+
+ it 'returns instances of supported widgets' do
+ is_expected.to match_array([instance_of(WorkItems::Widgets::Description),
+ instance_of(WorkItems::Widgets::Hierarchy)])
+ end
+ end
+
describe 'callbacks' do
describe 'record_create_action' do
it 'records the creation action after saving' do
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
new file mode 100644
index 00000000000..9516baa7340
--- /dev/null
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::ParentLink do
+ describe 'associations' do
+ it { is_expected.to belong_to(:work_item) }
+ it { is_expected.to belong_to(:work_item_parent).class_name('WorkItem') }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:work_item) }
+ it { is_expected.to validate_presence_of(:work_item_parent) }
+
+ describe 'hierarchy' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { build(:work_item, project: project) }
+ let_it_be(:task1) { build(:work_item, :task, project: project) }
+ let_it_be(:task2) { build(:work_item, :task, project: project) }
+
+ it 'is valid if a non-task parent has a task child' do
+ expect(build(:parent_link, work_item: task1, work_item_parent: issue)).to be_valid
+ end
+
+ it 'is not valid if child is not task' do
+ link = build(:parent_link, work_item: issue)
+
+ expect(link).not_to be_valid
+ expect(link.errors[:work_item]).to include('Only Task can be assigned as a child in hierarchy.')
+ end
+
+ it 'is not valid if parent is task' do
+ link = build(:parent_link, work_item_parent: task1)
+
+ expect(link).not_to be_valid
+ expect(link.errors[:work_item_parent]).to include('Only Issue can be parent of Task.')
+ end
+
+ it 'is not valid if the parent is in another project' do
+ link = build(:parent_link, work_item_parent: task1, work_item: build(:work_item))
+
+ expect(link).not_to be_valid
+ expect(link.errors[:work_item_parent]).to include('Parent must be in the same project as child.')
+ end
+
+ context 'when parent already has maximum number of links' do
+ let_it_be(:link1) { create(:parent_link, work_item_parent: issue, work_item: task1) }
+
+ before do
+ stub_const("#{described_class}::MAX_CHILDREN", 1)
+ end
+
+ it 'is not valid when another link is added' do
+ link2 = build(:parent_link, work_item_parent: issue, work_item: task2)
+
+ expect(link2).not_to be_valid
+ expect(link2.errors[:work_item_parent]).to include('Parent already has maximum number of children.')
+ end
+
+ it 'keeps the existing link valid' do
+ expect(link1).to be_valid
+ end
+ end
+ end
+ end
+end
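
Taken together, the hierarchy examples pin down four constraints: only tasks may be children, only non-task issues may be parents, parent and child must share a project, and a parent may not exceed MAX_CHILDREN links. A simplified validation sketch matching those error messages follows; the task? predicate, the project_id comparison, and the constant value are assumptions rather than confirmed API:

  class WorkItems::ParentLink < ApplicationRecord
    MAX_CHILDREN = 100 # placeholder value; the spec stubs this to 1

    belongs_to :work_item
    belongs_to :work_item_parent, class_name: 'WorkItem'

    validates :work_item, :work_item_parent, presence: true
    validate :validate_hierarchy_restrictions

    private

    # Each check mirrors one error message asserted in the spec above.
    def validate_hierarchy_restrictions
      return if work_item.nil? || work_item_parent.nil?

      errors.add(:work_item, 'Only Task can be assigned as a child in hierarchy.') unless work_item.task?
      errors.add(:work_item_parent, 'Only Issue can be parent of Task.') if work_item_parent.task?
      errors.add(:work_item_parent, 'Parent must be in the same project as child.') unless work_item.project_id == work_item_parent.project_id
      errors.add(:work_item_parent, 'Parent already has maximum number of children.') if work_item_parent.child_links.count >= MAX_CHILDREN
    end
  end
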
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index 6e9f3210e65..81663d0eb41 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -60,7 +60,16 @@ RSpec.describe WorkItems::Type do
it { is_expected.not_to allow_value('s' * 256).for(:icon_name) }
end
- describe 'default?' do
+ describe '.available_widgets' do
+ subject { described_class.available_widgets }
+
+ it 'returns list of all possible widgets' do
+ is_expected.to match_array([::WorkItems::Widgets::Description,
+ ::WorkItems::Widgets::Hierarchy])
+ end
+ end
+
+ describe '#default?' do
subject { build(:work_item_type, namespace: namespace).default? }
context 'when namespace is nil' do
diff --git a/spec/models/work_items/widgets/base_spec.rb b/spec/models/work_items/widgets/base_spec.rb
new file mode 100644
index 00000000000..9b4b4d9e98f
--- /dev/null
+++ b/spec/models/work_items/widgets/base_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Base do
+ let_it_be(:work_item) { create(:work_item, description: '# Title') }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:base) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:base) }
+ end
+end
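
Both the class-level and instance-level type examples return :base, which suggests the widget type is derived from the demodulized class name. A small base-class sketch under that assumption (not necessarily the commit's implementation):

  module WorkItems
    module Widgets
      class Base
        def self.type
          name.demodulize.underscore.to_sym # WorkItems::Widgets::Base => :base
        end

        def initialize(work_item)
          @work_item = work_item
        end

        def type
          self.class.type
        end

        private

        attr_reader :work_item
      end
    end
  end
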
diff --git a/spec/models/work_items/widgets/description_spec.rb b/spec/models/work_items/widgets/description_spec.rb
new file mode 100644
index 00000000000..8359db31bff
--- /dev/null
+++ b/spec/models/work_items/widgets/description_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Description do
+ let_it_be(:work_item) { create(:work_item, description: '# Title') }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:description) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:description) }
+ end
+
+ describe '#description' do
+ subject { described_class.new(work_item).description }
+
+ it { is_expected.to eq(work_item.description) }
+ end
+end
diff --git a/spec/models/work_items/widgets/hierarchy_spec.rb b/spec/models/work_items/widgets/hierarchy_spec.rb
new file mode 100644
index 00000000000..0141731529b
--- /dev/null
+++ b/spec/models/work_items/widgets/hierarchy_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Hierarchy do
+ let_it_be(:work_item) { create(:work_item) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:hierarchy) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:hierarchy) }
+ end
+
+ describe '#parent' do
+ let_it_be(:parent_link) { create(:parent_link) }
+
+ subject { described_class.new(parent_link.work_item).parent }
+
+ it { is_expected.to eq parent_link.work_item_parent }
+
+ context 'when work_items_hierarchy flag is disabled' do
+ before do
+ stub_feature_flags(work_items_hierarchy: false)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#children' do
+ let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item) }
+ let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item) }
+
+ subject { described_class.new(work_item).children }
+
+ it { is_expected.to match_array([parent_link1.work_item, parent_link2.work_item]) }
+
+ context 'when work_items_hierarchy flag is disabled' do
+ before do
+ stub_feature_flags(work_items_hierarchy: false)
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
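
The parent and children examples only surface hierarchy data while the work_items_hierarchy feature flag is enabled. One way a widget could guard on the flag, assuming Feature.enabled? and the associations added in work_item_spec.rb above (a sketch, not the commit's code):

  class WorkItems::Widgets::Hierarchy < WorkItems::Widgets::Base
    def parent
      return unless Feature.enabled?(:work_items_hierarchy, work_item.project)

      work_item.work_item_parent
    end

    def children
      return WorkItem.none unless Feature.enabled?(:work_items_hierarchy, work_item.project)

      work_item.work_item_children
    end
  end
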
diff --git a/spec/policies/blob_policy_spec.rb b/spec/policies/blob_policy_spec.rb
index 2b0465f3615..1be2318a0fe 100644
--- a/spec/policies/blob_policy_spec.rb
+++ b/spec/policies/blob_policy_spec.rb
@@ -20,8 +20,11 @@ RSpec.describe BlobPolicy do
with_them do
it 'grants permission' do
enable_admin_mode!(user) if admin_mode
- project.update!(visibility_level: Gitlab::VisibilityLevel.level_value(project_level.to_s))
- update_feature_access_level(project, feature_access_level)
+ update_feature_access_level(
+ project,
+ feature_access_level,
+ visibility_level: Gitlab::VisibilityLevel.level_value(project_level.to_s)
+ )
if expected_count == 1
expect(policy).to be_allowed(:read_blob)
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 05bba167bd3..c513baea517 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -1216,19 +1216,6 @@ RSpec.describe GroupPolicy do
end
end
- context 'with customer relations feature flag disabled' do
- let(:current_user) { owner }
-
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it { is_expected.to be_disallowed(:read_crm_contact) }
- it { is_expected.to be_disallowed(:read_crm_organization) }
- it { is_expected.to be_disallowed(:admin_crm_contact) }
- it { is_expected.to be_disallowed(:admin_crm_organization) }
- end
-
context 'when crm_enabled is false' do
let(:current_user) { owner }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index b77ccb83509..7b3d1abadc1 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe ProjectPolicy do
end
end
- it 'does not include the read_issue permission when the issue author is not a member of the private project' do
+ it 'does not include the read permissions when the issue author is not a member of the private project' do
project = create(:project, :private)
issue = create(:issue, project: project, author: create(:user))
user = issue.author
@@ -40,6 +40,7 @@ RSpec.describe ProjectPolicy do
expect(project.team.member?(issue.author)).to be false
expect(Ability).not_to be_allowed(user, :read_issue, project)
+ expect(Ability).not_to be_allowed(user, :read_work_item, project)
end
it_behaves_like 'model with wiki policies' do
@@ -61,7 +62,7 @@ RSpec.describe ProjectPolicy do
end
it 'does not include the issues permissions' do
- expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue, :create_incident, :create_work_item, :create_task
+ expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue, :create_incident, :create_work_item, :create_task, :read_work_item
end
it 'disables boards and lists permissions' do
@@ -73,7 +74,7 @@ RSpec.describe ProjectPolicy do
it 'does not include the issues permissions' do
create(:jira_integration, project: project)
- expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue, :create_incident, :create_work_item, :create_task
+ expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue, :create_incident, :create_work_item, :create_task, :read_work_item
end
end
end
@@ -463,6 +464,62 @@ RSpec.describe ProjectPolicy do
end
end
+ context 'owner access' do
+ let!(:owner_user) { create(:user) }
+ let!(:owner_of_different_thing) { create(:user) }
+ let(:stranger) { create(:user) }
+
+ context 'personal project' do
+ let!(:project) { create(:project) }
+ let!(:project2) { create(:project) }
+
+ before do
+ project.add_guest(guest)
+ project.add_reporter(reporter)
+ project.add_developer(developer)
+ project.add_maintainer(maintainer)
+ project2.add_owner(owner_of_different_thing)
+ end
+
+ it 'allows owner access', :aggregate_failures do
+ expect(described_class.new(owner_of_different_thing, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(stranger, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(guest, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(reporter, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(developer, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(maintainer, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(project.owner, project)).to be_allowed(:owner_access)
+ end
+ end
+
+ context 'group project' do
+ let(:group) { create(:group) }
+ let!(:group2) { create(:group) }
+ let!(:project) { create(:project, group: group) }
+
+ context 'group members' do
+ before do
+ group.add_guest(guest)
+ group.add_reporter(reporter)
+ group.add_developer(developer)
+ group.add_maintainer(maintainer)
+ group.add_owner(owner_user)
+ group2.add_owner(owner_of_different_thing)
+ end
+
+ it 'allows owner access', :aggregate_failures do
+ expect(described_class.new(owner_of_different_thing, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(stranger, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(guest, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(reporter, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(developer, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(maintainer, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(owner_user, project)).to be_allowed(:owner_access)
+ end
+ end
+ end
+ end
+
context 'reading a project' do
it 'allows access when a user has read access to the repo' do
expect(described_class.new(owner, project)).to be_allowed(:read_project)
@@ -678,14 +735,14 @@ RSpec.describe ProjectPolicy do
allow(project).to receive(:service_desk_enabled?).and_return(true)
end
- it { expect_allowed(:reporter_access, :create_note, :read_issue) }
+ it { expect_allowed(:reporter_access, :create_note, :read_issue, :read_work_item) }
context 'when issues are protected members only' do
before do
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
end
- it { expect_allowed(:reporter_access, :create_note, :read_issue) }
+ it { expect_allowed(:reporter_access, :create_note, :read_issue, :read_work_item) }
end
end
end
@@ -1282,6 +1339,98 @@ RSpec.describe ProjectPolicy do
end
end
+ describe 'admin_package' do
+ context 'with admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:admin_package) }
+ end
+
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:admin_package) }
+ end
+ end
+
+ %i[owner maintainer].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_allowed(:admin_package) }
+ end
+ end
+
+ %i[developer reporter guest non_member anonymous].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_disallowed(:admin_package) }
+ end
+ end
+ end
+
+ describe 'view_package_registry_project_settings' do
+ context 'with registry enabled' do
+ before do
+ stub_config(registry: { enabled: true })
+ end
+
+ context 'with an admin user' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ end
+
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[owner maintainer].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_allowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[developer reporter guest non_member anonymous].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+ end
+
+ context 'with registry disabled' do
+ before do
+ stub_config(registry: { enabled: false })
+ end
+
+ context 'with admin user' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+
+ %i[owner maintainer developer reporter guest non_member anonymous].each do |role|
+ context "with #{role}" do
+ let(:current_user) { public_send(role) }
+
+ it { is_expected.to be_disallowed(:view_package_registry_project_settings) }
+ end
+ end
+ end
+ end
+
describe 'read_feature_flag' do
subject { described_class.new(current_user, project) }
diff --git a/spec/policies/work_item_policy_spec.rb b/spec/policies/work_item_policy_spec.rb
index b19f7d2557d..9cfc4455979 100644
--- a/spec/policies/work_item_policy_spec.rb
+++ b/spec/policies/work_item_policy_spec.rb
@@ -37,6 +37,12 @@ RSpec.describe WorkItemPolicy do
let(:current_user) { guest_author }
it { is_expected.to be_allowed(:read_work_item) }
+
+ context 'when work_item is confidential' do
+ let(:work_item_subject) { create(:work_item, confidential: true, project: project) }
+
+ it { is_expected.not_to be_allowed(:read_work_item) }
+ end
end
end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index 80e08db6099..7b7463e6abc 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -250,45 +250,6 @@ RSpec.describe BlobPresenter do
presenter.highlight
end
end
-
- context 'when blob is ipynb' do
- let(:blob) { repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') }
- let(:git_blob) { blob.__getobj__ }
-
- before do
- allow(Gitlab::Diff::CustomDiff).to receive(:transformed_for_diff?).and_return(true)
- end
-
- it 'uses md as the transformed language' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ipython/markdown-table.ipynb', anything, plain: nil, language: 'md')
-
- presenter.highlight
- end
-
- it 'transforms the blob' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ipython/markdown-table.ipynb', include("%%"), plain: nil, language: 'md')
-
- presenter.highlight
- end
- end
-
- context 'when blob is other file type' do
- let(:git_blob) { blob.__getobj__ }
-
- before do
- allow(git_blob)
- .to receive(:data)
- .and_return("line one\nline two\nline 3")
-
- allow(blob).to receive(:language_from_gitattributes).and_return('ruby')
- end
-
- it 'does not transform the file' do
- expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby')
-
- presenter.highlight
- end
- end
end
describe '#blob_language' do
@@ -304,16 +265,6 @@ RSpec.describe BlobPresenter do
it { is_expected.to eq('cpp') }
end
- context 'when blob is ipynb' do
- let(:blob) { repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') }
-
- before do
- allow(Gitlab::Diff::CustomDiff).to receive(:transformed_for_diff?).and_return(true)
- end
-
- it { is_expected.to eq('md') }
- end
-
context 'when blob is binary' do
let(:blob) { repository.blob_at('HEAD', 'Gemfile.zip') }
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index dbf5af095cb..798bee70e42 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -4,9 +4,45 @@ require 'spec_helper'
RSpec.describe MergeRequestPresenter do
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:resource) { create(:merge_request, source_project: project) }
+ let(:resource) { create(:merge_request, source_project: project) }
+
let_it_be(:user) { create(:user) }
+ describe '#mergeable_discussions_state' do
+ subject { described_class.new(resource).mergeable_discussions_state }
+
+ let(:discussions_state) { double }
+
+ before do
+ allow(resource).to receive(:mergeable_discussions_state?).and_return(discussions_state)
+ end
+
+ context 'when change_response_code_merge_status is enabled' do
+ it 'returns the mergeable_discussions_state' do
+ is_expected.to eq(discussions_state)
+ end
+ end
+
+ context 'when change_response_code_merge_status is disabled' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: false)
+ end
+
+ context 'when it is not mergeable' do
+ it 'returns false' do
+ resource.close!
+ is_expected.to eq(false)
+ end
+ end
+
+ context 'when it is mergeable' do
+ it 'returns the mergeable_discussions_state' do
+ is_expected.to eq(discussions_state)
+ end
+ end
+ end
+ end
+
describe '#ci_status' do
subject { described_class.new(resource).ci_status }
@@ -308,7 +344,7 @@ RSpec.describe MergeRequestPresenter do
end
before do
- allow(resource).to receive(:work_in_progress?).and_return(true)
+ allow(resource).to receive(:draft?).and_return(true)
end
context 'when merge request enabled and has permission' do
diff --git a/spec/presenters/packages/pypi/simple_index_presenter_spec.rb b/spec/presenters/packages/pypi/simple_index_presenter_spec.rb
new file mode 100644
index 00000000000..d915706577f
--- /dev/null
+++ b/spec/presenters/packages/pypi/simple_index_presenter_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Pypi::SimpleIndexPresenter, :aggregate_failures do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:package_name) { 'sample-project' }
+ let_it_be(:package1) { create(:pypi_package, project: project, name: package_name, version: '1.0.0') }
+ let_it_be(:package2) { create(:pypi_package, project: project, name: package_name, version: '2.0.0') }
+
+ let(:packages) { project.packages }
+
+ describe '#body' do
+ subject(:presenter) { described_class.new(packages, project_or_group).body }
+
+ shared_examples_for "pypi package presenter" do
+ where(:version, :expected_version) do
+ '>=2.7' | '&gt;=2.7'
+ '"><script>alert(1)</script>' | '&quot;&gt;&lt;script&gt;alert(1)&lt;/script&gt;'
+ '>=2.7, !=3.0' | '&gt;=2.7, !=3.0'
+ end
+
+ with_them do
+ let(:python_version) { version }
+ let(:expected_python_version) { expected_version }
+
+ before do
+ package1.pypi_metadatum.update_column(:required_python, python_version)
+ package2.pypi_metadatum.update_column(:required_python, '')
+ end
+
+ it 'contains links for all packages' do
+ expect(presenter).to include(expected_link1)
+ expect(presenter).to include(expected_link2)
+ end
+ end
+ end
+
+ context 'for project' do
+ let(:project_or_group) { project }
+ let(:expected_link1) { "<a href=\"http://localhost/api/v4/projects/#{project.id}/packages/pypi/simple/#{package1.normalized_pypi_name}\" data-requires-python=\"#{expected_python_version}\">#{package1.name}</a>" } # rubocop:disable Layout/LineLength
+ let(:expected_link2) { "<a href=\"http://localhost/api/v4/projects/#{project.id}/packages/pypi/simple/#{package2.normalized_pypi_name}\" data-requires-python=\"\">#{package2.name}</a>" } # rubocop:disable Layout/LineLength
+
+ it_behaves_like 'pypi package presenter'
+ end
+
+ context 'for group' do
+ let(:project_or_group) { group }
+ let(:expected_link1) { "<a href=\"http://localhost/api/v4/groups/#{group.id}/-/packages/pypi/simple/#{package1.normalized_pypi_name}\" data-requires-python=\"#{expected_python_version}\">#{package1.name}</a>" } # rubocop:disable Layout/LineLength
+ let(:expected_link2) { "<a href=\"http://localhost/api/v4/groups/#{group.id}/-/packages/pypi/simple/#{package2.normalized_pypi_name}\" data-requires-python=\"\">#{package2.name}</a>" } # rubocop:disable Layout/LineLength
+
+ it_behaves_like 'pypi package presenter'
+ end
+
+ context 'with package files pending destruction' do
+ let_it_be(:package_pending_destruction) do
+ create(:package, :pending_destruction, project: project, name: "package_pending_destruction")
+ end
+
+ let(:project_or_group) { project }
+
+ it { is_expected.not_to include(package_pending_destruction.name) }
+ end
+ end
+end
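
The expected links assert HTML-escaped values in the data-requires-python attribute, including the script-injection row. The escaping itself matches plain ERB::Util.html_escape from the Ruby standard library; whether the presenter uses that exact helper is an assumption:

  require 'erb'

  ERB::Util.html_escape('>=2.7, !=3.0')
  # => "&gt;=2.7, !=3.0"
  ERB::Util.html_escape('"><script>alert(1)</script>')
  # => "&quot;&gt;&lt;script&gt;alert(1)&lt;/script&gt;"
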
diff --git a/spec/presenters/packages/pypi/package_presenter_spec.rb b/spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb
index b19abdbc17a..be454e5168c 100644
--- a/spec/presenters/packages/pypi/package_presenter_spec.rb
+++ b/spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::Packages::Pypi::PackagePresenter do
+RSpec.describe ::Packages::Pypi::SimplePackageVersionsPresenter, :aggregate_failures do
using RSpec::Parameterized::TableSyntax
let_it_be(:group) { create(:group) }
@@ -11,14 +11,13 @@ RSpec.describe ::Packages::Pypi::PackagePresenter do
let_it_be(:package1) { create(:pypi_package, project: project, name: package_name, version: '1.0.0') }
let_it_be(:package2) { create(:pypi_package, project: project, name: package_name, version: '2.0.0') }
- let(:packages) { [package1, package2] }
-
let(:file) { package.package_files.first }
let(:filename) { file.file_name }
-
- subject(:presenter) { described_class.new(packages, project_or_group).body}
+ let(:packages) { project.packages }
describe '#body' do
+ subject(:presenter) { described_class.new(packages, project_or_group).body }
+
shared_examples_for "pypi package presenter" do
where(:version, :expected_version, :with_package1) do
'>=2.7' | '&gt;=2.7' | true
@@ -32,29 +31,31 @@ RSpec.describe ::Packages::Pypi::PackagePresenter do
let(:package) { with_package1 ? package1 : package2 }
before do
- package.pypi_metadatum.required_python = python_version
+ package.pypi_metadatum.update_column(:required_python, python_version)
end
- it { is_expected.to include expected_file }
+ it { is_expected.to include expected_link }
end
end
context 'for project' do
let(:project_or_group) { project }
- let(:expected_file) { "<a href=\"http://localhost/api/v4/projects/#{project.id}/packages/pypi/files/#{file.file_sha256}/#{filename}#sha256=#{file.file_sha256}\" data-requires-python=\"#{expected_python_version}\">#{filename}</a><br>" }
+ let(:expected_link) { "<a href=\"http://localhost/api/v4/projects/#{project.id}/packages/pypi/files/#{file.file_sha256}/#{filename}#sha256=#{file.file_sha256}\" data-requires-python=\"#{expected_python_version}\">#{filename}</a>" } # rubocop:disable Layout/LineLength
it_behaves_like 'pypi package presenter'
end
context 'for group' do
let(:project_or_group) { group }
- let(:expected_file) { "<a href=\"http://localhost/api/v4/groups/#{group.id}/-/packages/pypi/files/#{file.file_sha256}/#{filename}#sha256=#{file.file_sha256}\" data-requires-python=\"#{expected_python_version}\">#{filename}</a><br>" }
+ let(:expected_link) { "<a href=\"http://localhost/api/v4/groups/#{group.id}/-/packages/pypi/files/#{file.file_sha256}/#{filename}#sha256=#{file.file_sha256}\" data-requires-python=\"#{expected_python_version}\">#{filename}</a>" } # rubocop:disable Layout/LineLength
it_behaves_like 'pypi package presenter'
end
context 'with package files pending destruction' do
- let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, package: package1, file_name: "package_file_pending_destruction") }
+ let_it_be(:package_file_pending_destruction) do
+ create(:package_file, :pending_destruction, package: package1, file_name: "package_file_pending_destruction")
+ end
let(:project_or_group) { project }
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 33a4a1b9d4c..df3e4b985ab 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -73,8 +73,6 @@ RSpec.describe ProjectPresenter do
context 'when repository is not empty' do
let_it_be(:project) { create(:project, :public, :repository) }
- let(:release) { create(:release, project: project, author: user) }
-
it 'returns files and readme if user has repository access' do
allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(true)
@@ -98,6 +96,9 @@ RSpec.describe ProjectPresenter do
end
it 'returns releases anchor' do
+ user = create(:user)
+ release = create(:release, project: project, author: user)
+
expect(release).to be_truthy
expect(presenter.releases_anchor_data).to have_attributes(
is_link: true,
@@ -211,16 +212,6 @@ RSpec.describe ProjectPresenter do
context 'statistics anchors (empty repo)' do
let_it_be(:project) { create(:project, :empty_repo) }
- describe '#files_anchor_data' do
- it 'returns files data' do
- expect(presenter.files_anchor_data).to have_attributes(
- is_link: true,
- label: a_string_including('0 Bytes'),
- link: nil
- )
- end
- end
-
describe '#storage_anchor_data' do
it 'returns storage data' do
expect(presenter.storage_anchor_data).to have_attributes(
@@ -275,22 +266,22 @@ RSpec.describe ProjectPresenter do
let(:presenter) { described_class.new(project, current_user: user) }
- describe '#files_anchor_data' do
- it 'returns files data' do
- expect(presenter.files_anchor_data).to have_attributes(
+ describe '#storage_anchor_data' do
+ it 'returns storage data without usage quotas link for non-admin users' do
+ expect(presenter.storage_anchor_data).to have_attributes(
is_link: true,
label: a_string_including('0 Bytes'),
- link: presenter.project_tree_path(project)
+ link: nil
)
end
- end
- describe '#storage_anchor_data' do
- it 'returns storage data' do
+ it 'returns storage data with usage quotas link for admin users' do
+ project.add_owner(user)
+
expect(presenter.storage_anchor_data).to have_attributes(
is_link: true,
label: a_string_including('0 Bytes'),
- link: presenter.project_tree_path(project)
+ link: presenter.project_usage_quotas_path(project)
)
end
end
diff --git a/spec/presenters/releases/link_presenter_spec.rb b/spec/presenters/releases/link_presenter_spec.rb
new file mode 100644
index 00000000000..e52c68ffb38
--- /dev/null
+++ b/spec/presenters/releases/link_presenter_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Releases::LinkPresenter do
+ describe '#direct_asset_url' do
+ let_it_be(:release) { create(:release) }
+
+ let(:link) { build(:release_link, release: release, url: url, filepath: filepath) }
+ let(:url) { 'https://google.com/-/jobs/140463678/artifacts/download' }
+ let(:presenter) { described_class.new(link) }
+
+ subject { presenter.direct_asset_url }
+
+ context 'when filepath is provided' do
+ let(:filepath) { '/bin/bigfile.exe' }
+ let(:expected_url) do
+ "http://localhost/#{release.project.namespace.path}/#{release.project.name}" \
+ "/-/releases/#{release.tag}/downloads/bin/bigfile.exe"
+ end
+
+ it { is_expected.to eq(expected_url) }
+ end
+
+ context 'when filepath is not provided' do
+ let(:filepath) { nil }
+
+ it { is_expected.to eq(url) }
+ end
+ end
+end
diff --git a/spec/presenters/service_hook_presenter_spec.rb b/spec/presenters/service_hook_presenter_spec.rb
index 25ded17fb34..c7703593327 100644
--- a/spec/presenters/service_hook_presenter_spec.rb
+++ b/spec/presenters/service_hook_presenter_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe ServiceHookPresenter do
subject { service_hook.present.logs_details_path(web_hook_log) }
let(:expected_path) do
- "/#{project.namespace.path}/#{project.name}/-/integrations/#{integration.to_param}/hook_logs/#{web_hook_log.id}"
+ "/#{project.namespace.path}/#{project.name}/-/settings/integrations/#{integration.to_param}/hook_logs/#{web_hook_log.id}"
end
it { is_expected.to eq(expected_path) }
@@ -22,7 +22,7 @@ RSpec.describe ServiceHookPresenter do
subject { service_hook.present.logs_retry_path(web_hook_log) }
let(:expected_path) do
- "/#{project.namespace.path}/#{project.name}/-/integrations/#{integration.to_param}/hook_logs/#{web_hook_log.id}/retry"
+ "/#{project.namespace.path}/#{project.name}/-/settings/integrations/#{integration.to_param}/hook_logs/#{web_hook_log.id}/retry"
end
it { is_expected.to eq(expected_path) }
diff --git a/spec/presenters/web_hook_log_presenter_spec.rb b/spec/presenters/web_hook_log_presenter_spec.rb
index 5827f3378de..188737e0fb6 100644
--- a/spec/presenters/web_hook_log_presenter_spec.rb
+++ b/spec/presenters/web_hook_log_presenter_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe WebHookLogPresenter do
let(:web_hook) { create(:service_hook, integration: integration) }
let(:integration) { create(:drone_ci_integration, project: project) }
- it { is_expected.to eq(project_integration_hook_log_path(project, integration, web_hook_log)) }
+ it { is_expected.to eq(project_settings_integration_hook_log_path(project, integration, web_hook_log)) }
end
end
@@ -41,7 +41,7 @@ RSpec.describe WebHookLogPresenter do
let(:web_hook) { create(:service_hook, integration: integration) }
let(:integration) { create(:drone_ci_integration, project: project) }
- it { is_expected.to eq(retry_project_integration_hook_log_path(project, integration, web_hook_log)) }
+ it { is_expected.to eq(retry_project_settings_integration_hook_log_path(project, integration, web_hook_log)) }
end
end
end
diff --git a/spec/requests/admin/background_migrations_controller_spec.rb b/spec/requests/admin/background_migrations_controller_spec.rb
index 0fd2ba26cb8..884448fdd95 100644
--- a/spec/requests/admin/background_migrations_controller_spec.rb
+++ b/spec/requests/admin/background_migrations_controller_spec.rb
@@ -40,15 +40,17 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode do
describe 'GET #index' do
let(:default_model) { ActiveRecord::Base }
+ let(:db_config) { instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, name: 'fake_db') }
before do
+ allow(Gitlab::Database).to receive(:db_config_for_connection).and_return(db_config)
allow(Gitlab::Database).to receive(:database_base_models).and_return(base_models)
end
let!(:main_database_migration) { create(:batched_background_migration, :active) }
context 'when no database is provided' do
- let(:base_models) { { 'fake_db' => default_model } }
+ let(:base_models) { { 'fake_db' => default_model }.with_indifferent_access }
before do
stub_const('Gitlab::Database::MAIN_DATABASE_NAME', 'fake_db')
@@ -68,7 +70,7 @@ RSpec.describe Admin::BackgroundMigrationsController, :enable_admin_mode do
end
context 'when multiple databases are enabled', :add_ci_connection do
- let(:base_models) { { 'fake_db' => default_model, 'ci' => ci_model } }
+ let(:base_models) { { 'fake_db' => default_model, 'ci' => ci_model }.with_indifferent_access }
let(:ci_model) { Ci::ApplicationRecord }
context 'when CI database is provided' do
diff --git a/spec/requests/admin/batched_jobs_controller_spec.rb b/spec/requests/admin/batched_jobs_controller_spec.rb
index 9a0654c64b4..fb51b3bce88 100644
--- a/spec/requests/admin/batched_jobs_controller_spec.rb
+++ b/spec/requests/admin/batched_jobs_controller_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Admin::BatchedJobsController, :enable_admin_mode do
end
context 'when multiple databases are enabled', :add_ci_connection do
- let(:base_models) { { 'fake_db' => default_model, 'ci' => ci_model } }
+ let(:base_models) { { 'main' => default_model, 'ci' => ci_model }.with_indifferent_access }
let(:ci_model) { Ci::ApplicationRecord }
before do
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index 3b8136f265b..9f9907f4f00 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe API::BulkImports do
entities: [
source_type: 'group_entity',
source_full_path: 'full_path',
- destination_name: 'destination_name',
+ destination_name: 'destination_slug',
destination_namespace: 'destination_namespace'
]
}
@@ -82,7 +82,7 @@ RSpec.describe API::BulkImports do
entities: [
source_type: 'group_entity',
source_full_path: 'full_path',
- destination_name: 'destination_name',
+ destination_name: 'destination_slug',
destination_namespace: 'destination_namespace'
]
}
diff --git a/spec/requests/api/ci/job_artifacts_spec.rb b/spec/requests/api/ci/job_artifacts_spec.rb
index 1dd1ca4e115..2fa1ffb4974 100644
--- a/spec/requests/api/ci/job_artifacts_spec.rb
+++ b/spec/requests/api/ci/job_artifacts_spec.rb
@@ -41,42 +41,58 @@ RSpec.describe API::Ci::JobArtifacts do
describe 'DELETE /projects/:id/jobs/:job_id/artifacts' do
let!(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
- before do
- delete api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
- end
+ context 'when project is not undergoing stats refresh' do
+ before do
+ delete api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+ end
- context 'when user is anonymous' do
- let(:api_user) { nil }
+ context 'when user is anonymous' do
+ let(:api_user) { nil }
- it 'does not delete artifacts' do
- expect(job.job_artifacts.size).to eq 2
- end
+ it 'does not delete artifacts' do
+ expect(job.job_artifacts.size).to eq 2
+ end
- it 'returns status 401 (unauthorized)' do
- expect(response).to have_gitlab_http_status(:unauthorized)
+ it 'returns status 401 (unauthorized)' do
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
end
- end
- context 'with developer' do
- it 'does not delete artifacts' do
- expect(job.job_artifacts.size).to eq 2
+ context 'with developer' do
+ it 'does not delete artifacts' do
+ expect(job.job_artifacts.size).to eq 2
+ end
+
+ it 'returns status 403 (forbidden)' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
- it 'returns status 403 (forbidden)' do
- expect(response).to have_gitlab_http_status(:forbidden)
+ context 'with authorized user' do
+ let(:maintainer) { create(:project_member, :maintainer, project: project).user }
+ let!(:api_user) { maintainer }
+
+ it 'deletes artifacts' do
+ expect(job.job_artifacts.size).to eq 0
+ end
+
+ it 'returns status 204 (no content)' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
end
end
- context 'with authorized user' do
- let(:maintainer) { create(:project_member, :maintainer, project: project).user }
- let!(:api_user) { maintainer }
+ context 'when project is undergoing stats refresh' do
+ it_behaves_like 'preventing request because of ongoing project stats refresh' do
+ let(:maintainer) { create(:project_member, :maintainer, project: project).user }
+ let(:api_user) { maintainer }
+ let(:make_request) { delete api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user) }
- it 'deletes artifacts' do
- expect(job.job_artifacts.size).to eq 0
- end
+ it 'does not delete artifacts' do
+ make_request
- it 'returns status 204 (no content)' do
- expect(response).to have_gitlab_http_status(:no_content)
+ expect(job.job_artifacts.size).to eq 2
+ end
end
end
end
@@ -131,6 +147,22 @@ RSpec.describe API::Ci::JobArtifacts do
expect(response).to have_gitlab_http_status(:accepted)
end
+
+ context 'when project is undergoing stats refresh' do
+ let!(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
+
+ it_behaves_like 'preventing request because of ongoing project stats refresh' do
+ let(:maintainer) { create(:project_member, :maintainer, project: project).user }
+ let(:api_user) { maintainer }
+ let(:make_request) { delete api("/projects/#{project.id}/artifacts", api_user) }
+
+ it 'does not delete artifacts' do
+ make_request
+
+ expect(job.job_artifacts.size).to eq 2
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index 4bd9f81fd1d..84ef9f8db1b 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -239,6 +239,17 @@ RSpec.describe API::Ci::Jobs do
end
end
+ context 'when non-deployment environment action' do
+ let(:job) do
+ create(:environment, name: 'review', project_id: project.id)
+ create(:ci_build, :artifacts, :stop_review_app, environment: 'review', pipeline: pipeline, user: api_user, status: job_status)
+ end
+
+ it 'includes environment slug' do
+ expect(json_response.dig('environment', 'slug')).to eq('review')
+ end
+ end
+
context 'when passing the token as params' do
let(:headers) { {} }
let(:params) { { job_token: job.token } }
@@ -655,62 +666,80 @@ RSpec.describe API::Ci::Jobs do
before do
project.add_role(user, role)
-
- post api("/projects/#{project.id}/jobs/#{job.id}/erase", user)
end
- shared_examples_for 'erases job' do
- it 'erases job content' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.job_artifacts.count).to eq(0)
- expect(job.trace.exist?).to be_falsy
- expect(job.artifacts_file.present?).to be_falsy
- expect(job.artifacts_metadata.present?).to be_falsy
- expect(job.has_job_artifacts?).to be_falsy
+ context 'when project is not undergoing stats refresh' do
+ before do
+ post api("/projects/#{project.id}/jobs/#{job.id}/erase", user)
end
- end
- context 'job is erasable' do
- let(:job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, :success, project: project, pipeline: pipeline) }
+ shared_examples_for 'erases job' do
+ it 'erases job content' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.job_artifacts.count).to eq(0)
+ expect(job.trace.exist?).to be_falsy
+ expect(job.artifacts_file.present?).to be_falsy
+ expect(job.artifacts_metadata.present?).to be_falsy
+ expect(job.has_job_artifacts?).to be_falsy
+ end
+ end
+
+ context 'job is erasable' do
+ let(:job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, :success, project: project, pipeline: pipeline) }
- it_behaves_like 'erases job'
+ it_behaves_like 'erases job'
- it 'updates job' do
- job.reload
+ it 'updates job' do
+ job.reload
- expect(job.erased_at).to be_truthy
- expect(job.erased_by).to eq(user)
+ expect(job.erased_at).to be_truthy
+ expect(job.erased_by).to eq(user)
+ end
end
- end
- context 'when job has an unarchived trace artifact' do
- let(:job) { create(:ci_build, :success, :trace_live, :unarchived_trace_artifact, project: project, pipeline: pipeline) }
+ context 'when job has an unarchived trace artifact' do
+ let(:job) { create(:ci_build, :success, :trace_live, :unarchived_trace_artifact, project: project, pipeline: pipeline) }
- it_behaves_like 'erases job'
- end
+ it_behaves_like 'erases job'
+ end
- context 'job is not erasable' do
- let(:job) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }
+ context 'job is not erasable' do
+ let(:job) { create(:ci_build, :trace_live, project: project, pipeline: pipeline) }
- it 'responds with forbidden' do
- expect(response).to have_gitlab_http_status(:forbidden)
+ it 'responds with forbidden' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
- end
- context 'when a developer erases a build' do
- let(:role) { :developer }
- let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline, user: owner) }
+ context 'when a developer erases a build' do
+ let(:role) { :developer }
+ let(:job) { create(:ci_build, :trace_artifact, :artifacts, :success, project: project, pipeline: pipeline, user: owner) }
- context 'when the build was created by the developer' do
- let(:owner) { user }
+ context 'when the build was created by the developer' do
+ let(:owner) { user }
+
+ it { expect(response).to have_gitlab_http_status(:created) }
+ end
- it { expect(response).to have_gitlab_http_status(:created) }
+ context 'when the build was created by another user' do
+ let(:owner) { create(:user) }
+
+ it { expect(response).to have_gitlab_http_status(:forbidden) }
+ end
end
+ end
- context 'when the build was created by the other' do
- let(:owner) { create(:user) }
+ context 'when project is undergoing stats refresh' do
+ let(:job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, :success, project: project, pipeline: pipeline) }
- it { expect(response).to have_gitlab_http_status(:forbidden) }
+ it_behaves_like 'preventing request because of ongoing project stats refresh' do
+ let(:make_request) { post api("/projects/#{project.id}/jobs/#{job.id}/erase", user) }
+
+ it 'does not delete artifacts' do
+ make_request
+
+ expect(job.reload.job_artifacts).not_to be_empty
+ end
end
end
end
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index 12faeec94da..697fe16e222 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -1018,6 +1018,18 @@ RSpec.describe API::Ci::Pipelines do
expect { build.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ context 'when project is undergoing stats refresh' do
+ it_behaves_like 'preventing request because of ongoing project stats refresh' do
+ let(:make_request) { delete api("/projects/#{project.id}/pipelines/#{pipeline.id}", owner) }
+
+ it 'does not delete the pipeline' do
+ make_request
+
+ expect(pipeline.reload).to be_persisted
+ end
+ end
+ end
end
context 'unauthorized user' do
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index dbc5f0e74e2..3c6f9ac2816 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -216,7 +216,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['token']).to eq(job.token)
expect(json_response['job_info']).to eq(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
- expect(json_response['image']).to eq({ 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [] })
+ expect(json_response['image']).to eq({ 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'pull_policy' => nil })
expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
'alias' => nil, 'command' => nil, 'ports' => [], 'variables' => nil },
{ 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
@@ -810,6 +810,45 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
+ context 'when image has pull_policy' do
+ let(:job) { create(:ci_build, :pending, :queued, pipeline: pipeline, options: options) }
+
+ let(:options) do
+ {
+ image: {
+ name: 'ruby',
+ pull_policy: ['if-not-present']
+ }
+ }
+ end
+
+ it 'returns the image with pull policy' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'image' => { 'name' => 'ruby', 'pull_policy' => ['if-not-present'], 'entrypoint' => nil, 'ports' => [] }
+ )
+ end
+
+ context 'when the FF ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'returns the image without pull policy' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'image' => { 'name' => 'ruby', 'entrypoint' => nil, 'ports' => [] }
+ )
+ end
+ end
+ end
+
describe 'a job with excluded artifacts' do
context 'when excluded paths are defined' do
let(:job) do
diff --git a/spec/requests/api/clusters/agent_tokens_spec.rb b/spec/requests/api/clusters/agent_tokens_spec.rb
index ba26faa45a3..2dca21ca6f1 100644
--- a/spec/requests/api/clusters/agent_tokens_spec.rb
+++ b/spec/requests/api/clusters/agent_tokens_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe API::Clusters::AgentTokens do
it 'cannot access agent tokens' do
get api("/projects/#{project.id}/cluster_agents/#{agent.id}/tokens", unauthorized_user)
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -85,7 +85,7 @@ RSpec.describe API::Clusters::AgentTokens do
it 'cannot access single agent token' do
get api("/projects/#{project.id}/cluster_agents/#{agent.id}/tokens/#{agent_token_one.id}", unauthorized_user)
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'cannot access token from agent of another project' do
diff --git a/spec/requests/api/clusters/agents_spec.rb b/spec/requests/api/clusters/agents_spec.rb
index e29be255289..72d4266b9e3 100644
--- a/spec/requests/api/clusters/agents_spec.rb
+++ b/spec/requests/api/clusters/agents_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe API::Clusters::Agents do
before do
project.add_maintainer(user)
+ project.add_guest(unauthorized_user)
end
describe 'GET /projects/:id/cluster_agents' do
@@ -26,6 +27,19 @@ RSpec.describe API::Clusters::Agents do
expect(json_response.first['name']).to eq(agent.name)
end
end
+
+ it 'returns an empty list when no agents are registered' do
+ no_agents_project = create(:project, namespace: user.namespace)
+
+ get api("/projects/#{no_agents_project.id}/cluster_agents", user)
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/agents')
+ expect(json_response.count).to eq(0)
+ end
+ end
end
context 'unauthorized user' do
@@ -140,10 +154,10 @@ RSpec.describe API::Clusters::Agents do
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'returns a 404 if the user is unauthorized to delete' do
+ it 'returns a 403 if the user is unauthorized to delete' do
delete api("/projects/#{project.id}/cluster_agents/#{agent.id}", unauthorized_user)
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
it_behaves_like '412 response' do
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 8328b454122..93f21c880a4 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe API::Environments do
end
context 'when filtering' do
- let_it_be(:environment2) { create(:environment, project: project) }
+ let_it_be(:stopped_environment) { create(:environment, :stopped, project: project) }
it 'returns environment by name' do
get api("/projects/#{project.id}/environments?name=#{environment.name}", user)
@@ -152,11 +152,32 @@ RSpec.describe API::Environments do
expect(json_response.size).to eq(0)
end
- it 'returns a 400 status code with invalid states' do
+ it 'returns environment by valid state' do
+ get api("/projects/#{project.id}/environments?states=available", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['name']).to eq(environment.name)
+ end
+
+ it 'returns all environments when state is not specified' do
+ get api("/projects/#{project.id}/environments", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(2)
+ expect(json_response.first['name']).to eq(environment.name)
+ expect(json_response.last['name']).to eq(stopped_environment.name)
+ end
+
+ it 'returns a 400 when filtering by invalid state' do
get api("/projects/#{project.id}/environments?states=test", user)
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include('Requested states are invalid')
+ expect(json_response['error']).to eq('states does not have a valid value')
end
end
end
diff --git a/spec/requests/api/error_tracking/collector_spec.rb b/spec/requests/api/error_tracking/collector_spec.rb
index c0d7eb5460f..dfca994d1c3 100644
--- a/spec/requests/api/error_tracking/collector_spec.rb
+++ b/spec/requests/api/error_tracking/collector_spec.rb
@@ -106,17 +106,30 @@ RSpec.describe API::ErrorTracking::Collector do
end
context 'gzip body' do
- let(:headers) do
+ let(:standard_headers) do
{
'X-Sentry-Auth' => "Sentry sentry_key=#{client_key.public_key}",
- 'HTTP_CONTENT_ENCODING' => 'gzip',
- 'CONTENT_TYPE' => 'application/x-sentry-envelope'
+ 'HTTP_CONTENT_ENCODING' => 'gzip'
}
end
let(:params) { ActiveSupport::Gzip.compress(raw_event) }
- it_behaves_like 'successful request'
+ context 'with application/x-sentry-envelope Content-Type' do
+ let(:headers) { standard_headers.merge({ 'CONTENT_TYPE' => 'application/x-sentry-envelope' }) }
+
+ it_behaves_like 'successful request'
+ end
+
+ context 'with unexpected Content-Type' do
+ let(:headers) { standard_headers.merge({ 'CONTENT_TYPE' => 'application/gzip' }) }
+
+ it 'responds with 415' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unsupported_media_type)
+ end
+ end
end
end
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index 4e75b0510d0..b54be4f5258 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -168,19 +168,15 @@ RSpec.describe API::Features, stub_feature_flags: false do
end
end
- shared_examples 'does not enable the flag' do |actor_type, actor_path|
+ shared_examples 'does not enable the flag' do |actor_type|
+ let(:actor_path) { raise NotImplementedError }
+ let(:expected_inexistent_path) { actor_path }
+
it 'returns the current state of the flag without changes' do
post api("/features/#{feature_name}", admin), params: { value: 'true', actor_type => actor_path }
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- "name" => feature_name,
- "state" => "off",
- "gates" => [
- { "key" => "boolean", "value" => false }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq("400 Bad request - #{expected_inexistent_path} is not found!")
end
end
@@ -201,6 +197,19 @@ RSpec.describe API::Features, stub_feature_flags: false do
end
end
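+ # Note: including contexts define `gate_params` (the actor list posted to
+ # the API) and `expected_gate_params` (the flipper IDs expected in the
+ # "actors" gate of the response).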
+ shared_examples 'creates an enabled feature for the specified entries' do
+ it do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', **gate_params }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['name']).to eq(feature_name)
+ expect(json_response['gates']).to contain_exactly(
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'actors', 'value' => array_including(expected_gate_params) }
+ )
+ end
+ end
+
context 'when enabling for a project by path' do
context 'when the project exists' do
it_behaves_like 'enables the flag for the actor', :project do
@@ -209,7 +218,9 @@ RSpec.describe API::Features, stub_feature_flags: false do
end
context 'when the project does not exist' do
- it_behaves_like 'does not enable the flag', :project, 'mep/to/the/mep/mep'
+ it_behaves_like 'does not enable the flag', :project do
+ let(:actor_path) { 'mep/to/the/mep/mep' }
+ end
end
end
@@ -221,7 +232,9 @@ RSpec.describe API::Features, stub_feature_flags: false do
end
context 'when the group does not exist' do
- it_behaves_like 'does not enable the flag', :group, 'not/a/group'
+ it_behaves_like 'does not enable the flag', :group do
+ let(:actor_path) { 'not/a/group' }
+ end
end
end
@@ -239,7 +252,9 @@ RSpec.describe API::Features, stub_feature_flags: false do
end
context 'when the user namespace does not exist' do
- it_behaves_like 'does not enable the flag', :namespace, 'not/a/group'
+ it_behaves_like 'does not enable the flag', :namespace do
+ let(:actor_path) { 'not/a/group' }
+ end
end
context 'when a project namespace exists' do
@@ -251,6 +266,98 @@ RSpec.describe API::Features, stub_feature_flags: false do
end
end
+ context 'with multiple users' do
+ let_it_be(:users) { create_list(:user, 3) }
+
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { user: users.map(&:username).join(',') } }
+ let(:expected_gate_params) { users.map(&:flipper_id) }
+ end
+
+ context 'when empty values exist between commas' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { user: "#{users.first.username},,,," } }
+ let(:expected_gate_params) { users.first.flipper_id }
+ end
+ end
+
+ context 'when one of the users does not exist' do
+ it_behaves_like 'does not enable the flag', :user do
+ let(:actor_path) { "#{users.first.username},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
+ end
+ end
+
+ context 'with multiple projects' do
+ let_it_be(:projects) { create_list(:project, 3) }
+
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { project: projects.map(&:full_path).join(',') } }
+ let(:expected_gate_params) { projects.map(&:flipper_id) }
+ end
+
+ context 'when empty values exist between commas' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { project: "#{projects.first.full_path},,,," } }
+ let(:expected_gate_params) { projects.first.flipper_id }
+ end
+ end
+
+ context 'when one of the projects does not exist' do
+ it_behaves_like 'does not enable the flag', :project do
+ let(:actor_path) { "#{projects.first.full_path},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
+ end
+ end
+
+ context 'with multiple groups' do
+ let_it_be(:groups) { create_list(:group, 3) }
+
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { group: groups.map(&:full_path).join(',') } }
+ let(:expected_gate_params) { groups.map(&:flipper_id) }
+ end
+
+ context 'when empty values exist between commas' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { group: "#{groups.first.full_path},,,," } }
+ let(:expected_gate_params) { groups.first.flipper_id }
+ end
+ end
+
+ context 'when one of the groups does not exist' do
+ it_behaves_like 'does not enable the flag', :group do
+ let(:actor_path) { "#{groups.first.full_path},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
+ end
+ end
+
+ context 'with multiple namespaces' do
+ let_it_be(:namespaces) { create_list(:namespace, 3) }
+
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { namespace: namespaces.map(&:full_path).join(',') } }
+ let(:expected_gate_params) { namespaces.map(&:flipper_id) }
+ end
+
+ context 'when empty values exist between commas' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { namespace: "#{namespaces.first.full_path},,,," } }
+ let(:expected_gate_params) { namespaces.first.flipper_id }
+ end
+ end
+
+ context 'when one of the namespaces does not exist' do
+ it_behaves_like 'does not enable the flag', :namespace do
+ let(:actor_path) { "#{namespaces.first.full_path},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
+ end
+ end
+
it 'creates a feature with the given percentage of time if passed an integer' do
post api("/features/#{feature_name}", admin), params: { value: '50' }
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index 2d1bb45390b..d1737fc22ae 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -258,4 +258,81 @@ RSpec.describe 'Query.project.pipeline' do
end
end
end
+
+ describe '.jobs.count' do
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:successful_job) { create(:ci_build, :success, pipeline: pipeline) }
+ let_it_be(:pending_job) { create(:ci_build, :pending, pipeline: pipeline) }
+ let_it_be(:failed_job) { create(:ci_build, :failed, pipeline: pipeline) }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ jobs {
+ count
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ post_graphql(query, current_user: user)
+ end
+
+ it 'returns the number of jobs' do
+ expect(graphql_data_at(:project, :pipeline, :jobs, :count)).to eq(3)
+ end
+
+ context 'with limit value' do
+ let(:limit) { 1 }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ jobs {
+ count(limit: #{limit})
+ }
+ }
+ }
+ }
+ )
+ end
+
+ it 'returns a limited number of jobs' do
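+ # With limit: 1 the expected count is 2; this suggests the field counts up
+ # to limit + 1 so callers can detect that more jobs exist (an inference from
+ # the expectation below, not an upstream guarantee).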
+ expect(graphql_data_at(:project, :pipeline, :jobs, :count)).to eq(2)
+ end
+
+ context 'with invalid value' do
+ let(:limit) { 1500 }
+
+ it 'returns a validation error' do
+ expect(graphql_errors).to include(a_hash_including('message' => 'limit must be less than or equal to 1000'))
+ end
+ end
+ end
+
+ context 'with jobs filter' do
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ jobs(statuses: FAILED) {
+ count
+ }
+ }
+ }
+ )
+ end
+
+ it 'returns the number of failed jobs' do
+ expect(graphql_data_at(:project, :jobs, :count)).to eq(1)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 6fa455cbfca..446d1fb1bdb 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Query.runner(id)' do
create(:ci_runner, :instance, description: 'Runner 1', contacted_at: 2.hours.ago,
active: true, version: 'adfe156', revision: 'a', locked: true, ip_address: '127.0.0.1', maximum_timeout: 600,
access_level: 0, tag_list: %w[tag1 tag2], run_untagged: true, executor_type: :custom,
- maintenance_note: 'Test maintenance note')
+ maintenance_note: '**Test maintenance note**')
end
let_it_be(:inactive_instance_runner) do
@@ -66,6 +66,8 @@ RSpec.describe 'Query.runner(id)' do
'architectureName' => runner.architecture,
'platformName' => runner.platform,
'maintenanceNote' => runner.maintenance_note,
+ 'maintenanceNoteHtml' =>
+ runner.maintainer_note.present? ? a_string_including('<strong>Test maintenance note</strong>') : '',
'jobCount' => 0,
'jobs' => a_hash_including("count" => 0, "nodes" => [], "pageInfo" => anything),
'projectCount' => nil,
@@ -150,34 +152,72 @@ RSpec.describe 'Query.runner(id)' do
end
describe 'for project runner' do
- using RSpec::Parameterized::TableSyntax
+ describe 'locked' do
+ using RSpec::Parameterized::TableSyntax
- where(is_locked: [true, false])
+ where(is_locked: [true, false])
- with_them do
- let(:project_runner) do
- create(:ci_runner, :project, description: 'Runner 3', contacted_at: 1.day.ago, active: false, locked: is_locked,
- version: 'adfe157', revision: 'b', ip_address: '10.10.10.10', access_level: 1, run_untagged: true)
- end
+ with_them do
+ let(:project_runner) do
+ create(:ci_runner, :project, description: 'Runner 3', contacted_at: 1.day.ago, active: false, locked: is_locked,
+ version: 'adfe157', revision: 'b', ip_address: '10.10.10.10', access_level: 1, run_untagged: true)
+ end
- let(:query) do
- wrap_fields(query_graphql_path(query_path, all_graphql_fields_for('CiRunner')))
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, 'id locked'))
+ end
+
+ let(:query_path) do
+ [
+ [:runner, { id: project_runner.to_global_id.to_s }]
+ ]
+ end
+
+ it 'retrieves correct locked value' do
+ post_graphql(query, current_user: user)
+
+ runner_data = graphql_data_at(:runner)
+
+ expect(runner_data).to match a_hash_including(
+ 'id' => project_runner.to_global_id.to_s,
+ 'locked' => is_locked
+ )
+ end
end
+ end
- let(:query_path) do
- [
- [:runner, { id: project_runner.to_global_id.to_s }]
- ]
+ describe 'ownerProject' do
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:runner1) { create(:ci_runner, :project, projects: [project2, project1]) }
+ let_it_be(:runner2) { create(:ci_runner, :project, projects: [project1, project2]) }
+
+ let(:runner_query_fragment) { 'id ownerProject { id }' }
+ let(:query) do
+ %(
+ query {
+ runner1: runner(id: "#{runner1.to_global_id}") { #{runner_query_fragment} }
+ runner2: runner(id: "#{runner2.to_global_id}") { #{runner_query_fragment} }
+ }
+ )
end
- it 'retrieves correct locked value' do
+ it 'retrieves correct ownerProject.id values' do
post_graphql(query, current_user: user)
- runner_data = graphql_data_at(:runner)
-
- expect(runner_data).to match a_hash_including(
- 'id' => project_runner.to_global_id.to_s,
- 'locked' => is_locked
+ expect(graphql_data).to match a_hash_including(
+ 'runner1' => {
+ 'id' => runner1.to_global_id.to_s,
+ 'ownerProject' => {
+ 'id' => project2.to_global_id.to_s
+ }
+ },
+ 'runner2' => {
+ 'id' => runner2.to_global_id.to_s,
+ 'ownerProject' => {
+ 'id' => project1.to_global_id.to_s
+ }
+ }
)
end
end
@@ -405,17 +445,35 @@ RSpec.describe 'Query.runner(id)' do
<<~SINGLE
runner(id: "#{runner.to_global_id}") {
#{all_graphql_fields_for('CiRunner', excluded: excluded_fields)}
+ groups {
+ nodes {
+ id
+ }
+ }
+ projects {
+ nodes {
+ id
+ }
+ }
+ ownerProject {
+ id
+ }
}
SINGLE
end
- # Currently excluding a known N+1 issue, see https://gitlab.com/gitlab-org/gitlab/-/issues/334759
- let(:excluded_fields) { %w[jobCount] }
+ let(:active_project_runner2) { create(:ci_runner, :project) }
+ let(:active_group_runner2) { create(:ci_runner, :group) }
+
+ # Currently excluding known N+1 issues, see https://gitlab.com/gitlab-org/gitlab/-/issues/334759
+ let(:excluded_fields) { %w[jobCount groups projects ownerProject] }
let(:single_query) do
<<~QUERY
{
- active: #{runner_query(active_instance_runner)}
+ instance_runner1: #{runner_query(active_instance_runner)}
+ project_runner1: #{runner_query(active_project_runner)}
+ group_runner1: #{runner_query(active_group_runner)}
}
QUERY
end
@@ -423,22 +481,51 @@ RSpec.describe 'Query.runner(id)' do
let(:double_query) do
<<~QUERY
{
- active: #{runner_query(active_instance_runner)}
- inactive: #{runner_query(inactive_instance_runner)}
+ instance_runner1: #{runner_query(active_instance_runner)}
+ instance_runner2: #{runner_query(inactive_instance_runner)}
+ group_runner1: #{runner_query(active_group_runner)}
+ group_runner2: #{runner_query(active_group_runner2)}
+ project_runner1: #{runner_query(active_project_runner)}
+ project_runner2: #{runner_query(active_project_runner2)}
}
QUERY
end
it 'does not execute more queries per runner', :aggregate_failures do
# warm-up license cache and so on:
- post_graphql(single_query, current_user: user)
+ post_graphql(double_query, current_user: user)
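+ # warming up with the double query presumably primes caches for every runner
+ # type before the single-query baseline is recorded below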
control = ActiveRecord::QueryRecorder.new { post_graphql(single_query, current_user: user) }
expect { post_graphql(double_query, current_user: user) }
.not_to exceed_query_limit(control)
- expect(graphql_data_at(:active)).not_to be_nil
- expect(graphql_data_at(:inactive)).not_to be_nil
+
+ expect(graphql_data.count).to eq 6
+ expect(graphql_data).to match(
+ a_hash_including(
+ 'instance_runner1' => a_hash_including('id' => active_instance_runner.to_global_id.to_s),
+ 'instance_runner2' => a_hash_including('id' => inactive_instance_runner.to_global_id.to_s),
+ 'group_runner1' => a_hash_including(
+ 'id' => active_group_runner.to_global_id.to_s,
+ 'groups' => { 'nodes' => [a_hash_including('id' => group.to_global_id.to_s)] }
+ ),
+ 'group_runner2' => a_hash_including(
+ 'id' => active_group_runner2.to_global_id.to_s,
+ 'groups' => { 'nodes' => [a_hash_including('id' => active_group_runner2.groups[0].to_global_id.to_s)] }
+ ),
+ 'project_runner1' => a_hash_including(
+ 'id' => active_project_runner.to_global_id.to_s,
+ 'projects' => { 'nodes' => [a_hash_including('id' => active_project_runner.projects[0].to_global_id.to_s)] },
+ 'ownerProject' => a_hash_including('id' => active_project_runner.projects[0].to_global_id.to_s)
+ ),
+ 'project_runner2' => a_hash_including(
+ 'id' => active_project_runner2.to_global_id.to_s,
+ 'projects' => {
+ 'nodes' => [a_hash_including('id' => active_project_runner2.projects[0].to_global_id.to_s)]
+ },
+ 'ownerProject' => a_hash_including('id' => active_project_runner2.projects[0].to_global_id.to_s)
+ )
+ ))
end
end
end
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index d3e94671724..a5b8115286e 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -18,7 +18,10 @@ RSpec.describe 'Query.runners' do
let(:fields) do
<<~QUERY
nodes {
- #{all_graphql_fields_for('CiRunner')}
+ #{all_graphql_fields_for('CiRunner', excluded: %w[ownerProject])}
+ ownerProject {
+ id
+ }
}
QUERY
end
@@ -123,3 +126,47 @@ RSpec.describe 'Query.runners' do
end
end
end
+
+RSpec.describe 'Group.runners' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_owner) { create_default(:user) }
+
+ before do
+ group.add_owner(group_owner)
+ end
+
+ describe 'edges' do
+ let_it_be(:runner) do
+ create(:ci_runner, :group, active: false, version: 'def', revision: '456',
+ description: 'Project runner', groups: [group], ip_address: '127.0.0.1')
+ end
+
+ let(:query) do
+ %(
+ query($path: ID!) {
+ group(fullPath: $path) {
+ runners {
+ edges {
+ webUrl
+ editUrl
+ node { #{all_graphql_fields_for('CiRunner')} }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ it 'contains custom edge information' do
+ r = GitlabSchema.execute(query,
+ context: { current_user: group_owner },
+ variables: { path: group.full_path })
+
+ edges = graphql_dig_at(r.to_h, :data, :group, :runners, :edges)
+
+ expect(edges).to contain_exactly(a_graphql_entity_for(web_url: be_present, edit_url: be_present))
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index e80f5e0e0ff..c1beadb6c45 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe 'GitlabSchema configurations' do
let(:query) { File.read(Rails.root.join('spec/fixtures/api/graphql/introspection.graphql')) }
it 'logs the query complexity and depth' do
- expect_any_instance_of(Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer).to receive(:duration).and_return(7)
+ expect_any_instance_of(Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer).to receive(:duration).and_return(7)
expect(Gitlab::GraphqlLogger).to receive(:info).with(
hash_including(
diff --git a/spec/requests/api/graphql/issue/issue_spec.rb b/spec/requests/api/graphql/issue/issue_spec.rb
index 05fd6bf3022..6e2d736f244 100644
--- a/spec/requests/api/graphql/issue/issue_spec.rb
+++ b/spec/requests/api/graphql/issue/issue_spec.rb
@@ -129,6 +129,29 @@ RSpec.describe 'Query.issue(id)' do
expect(graphql_errors.first['message']).to eq("\"#{gid}\" does not represent an instance of Issue")
end
end
+
+ context 'when selecting `closed_as_duplicate_of`' do
+ let(:issue_fields) { ['closedAsDuplicateOf { id }'] }
+ let(:duplicate_issue) { create(:issue, project: project) }
+
+ before do
+ issue.update!(duplicated_to_id: duplicate_issue.id)
+
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'returns the related issue' do
+ expect(issue_data['closedAsDuplicateOf']['id']).to eq(duplicate_issue.to_global_id.to_s)
+ end
+
+ context 'no permission to related issue' do
+ let(:duplicate_issue) { create(:issue) }
+
+ it 'does not return the related issue' do
+ expect(issue_data['closedAsDuplicateOf']).to eq(nil)
+ end
+ end
+ end
end
context 'when there is a confidential issue' do
diff --git a/spec/requests/api/graphql/milestone_spec.rb b/spec/requests/api/graphql/milestone_spec.rb
index 59de116fa2b..f6835936418 100644
--- a/spec/requests/api/graphql/milestone_spec.rb
+++ b/spec/requests/api/graphql/milestone_spec.rb
@@ -5,43 +5,125 @@ require 'spec_helper'
RSpec.describe 'Querying a Milestone' do
include GraphqlHelpers
- let_it_be(:current_user) { create(:user) }
+ let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:release_a) { create(:release, project: project) }
+ let_it_be(:release_b) { create(:release, project: project) }
- let(:query) do
- graphql_query_for('milestone', { id: milestone.to_global_id.to_s }, 'title')
+ before_all do
+ milestone.releases << [release_a, release_b]
+ project.add_guest(guest)
end
- subject { graphql_data['milestone'] }
-
- before do
- post_graphql(query, current_user: current_user)
+ let(:expected_release_nodes) do
+ contain_exactly(a_graphql_entity_for(release_a), a_graphql_entity_for(release_b))
end
- context 'when the user has access to the milestone' do
- before_all do
- project.add_guest(current_user)
+ context 'when we post the query' do
+ let(:current_user) { nil }
+ let(:query) do
+ graphql_query_for('milestone', { id: milestone.to_global_id.to_s }, all_graphql_fields_for('Milestone'))
end
- it_behaves_like 'a working graphql query'
+ subject { graphql_data['milestone'] }
- it { is_expected.to include('title' => milestone.name) }
- end
+ before do
+ post_graphql(query, current_user: current_user)
+ end
- context 'when the user does not have access to the milestone' do
- it_behaves_like 'a working graphql query'
+ context 'when the user has access to the milestone' do
+ let(:current_user) { guest }
- it { is_expected.to be_nil }
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to include('title' => milestone.name) }
+
+ it 'contains release information' do
+ is_expected.to include('releases' => include('nodes' => expected_release_nodes))
+ end
+ end
+
+ context 'when the user does not have access to the milestone' do
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when ID argument is missing' do
+ let(:query) do
+ graphql_query_for('milestone', {}, 'title')
+ end
+
+ it 'raises an exception' do
+ expect(graphql_errors).to include(a_hash_including('message' => "Field 'milestone' is missing required arguments: id"))
+ end
+ end
end
- context 'when ID argument is missing' do
- let(:query) do
- graphql_query_for('milestone', {}, 'title')
+ context 'when there are two milestones' do
+ let_it_be(:milestone_b) { create(:milestone, project: project) }
+
+ let(:current_user) { guest }
+ let(:milestone_fields) do
+ <<~GQL
+ fragment milestoneFields on Milestone {
+ #{all_graphql_fields_for('Milestone', max_depth: 1)}
+ releases { nodes { #{all_graphql_fields_for('Release', max_depth: 1)} } }
+ }
+ GQL
+ end
+
+ let(:single_query) do
+ <<~GQL
+ query ($id_a: MilestoneID!) {
+ a: milestone(id: $id_a) { ...milestoneFields }
+ }
+
+ #{milestone_fields}
+ GQL
+ end
+
+ let(:multi_query) do
+ <<~GQL
+ query ($id_a: MilestoneID!, $id_b: MilestoneID!) {
+ a: milestone(id: $id_a) { ...milestoneFields }
+ b: milestone(id: $id_b) { ...milestoneFields }
+ }
+ #{milestone_fields}
+ GQL
+ end
+
+ it 'produces correct results' do
+ r = run_with_clean_state(multi_query,
+ context: { current_user: current_user },
+ variables: {
+ id_a: global_id_of(milestone).to_s,
+ id_b: milestone_b.to_global_id.to_s
+ })
+
+ expect(r.to_h['errors']).to be_blank
+ expect(graphql_dig_at(r.to_h, :data, :a, :releases, :nodes)).to match expected_release_nodes
+ expect(graphql_dig_at(r.to_h, :data, :b, :releases, :nodes)).to be_empty
end
- it 'raises an exception' do
- expect(graphql_errors).to include(a_hash_including('message' => "Field 'milestone' is missing required arguments: id"))
+ it 'does not suffer from N+1 performance issues' do
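+ # Record the queries needed for a single milestone, then assert that
+ # fetching two milestones does not exceed that baseline.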
+ baseline = ActiveRecord::QueryRecorder.new do
+ run_with_clean_state(single_query,
+ context: { current_user: current_user },
+ variables: { id_a: milestone.to_global_id.to_s })
+ end
+
+ multi = ActiveRecord::QueryRecorder.new do
+ run_with_clean_state(multi_query,
+ context: { current_user: current_user },
+ variables: {
+ id_a: milestone.to_global_id.to_s,
+ id_b: milestone_b.to_global_id.to_s
+ })
+ end
+
+ expect(multi).not_to exceed_query_limit(baseline)
end
end
end
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb
index 37656ab4eea..7abd5ca8772 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_destroy_spec.rb
@@ -28,4 +28,21 @@ RSpec.describe 'PipelineDestroy' do
expect(response).to have_gitlab_http_status(:success)
expect { pipeline.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
+
+ context 'when project is undergoing stats refresh' do
+ before do
+ create(:project_build_artifacts_size_refresh, :pending, project: pipeline.project)
+ end
+
+ it 'returns an error and does not destroy the pipeline' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger)
+ .to receive(:warn_request_rejected_during_stats_refresh)
+ .with(pipeline.project.id)
+
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_mutation_response(:pipeline_destroy)['errors']).not_to be_empty
+ expect(pipeline.reload).to be_persisted
+ end
+ end
end
diff --git a/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb b/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb
index decb2e7bccc..ef00f45ef18 100644
--- a/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb
+++ b/spec/requests/api/graphql/mutations/container_repository/destroy_tags_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe 'Destroying a container repository tags' do
it 'returns too many tags error' do
expect { subject }.not_to change { ::Packages::Event.count }
- explanation = graphql_errors.dig(0, 'extensions', 'problems', 0, 'explanation')
+ explanation = graphql_errors.dig(0, 'message')
expect(explanation).to eq(Mutations::ContainerRepositories::DestroyTags::TOO_MANY_TAGS_ERROR_MESSAGE)
end
end
diff --git a/spec/requests/api/graphql/mutations/design_management/delete_spec.rb b/spec/requests/api/graphql/mutations/design_management/delete_spec.rb
index 1f43f113e65..e2ab08b301b 100644
--- a/spec/requests/api/graphql/mutations/design_management/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/design_management/delete_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe "deleting designs" do
context 'the designs list is empty' do
it_behaves_like 'a failed request' do
let(:designs) { [] }
- let(:the_error) { a_string_matching %r/was provided invalid value/ }
+ let(:the_error) { a_string_matching %r/no filenames/ }
end
end
diff --git a/spec/requests/api/graphql/mutations/incident_management/timeline_event/create_spec.rb b/spec/requests/api/graphql/mutations/incident_management/timeline_event/create_spec.rb
index 3ea8b38e20f..923e12a3c06 100644
--- a/spec/requests/api/graphql/mutations/incident_management/timeline_event/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/incident_management/timeline_event/create_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe 'Creating an incident timeline event' do
},
'note' => note,
'action' => 'comment',
- 'editable' => false,
+ 'editable' => true,
'occurredAt' => event_occurred_at.iso8601
)
end
diff --git a/spec/requests/api/graphql/mutations/incident_management/timeline_event/destroy_spec.rb b/spec/requests/api/graphql/mutations/incident_management/timeline_event/destroy_spec.rb
index faff3bfe23a..85208869ad9 100644
--- a/spec/requests/api/graphql/mutations/incident_management/timeline_event/destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/incident_management/timeline_event/destroy_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe 'Removing an incident timeline event' do
},
'note' => timeline_event.note,
'noteHtml' => timeline_event.note_html,
- 'editable' => false,
+ 'editable' => true,
'action' => timeline_event.action,
'occurredAt' => timeline_event.occurred_at.iso8601,
'createdAt' => timeline_event.created_at.iso8601,
diff --git a/spec/requests/api/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb b/spec/requests/api/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb
index b92f6af1d3d..9272e218172 100644
--- a/spec/requests/api/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/incident_management/timeline_event/promote_from_note_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe 'Promote an incident timeline event from a comment' do
},
'note' => comment.note,
'action' => 'comment',
- 'editable' => false,
+ 'editable' => true,
'occurredAt' => comment.created_at.iso8601
)
end
diff --git a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
index 715507c3cc5..395a490bfc3 100644
--- a/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_crm_contacts_spec.rb
@@ -102,18 +102,6 @@ RSpec.describe 'Setting issues crm contacts' do
group.add_reporter(user)
end
- context 'when the feature is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'raises expected error' do
- post_graphql_mutation(mutation, current_user: user)
-
- expect(graphql_errors).to include(a_hash_including('message' => 'Feature disabled'))
- end
- end
-
it_behaves_like 'successful mutation'
context 'when the contact does not exist' do
diff --git a/spec/requests/api/graphql/mutations/issues/set_escalation_status_spec.rb b/spec/requests/api/graphql/mutations/issues/set_escalation_status_spec.rb
index 0166871502b..a81364d37b2 100644
--- a/spec/requests/api/graphql/mutations/issues/set_escalation_status_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_escalation_status_spec.rb
@@ -49,14 +49,6 @@ RSpec.describe 'Setting the escalation status of an incident' do
it_behaves_like 'a mutation that returns top-level errors', errors: ['Feature unavailable for provided issue']
end
- context 'with feature disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it_behaves_like 'a mutation that returns top-level errors', errors: ['Feature unavailable for provided issue']
- end
-
it 'sets given escalation_policy to the escalation status for the issue' do
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb b/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
index 45cc70f09fd..b438e1ba881 100644
--- a/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/import_users_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Importing Jira Users' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
index b14305281af..1508ba31e37 100644
--- a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Starting a Jira Import' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
include GraphqlHelpers
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
index 5bc3c68cf26..9ef443af76a 100644
--- a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
@@ -76,7 +76,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
context 'when environment_id is missing' do
let(:mutation) do
variables = {
- environment_id: nil,
starting_at: starting_at,
ending_at: ending_at,
dashboard_path: dashboard_path,
@@ -147,7 +146,6 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
context 'when cluster_id is missing' do
let(:mutation) do
variables = {
- cluster_id: nil,
starting_at: starting_at,
ending_at: ending_at,
dashboard_path: dashboard_path,
diff --git a/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb
index 0f7ccac3179..c4674155aa0 100644
--- a/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/reposition_image_diff_note_spec.rb
@@ -68,15 +68,7 @@ RSpec.describe 'Repositioning an ImageDiffNote' do
let(:new_position) { { x: nil } }
it_behaves_like 'a mutation that returns top-level errors' do
- let(:match_errors) { include(/RepositionImageDiffNoteInput! was provided invalid value/) }
- end
-
- it 'contains an explanation for the error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- explanation = graphql_errors.first['extensions']['problems'].first['explanation']
-
- expect(explanation).to eq('At least one property of `UpdateDiffImagePositionInput` must be set')
+ let(:match_errors) { include(/At least one property of `UpdateDiffImagePositionInput` must be set/) }
end
end
end
diff --git a/spec/requests/api/graphql/mutations/packages/cleanup/policy/update_spec.rb b/spec/requests/api/graphql/mutations/packages/cleanup/policy/update_spec.rb
new file mode 100644
index 00000000000..7e00f3ca53a
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/packages/cleanup/policy/update_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Updating the packages cleanup policy' do
+ include GraphqlHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:params) do
+ {
+ project_path: project.full_path,
+ keep_n_duplicated_package_files: 'TWENTY_PACKAGE_FILES'
+ }
+ end
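+ # 'TWENTY_PACKAGE_FILES' is the GraphQL enum form of the setting; the
+ # expectations below verify it is persisted as '20' on the policy.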
+
+ let(:mutation) do
+ graphql_mutation(:update_packages_cleanup_policy, params,
+ <<~QUERY
+ packagesCleanupPolicy {
+ keepNDuplicatedPackageFiles
+ nextRunAt
+ }
+ errors
+ QUERY
+ )
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:update_packages_cleanup_policy) }
+ let(:packages_cleanup_policy_response) { mutation_response['packagesCleanupPolicy'] }
+
+ shared_examples 'accepting the mutation request and updates the existing policy' do
+ it 'returns the updated packages cleanup policy' do
+ expect { subject }.not_to change { ::Packages::Cleanup::Policy.count }
+
+ expect(project.packages_cleanup_policy.keep_n_duplicated_package_files).to eq('20')
+ expect_graphql_errors_to_be_empty
+ expect(packages_cleanup_policy_response['keepNDuplicatedPackageFiles'])
+ .to eq(params[:keep_n_duplicated_package_files])
+ expect(packages_cleanup_policy_response['nextRunAt']).not_to eq(nil)
+ end
+ end
+
+ shared_examples 'accepting the mutation request and creates a policy' do
+ it 'returns the created packages cleanup policy' do
+ expect { subject }.to change { ::Packages::Cleanup::Policy.count }.by(1)
+
+ expect(project.packages_cleanup_policy.keep_n_duplicated_package_files).to eq('20')
+ expect_graphql_errors_to_be_empty
+ expect(packages_cleanup_policy_response['keepNDuplicatedPackageFiles'])
+ .to eq(params[:keep_n_duplicated_package_files])
+ expect(packages_cleanup_policy_response['nextRunAt']).not_to eq(nil)
+ end
+ end
+
+ shared_examples 'denying the mutation request' do
+ it 'returns an error' do
+ expect { subject }.not_to change { ::Packages::Cleanup::Policy.count }
+
+ expect(project.packages_cleanup_policy.keep_n_duplicated_package_files).not_to eq('20')
+ expect(mutation_response).to be_nil
+ expect_graphql_errors_to_include(/you don't have permission to perform this action/)
+ end
+ end
+
+ describe 'post graphql mutation' do
+ subject { post_graphql_mutation(mutation, current_user: user) }
+
+ context 'with existing packages cleanup policy' do
+ let_it_be(:project_packages_cleanup_policy) { create(:packages_cleanup_policy, project: project) }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'accepting the mutation request and updates the existing policy'
+ :developer | 'denying the mutation request'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :anonymous | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'without existing packages cleanup policy' do
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'accepting the mutation request and creates a policy'
+ :developer | 'denying the mutation request'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :anonymous | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/packages/destroy_files_spec.rb b/spec/requests/api/graphql/mutations/packages/destroy_files_spec.rb
new file mode 100644
index 00000000000..002cd634ebd
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/packages/destroy_files_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Destroying multiple package files' do
+ using RSpec::Parameterized::TableSyntax
+
+ include GraphqlHelpers
+
+ let_it_be_with_reload(:package) { create(:maven_package) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { package.project }
+
+ let(:ids) { package.package_files.first(2).map { |pf| pf.to_global_id.to_s } }
+
+ let(:query) do
+ <<~GQL
+ errors
+ GQL
+ end
+
+ let(:params) do
+ {
+ project_path: project.full_path,
+ ids: ids
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:destroy_package_files, params, query) }
+
+ describe 'post graphql mutation' do
+ subject(:mutation_request) { post_graphql_mutation(mutation, current_user: user) }
+
+ shared_examples 'destroying the package files' do
+ it 'marks the package files as pending destruction' do
+ expect { mutation_request }.to change { ::Packages::PackageFile.pending_destruction.count }.by(2)
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ shared_examples 'denying the mutation request' do |response = "you don't have permission to perform this action"|
+ it 'does not mark the package files as pending destruction' do
+ expect { mutation_request }.not_to change { ::Packages::PackageFile.pending_destruction.count }
+
+ expect_graphql_errors_to_include(response)
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ context 'with valid params' do
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'destroying the package files'
+ :developer | 'denying the mutation request'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :anonymous | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+
+ context 'with more than 100 files' do
+ let(:ids) { package.package_files.map { |pf| pf.to_global_id.to_s } }
+
+ before do
+ project.add_maintainer(user)
+ create_list(:package_file, 99, package: package)
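+ # the maven package fixture already carries package files, so 99 more
+ # pushes the total past the 100-file limit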
+ end
+
+ it_behaves_like 'denying the mutation request', 'Cannot delete more than 100 files'
+ end
+
+ context 'with files outside of the project' do
+ let_it_be(:package2) { create(:maven_package) }
+
+ let(:ids) { super().push(package2.package_files.first.to_global_id.to_s) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'denying the mutation request', 'All files must be in the requested project'
+ end
+ end
+
+ context 'with invalid params' do
+ let(:params) { { id: 'foo' } }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'denying the mutation request', 'invalid value for id'
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/releases/create_spec.rb b/spec/requests/api/graphql/mutations/releases/create_spec.rb
index 86995c10f10..1e62942c29d 100644
--- a/spec/requests/api/graphql/mutations/releases/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/releases/create_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe 'Creation of a new release' do
let(:mutation_name) { :release_create }
let(:tag_name) { 'v7.12.5'}
+ let(:tag_message) { nil }
let(:ref) { 'master'}
let(:name) { 'Version 7.12.5'}
let(:description) { 'Release 7.12.5 :rocket:' }
@@ -29,6 +30,7 @@ RSpec.describe 'Creation of a new release' do
{
projectPath: project.full_path,
tagName: tag_name,
+ tagMessage: tag_message,
ref: ref,
name: name,
description: description,
@@ -191,10 +193,26 @@ RSpec.describe 'Creation of a new release' do
context 'when the provided tag does not already exist' do
let(:tag_name) { 'v7.12.5-alpha' }
+ after do
+ project.repository.rm_tag(developer, tag_name)
+ end
+
it_behaves_like 'no errors'
- it 'creates a new tag' do
+ it 'creates a new lightweight tag' do
expect { create_release }.to change { Project.find_by_id(project.id).repository.tag_count }.by(1)
+ expect(project.repository.find_tag(tag_name).message).to be_blank
+ end
+
+ context 'and tag_message is provided' do
+ let(:tag_message) { 'Annotated tag message' }
+
+ it_behaves_like 'no errors'
+
+ it 'creates a new annotated tag with the message' do
+ expect { create_release }.to change { Project.find_by_id(project.id).repository.tag_count }.by(1)
+ expect(project.repository.find_tag(tag_name).message).to eq(tag_message)
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb b/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb
index 85194e6eb20..e1c7fd9d60d 100644
--- a/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/user_preferences/update_spec.rb
@@ -28,17 +28,6 @@ RSpec.describe Mutations::UserPreferences::Update do
expect(current_user.user_preference.persisted?).to eq(true)
expect(current_user.user_preference.issues_sort).to eq(Types::IssueSortEnum.values[sort_value].value.to_s)
end
-
- context 'when incident_escalations feature flag is disabled' do
- let(:sort_value) { 'ESCALATION_STATUS_ASC' }
-
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it_behaves_like 'a mutation that returns top-level errors',
- errors: ['Feature flag `incident_escalations` must be enabled to use this sort order.']
- end
end
context 'when user has existing preference' do
@@ -56,16 +45,5 @@ RSpec.describe Mutations::UserPreferences::Update do
expect(current_user.user_preference.issues_sort).to eq(Types::IssueSortEnum.values[sort_value].value.to_s)
end
-
- context 'when incident_escalations feature flag is disabled' do
- let(:sort_value) { 'ESCALATION_STATUS_DESC' }
-
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it_behaves_like 'a mutation that returns top-level errors',
- errors: ['Feature flag `incident_escalations` must be enabled to use this sort order.']
- end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/update_task_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_task_spec.rb
new file mode 100644
index 00000000000..32468a46ace
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/work_items/update_task_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update a work item task' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
+ let_it_be(:unauthorized_work_item) { create(:work_item) }
+ let_it_be(:referenced_work_item, refind: true) { create(:work_item, project: project, title: 'REFERENCED') }
+ let_it_be(:parent_work_item) do
+ create(:work_item, project: project, description: "- [ ] #{referenced_work_item.to_reference}+")
+ end
+
+ let(:task) { referenced_work_item }
+ let(:work_item) { parent_work_item }
+ let(:task_params) { { 'title' => 'UPDATED' } }
+ let(:task_input) { { 'id' => task.to_global_id.to_s }.merge(task_params) }
+ let(:input) { { 'id' => work_item.to_global_id.to_s, 'taskData' => task_input } }
+ let(:mutation) { graphql_mutation(:workItemUpdateTask, input) }
+ let(:mutation_response) { graphql_mutation_response(:work_item_update_task) }
+
+ context 'the user is not allowed to read a work item' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to update a work item' do
+ let(:current_user) { developer }
+
+ it 'updates the work item and invalidates markdown cache on the original work item' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ referenced_work_item.reload
+ end.to change(referenced_work_item, :title).from(referenced_work_item.title).to('UPDATED')
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to include(
+ 'workItem' => hash_including(
+ 'title' => work_item.title,
+ 'descriptionHtml' => a_string_including('UPDATED')
+ ),
+ 'task' => hash_including(
+ 'title' => 'UPDATED'
+ )
+ )
+ end
+
+ context 'when providing invalid task params' do
+ let(:task_params) { { 'title' => '' } }
+
+ it 'makes no changes to the DB and returns an error message' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ task.reload
+ end.to not_change(task, :title).and(
+ not_change(work_item, :description_html)
+ )
+
+ expect(mutation_response['errors']).to contain_exactly("Title can't be blank")
+ end
+ end
+
+ context 'when user cannot update the task' do
+ let(:task) { unauthorized_work_item }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ it_behaves_like 'has spam protection' do
+ let(:mutation_class) { ::Mutations::WorkItems::UpdateTask }
+ end
+
+ context 'when the work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'does not update the task item and returns an error' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ task.reload
+ end.to not_change(task, :title)
+
+ expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
+ end
+ end
+ end
+
+ context 'when user does not have permissions to update a work item' do
+ let(:current_user) { developer }
+ let(:work_item) { unauthorized_work_item }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb
new file mode 100644
index 00000000000..595d8fe97ed
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update work item widgets' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
+ let_it_be(:work_item, refind: true) { create(:work_item, project: project) }
+
+ let(:input) do
+ {
+ 'descriptionWidget' => { 'description' => 'updated description' }
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:workItemUpdateWidgets, input.merge('id' => work_item.to_global_id.to_s)) }
+
+ let(:mutation_response) { graphql_mutation_response(:work_item_update_widgets) }
+
+ context 'the user is not allowed to update a work item' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to update a work item', :aggregate_failures do
+ let(:current_user) { developer }
+
+ context 'when the updated work item is not valid' do
+ it 'returns validation errors without the work item' do
+ errors = ActiveModel::Errors.new(work_item).tap { |e| e.add(:description, 'error message') }
+
+ allow_next_found_instance_of(::WorkItem) do |instance|
+ allow(instance).to receive(:valid?).and_return(false)
+ allow(instance).to receive(:errors).and_return(errors)
+ end
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['workItem']).to be_nil
+ expect(mutation_response['errors']).to match_array(['Description error message'])
+ end
+ end
+
+ it 'updates the work item widgets' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :description).from(nil).to('updated description')
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']).to include(
+ 'title' => work_item.title
+ )
+ end
+
+ it_behaves_like 'has spam protection' do
+ let(:mutation_class) { ::Mutations::WorkItems::UpdateWidgets }
+ end
+
+ context 'when the work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'does not update the work item and returns an error' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to not_change(work_item, :title)
+
+ expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/incident_management/timeline_events_spec.rb b/spec/requests/api/graphql/project/incident_management/timeline_events_spec.rb
index 708fa96986c..31fef75f679 100644
--- a/spec/requests/api/graphql/project/incident_management/timeline_events_spec.rb
+++ b/spec/requests/api/graphql/project/incident_management/timeline_events_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe 'getting incident timeline events' do
'id' => promoted_from_note.to_global_id.to_s,
'body' => promoted_from_note.note
},
- 'editable' => false,
+ 'editable' => true,
'action' => timeline_event.action,
'occurredAt' => timeline_event.occurred_at.iso8601,
'createdAt' => timeline_event.created_at.iso8601,
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index f358ec3e53f..69e14eace66 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -635,6 +635,18 @@ RSpec.describe 'getting an issue list for a project' do
include_examples 'N+1 query check'
end
+ context 'when requesting `closed_as_duplicate_of`' do
+ let(:requested_fields) { 'closedAsDuplicateOf { id }' }
+ let(:issue_a_dup) { create(:issue, project: project) }
+ let(:issue_b_dup) { create(:issue, project: project) }
+
+ before do
+ issue_a.update!(duplicated_to_id: issue_a_dup)
+ issue_b.update!(duplicated_to_id: issue_b_dup)
+ end
+
+ include_examples 'N+1 query check'
+ end
end
def issues_ids
diff --git a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
index 820a5d818c7..4dc272b5c2e 100644
--- a/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request/pipelines_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:author) { create(:user) }
+ let_it_be(:mr_nodes_path) { [:data, :project, :merge_requests, :nodes] }
let_it_be(:merge_requests) do
[
create(:merge_request, author: author, source_project: project),
@@ -33,8 +34,49 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
GQL
end
- def run_query(first = nil)
- post_graphql(query, current_user: author, variables: { path: project.full_path, first: first })
+ before do
+ merge_requests.first(2).each do |mr|
+ shas = mr.recent_diff_head_shas
+
+ shas.each do |sha|
+ create(:ci_pipeline, :success, project: project, ref: mr.source_branch, sha: sha)
+ end
+ end
+ end
+
+ it 'produces correct results' do
+ r = run_query(3)
+
+ nodes = graphql_dig_at(r, *mr_nodes_path)
+
+ expect(nodes).to all(match('iid' => be_present, 'pipelines' => include('count' => be_a(Integer))))
+ expect(graphql_dig_at(r, *mr_nodes_path, :pipelines, :count)).to contain_exactly(1, 1, 0)
+ end
+
+ it 'is scalable', :request_store, :use_clean_rails_memory_store_caching do
+ baseline = ActiveRecord::QueryRecorder.new { run_query(1) }
+
+ expect { run_query(2) }.not_to exceed_query_limit(baseline)
+ end
+ end
+
+ describe '.nodes' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!, $first: Int) {
+ project(fullPath: $path) {
+ mergeRequests(first: $first) {
+ nodes {
+ iid
+ pipelines {
+ count
+ nodes { id }
+ }
+ }
+ }
+ }
+ }
+ GQL
end
before do
@@ -48,18 +90,27 @@ RSpec.describe 'Query.project.mergeRequests.pipelines' do
end
it 'produces correct results' do
- run_query(2)
-
- p_nodes = graphql_data_at(:project, :merge_requests, :nodes)
+ r = run_query
- expect(p_nodes).to all(match('iid' => be_present, 'pipelines' => match('count' => 1)))
+ expect(graphql_dig_at(r, *mr_nodes_path, :pipelines, :nodes, :id).uniq.size).to eq 3
end
it 'is scalable', :request_store, :use_clean_rails_memory_store_caching do
- # warm up
- run_query
+ baseline = ActiveRecord::QueryRecorder.new { run_query(1) }
- expect { run_query(2) }.to(issue_same_number_of_queries_as { run_query(1) }.ignoring_cached_queries)
+ expect { run_query(2) }.not_to exceed_query_limit(baseline)
end
+
+ it 'requests merge_request_diffs at most once' do
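+ # grep the recorded SQL for the merge_request_diffs table and expect a
+ # single SELECT, i.e. the diffs are loaded once for all merge requests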
+ r = ActiveRecord::QueryRecorder.new { run_query(2) }
+
+ expect(r.log.grep(/merge_request_diffs/)).to contain_exactly(a_string_including('SELECT'))
+ end
+ end
+
+ def run_query(first = nil)
+ run_with_clean_state(query,
+ context: { current_user: author },
+ variables: { path: project.full_path, first: first })
end
end
diff --git a/spec/requests/api/graphql/project/milestones_spec.rb b/spec/requests/api/graphql/project/milestones_spec.rb
index 3e8948d83b1..d1ee157fc74 100644
--- a/spec/requests/api/graphql/project/milestones_spec.rb
+++ b/spec/requests/api/graphql/project/milestones_spec.rb
@@ -59,6 +59,27 @@ RSpec.describe 'getting milestone listings nested in a project' do
end
end
+ context 'the user does not have access' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestones) { create_list(:milestone, 2, project: project) }
+
+ it 'is nil' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project)).to be_nil
+ end
+
+ context 'the user has access' do
+ let(:expected) { milestones }
+
+ before do
+ project.add_guest(current_user)
+ end
+
+ it_behaves_like 'searching with parameters'
+ end
+ end
+
context 'there are no search params' do
let(:search_params) { nil }
let(:expected) { all_milestones }
diff --git a/spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb b/spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb
new file mode 100644
index 00000000000..a025c57d4b8
--- /dev/null
+++ b/spec/requests/api/graphql/project/packages_cleanup_policy_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'getting the packages cleanup policy linked to a project' do
+ using RSpec::Parameterized::TableSyntax
+ include GraphqlHelpers
+
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:current_user) { project.first_owner }
+
+ let(:fields) do
+ <<~QUERY
+ #{all_graphql_fields_for('packages_cleanup_policy'.classify)}
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('packagesCleanupPolicy', {}, fields)
+ )
+ end
+
+ subject { post_graphql(query, current_user: current_user) }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ subject
+ end
+ end
+
+ context 'with an existing policy' do
+ let_it_be(:policy) { create(:packages_cleanup_policy, project: project) }
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ subject
+ end
+ end
+ end
+
+ context 'with different permissions' do
+ let_it_be(:current_user) { create(:user) }
+
+ let(:packages_cleanup_policy_response) { graphql_data_at('project', 'packagesCleanupPolicy') }
+
+ where(:visibility, :role, :policy_visible) do
+ :private | :maintainer | true
+ :private | :developer | false
+ :private | :reporter | false
+ :private | :guest | false
+ :private | :anonymous | false
+ :public | :maintainer | true
+ :public | :developer | false
+ :public | :reporter | false
+ :public | :guest | false
+ :public | :anonymous | false
+ end
+
+ with_them do
+ before do
+ project.update!(visibility: visibility.to_s)
+ project.add_user(current_user, role) unless role == :anonymous
+ end
+
+ it 'returns the proper response' do
+ subject
+
+ if policy_visible
+ expect(packages_cleanup_policy_response)
+ .to eq('keepNDuplicatedPackageFiles' => 'ALL_PACKAGE_FILES', 'nextRunAt' => nil)
+ else
+ expect(packages_cleanup_policy_response).to be_blank
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
new file mode 100644
index 00000000000..66742fcbeb6
--- /dev/null
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting a work item list for a project' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, :public, group: group) }
+ let_it_be(:current_user) { create(:user) }
+
+ let_it_be(:item1) { create(:work_item, project: project, discussion_locked: true, title: 'item1') }
+ let_it_be(:item2) { create(:work_item, project: project, title: 'item2') }
+ let_it_be(:confidential_item) { create(:work_item, confidential: true, project: project, title: 'item3') }
+ let_it_be(:other_item) { create(:work_item) }
+
+ let(:items_data) { graphql_data['project']['workItems']['edges'] }
+ let(:item_filter_params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ edges {
+ node {
+ #{all_graphql_fields_for('workItems'.classify)}
+ }
+ }
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('workItems', item_filter_params, fields)
+ )
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ context 'when the user does not have access to the item' do
+ before do
+ project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
+ end
+
+ it 'returns an empty list' do
+ post_graphql(query)
+
+ expect(items_data).to eq([])
+ end
+ end
+
+ context 'when work_items flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'returns an empty list' do
+ post_graphql(query)
+
+ expect(items_data).to eq([])
+ end
+ end
+
+ it 'returns only items visible to the user' do
+ post_graphql(query, current_user: current_user)
+
+ expect(item_ids).to eq([item2.to_global_id.to_s, item1.to_global_id.to_s])
+ end
+
+ context 'when the user can see confidential items' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'also returns confidential items' do
+ post_graphql(query, current_user: current_user)
+
+ expect(item_ids).to eq([confidential_item.to_global_id.to_s, item2.to_global_id.to_s, item1.to_global_id.to_s])
+ end
+ end
+
+ describe 'sorting and pagination' do
+ let(:data_path) { [:project, :work_items] }
+
+ def pagination_query(params)
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('workItems', params, "#{page_info} nodes { id }")
+ )
+ end
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ context 'when sorting by title ascending' do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :TITLE_ASC }
+ let(:first_param) { 2 }
+ let(:all_records) { [item1, item2, confidential_item].map { |item| item.to_global_id.to_s } }
+ end
+ end
+
+ context 'when sorting by title descending' do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :TITLE_DESC }
+ let(:first_param) { 2 }
+ let(:all_records) { [confidential_item, item2, item1].map { |item| item.to_global_id.to_s } }
+ end
+ end
+ end
+
+ def item_ids
+ graphql_dig_at(items_data, :node, :id)
+ end
+end
diff --git a/spec/requests/api/graphql/terraform/state/delete_spec.rb b/spec/requests/api/graphql/terraform/state/delete_spec.rb
index 35927d03b49..ba0619ea611 100644
--- a/spec/requests/api/graphql/terraform/state/delete_spec.rb
+++ b/spec/requests/api/graphql/terraform/state/delete_spec.rb
@@ -12,12 +12,12 @@ RSpec.describe 'delete a terraform state' do
let(:mutation) { graphql_mutation(:terraform_state_delete, id: state.to_global_id.to_s) }
before do
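+ # deletion is delegated to Terraform::States::TriggerDestroyService; stub it and assert it is called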
+ expect_next_instance_of(Terraform::States::TriggerDestroyService, state, current_user: user) do |service|
+ expect(service).to receive(:execute).once.and_return(ServiceResponse.success)
+ end
+
post_graphql_mutation(mutation, current_user: user)
end
include_examples 'a working graphql query'
-
- it 'deletes the state' do
- expect { state.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
end
diff --git a/spec/requests/api/graphql/user/starred_projects_query_spec.rb b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
index 37a85b98e5f..75a17ed34c4 100644
--- a/spec/requests/api/graphql/user/starred_projects_query_spec.rb
+++ b/spec/requests/api/graphql/user/starred_projects_query_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe 'Getting starredProjects of the user' do
let_it_be(:user, reload: true) { create(:user) }
let(:user_fields) { 'starredProjects { nodes { id } }' }
- let(:current_user) { nil }
let(:starred_projects) do
post_graphql(query, current_user: current_user)
@@ -34,21 +33,23 @@ RSpec.describe 'Getting starredProjects of the user' do
user.toggle_star(project_c)
end
- it_behaves_like 'a working graphql query' do
- before do
- post_graphql(query)
- end
- end
+ context 'with anonymous access' do
+ let(:current_user) { nil }
- it 'found only public project' do
- expect(starred_projects).to contain_exactly(
- a_graphql_entity_for(project_a)
- )
+ it 'returns nothing' do
+ expect(starred_projects).to be_nil
+ end
end
context 'the current user is the user' do
let(:current_user) { user }
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
it 'found all projects' do
expect(starred_projects).to contain_exactly(
a_graphql_entity_for(project_a),
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index 5b34c21989a..09bda8ee0d5 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -6,8 +6,13 @@ RSpec.describe 'Query.work_item(id)' do
include GraphqlHelpers
let_it_be(:developer) { create(:user) }
- let_it_be(:project) { create(:project, :private).tap { |project| project.add_developer(developer) } }
- let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:work_item) { create(:work_item, project: project, description: '- List item') }
+ let_it_be(:child_item1) { create(:work_item, :task, project: project) }
+ let_it_be(:child_item2) { create(:work_item, :task, confidential: true, project: project) }
+ let_it_be(:child_link1) { create(:parent_link, work_item_parent: work_item, work_item: child_item1) }
+ let_it_be(:child_link2) { create(:parent_link, work_item_parent: work_item, work_item: child_item2) }
let(:current_user) { developer }
let(:work_item_data) { graphql_data['workItem'] }
@@ -20,6 +25,9 @@ RSpec.describe 'Query.work_item(id)' do
context 'when the user can read the work item' do
before do
+ project.add_developer(developer)
+ project.add_guest(guest)
+
post_graphql(query, current_user: current_user)
end
@@ -38,6 +46,136 @@ RSpec.describe 'Query.work_item(id)' do
)
end
+ context 'when querying widgets' do
+ describe 'description widget' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetDescription {
+ description
+ descriptionHtml
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns widget information' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'widgets' => match_array([
+ hash_including(
+ 'type' => 'DESCRIPTION',
+ 'description' => work_item.description,
+ 'descriptionHtml' => ::MarkupHelper.markdown_field(work_item, :description, {})
+ ),
+ hash_including(
+ 'type' => 'HIERARCHY'
+ )
+ ])
+ )
+ end
+ end
+
+ describe 'hierarchy widget' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetHierarchy {
+ parent {
+ id
+ }
+ children {
+ nodes {
+ id
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns widget information' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'widgets' => match_array([
+ hash_including(
+ 'type' => 'DESCRIPTION'
+ ),
+ hash_including(
+ 'type' => 'HIERARCHY',
+ 'parent' => nil,
+ 'children' => { 'nodes' => match_array([
+ hash_including('id' => child_link1.work_item.to_gid.to_s),
+ hash_including('id' => child_link2.work_item.to_gid.to_s)
+ ]) }
+ )
+ ])
+ )
+ end
+
+ it 'avoids N+1 queries' do
+ post_graphql(query, current_user: current_user) # warm up
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ post_graphql(query, current_user: current_user)
+ end
+
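+ # adding more children should not increase the number of queries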
+ create_list(:parent_link, 3, work_item_parent: work_item)
+
+ expect do
+ post_graphql(query, current_user: current_user)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+
+ context 'when user is guest' do
+ let(:current_user) { guest }
+
+ it 'filters out not accessible children or parent' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'widgets' => match_array([
+ hash_including(
+ 'type' => 'DESCRIPTION'
+ ),
+ hash_including(
+ 'type' => 'HIERARCHY',
+ 'parent' => nil,
+ 'children' => { 'nodes' => match_array([
+ hash_including('id' => child_link1.work_item.to_gid.to_s)
+ ]) }
+ )
+ ])
+ )
+ end
+ end
+
+ context 'when requesting child item' do
+ let_it_be(:work_item) { create(:work_item, :task, project: project, description: '- List item') }
+ let_it_be(:parent_link) { create(:parent_link, work_item: work_item) }
+
+ it 'returns parent information' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'widgets' => match_array([
+ hash_including(
+ 'type' => 'DESCRIPTION'
+ ),
+ hash_including(
+ 'type' => 'HIERARCHY',
+ 'parent' => hash_including('id' => parent_link.work_item_parent.to_gid.to_s),
+ 'children' => { 'nodes' => match_array([]) }
+ )
+ ])
+ )
+ end
+ end
+ end
+ end
+
context 'when an Issue Global ID is provided' do
let(:global_id) { Issue.find(work_item.id).to_gid.to_s }
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 3bd59450d49..d94257c61eb 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -67,10 +67,10 @@ RSpec.describe 'GraphQL' do
context 'when there is an error in the logger' do
before do
- logger_analyzer = GitlabSchema.query_analyzers.find do |qa|
- qa.is_a? Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer
- end
- allow(logger_analyzer).to receive(:process_variables)
+ allow(GraphQL::Analysis::AST).to receive(:analyze_query)
+ .and_call_original
+ allow(GraphQL::Analysis::AST).to receive(:analyze_query)
+ .with(anything, Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer::ALL_ANALYZERS, anything)
.and_raise(StandardError.new("oh noes!"))
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index ffc5d353958..56f08249bdd 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -1177,7 +1177,6 @@ RSpec.describe API::Groups do
it "only looks up root ancestor once and returns projects including those in subgroups" do
expect(Namespace).to receive(:find_by).with(id: group1.id.to_s).once.and_call_original # For the group sent in the API call
- expect(Namespace).to receive(:find_by).with(id: group1.traversal_ids.first).once.and_call_original # root_ancestor direct lookup
expect(Namespace).to receive(:joins).with(start_with('INNER JOIN (SELECT id, traversal_ids[1]')).once.and_call_original # All-in-one root_ancestor query
get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true }
@@ -1187,25 +1186,6 @@ RSpec.describe API::Groups do
expect(json_response).to be_an(Array)
expect(json_response.length).to eq(6)
end
-
- context 'when group_projects_api_preload_groups feature is disabled' do
- before do
- stub_feature_flags(group_projects_api_preload_groups: false)
- end
-
- it 'looks up the root ancestor multiple times' do
- expect(Namespace).to receive(:find_by).with(id: group1.id.to_s).once.and_call_original
- expect(Namespace).to receive(:find_by).with(id: group1.traversal_ids.first).at_least(:twice).and_call_original
- expect(Namespace).not_to receive(:joins).with(start_with('INNER JOIN (SELECT id, traversal_ids[1]'))
-
- get api("/groups/#{group1.id}/projects", user1), params: { include_subgroups: true }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an(Array)
- expect(json_response.length).to eq(6)
- end
- end
end
context 'when include_ancestor_groups is true' do
diff --git a/spec/requests/api/integrations/jira_connect/subscriptions_spec.rb b/spec/requests/api/integrations/jira_connect/subscriptions_spec.rb
index 86f8992a624..8a222a99b34 100644
--- a/spec/requests/api/integrations/jira_connect/subscriptions_spec.rb
+++ b/spec/requests/api/integrations/jira_connect/subscriptions_spec.rb
@@ -41,6 +41,7 @@ RSpec.describe API::Integrations::JiraConnect::Subscriptions do
post_subscriptions
expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(json_response).to eq('message' => '401 Unauthorized - JWT authentication failed')
end
end
diff --git a/spec/requests/api/integrations/slack/events_spec.rb b/spec/requests/api/integrations/slack/events_spec.rb
new file mode 100644
index 00000000000..176e9eded31
--- /dev/null
+++ b/spec/requests/api/integrations/slack/events_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Integrations::Slack::Events do
+ describe 'POST /integrations/slack/events' do
+ let(:params) { {} }
+ let(:headers) do
+ {
+ ::API::Integrations::Slack::Request::VERIFICATION_TIMESTAMP_HEADER => Time.current.to_i.to_s,
+ ::API::Integrations::Slack::Request::VERIFICATION_SIGNATURE_HEADER => 'mock_verified_signature'
+ }
+ end
+
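+ # stub the constant-time comparison so that only the mocked signature value passes verification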
+ before do
+ allow(ActiveSupport::SecurityUtils).to receive(:secure_compare) do |signature|
+ signature == 'mock_verified_signature'
+ end
+
+ stub_application_setting(slack_app_signing_secret: 'mock_key')
+ end
+
+ subject { post api('/integrations/slack/events'), params: params, headers: headers }
+
+ shared_examples 'an unauthorized request' do
+ specify do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ shared_examples 'a successful request that generates a tracked error' do
+ specify do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_empty
+ end
+ end
+
+ context 'when the slack_app_signing_secret setting is not set' do
+ before do
+ stub_application_setting(slack_app_signing_secret: nil)
+ end
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'when the timestamp header has expired' do
+ before do
+ headers[::API::Integrations::Slack::Request::VERIFICATION_TIMESTAMP_HEADER] = 5.minutes.ago.to_i.to_s
+ end
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'when the timestamp header is missing' do
+ before do
+ headers.delete(::API::Integrations::Slack::Request::VERIFICATION_TIMESTAMP_HEADER)
+ end
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'when the signature header is missing' do
+ before do
+ headers.delete(::API::Integrations::Slack::Request::VERIFICATION_SIGNATURE_HEADER)
+ end
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'when the signature is not verified' do
+ before do
+ headers[::API::Integrations::Slack::Request::VERIFICATION_SIGNATURE_HEADER] = 'unverified_signature'
+ end
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'when type param is missing' do
+ it_behaves_like 'a successful request that generates a tracked error'
+ end
+
+ context 'when type param is unknown' do
+ let(:params) do
+ { type: 'unknown_type' }
+ end
+
+ it_behaves_like 'a successful request that generates a tracked error'
+ end
+
+ context 'when type param is url_verification' do
+ let(:params) do
+ {
+ type: 'url_verification',
+ challenge: '3eZbrw1aBm2rZgRNFdxV2595E9CY3gmdALWMmHkvFXO7tYXAYM8P'
+ }
+ end
+
+ it 'responds in-request with the challenge' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'challenge' => '3eZbrw1aBm2rZgRNFdxV2595E9CY3gmdALWMmHkvFXO7tYXAYM8P' })
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
index 96cc101e73a..cd9a0746581 100644
--- a/spec/requests/api/integrations_spec.rb
+++ b/spec/requests/api/integrations_spec.rb
@@ -55,33 +55,49 @@ RSpec.describe API::Integrations do
describe "PUT /projects/:id/#{endpoint}/#{integration.dasherize}" do
include_context integration
+ # NOTE: Some attributes are not supported for PUT requests, even though in most cases they should be.
+ # For some of them the problem lies elsewhere, for example most chat integrations don't support the `*_channel`
+ # fields, yet they are incorrectly included in `#fields`.
+ #
+ # We can fix these manually, or with a generic approach like https://gitlab.com/gitlab-org/gitlab/-/issues/348208
+ let(:missing_channel_attributes) { %i[push_channel issue_channel confidential_issue_channel merge_request_channel note_channel confidential_note_channel tag_push_channel pipeline_channel wiki_page_channel] }
+ let(:missing_attributes) do
+ {
+ datadog: %i[archive_trace_events],
+ discord: missing_channel_attributes + %i[branches_to_be_notified notify_only_broken_pipelines],
+ hangouts_chat: missing_channel_attributes + %i[notify_only_broken_pipelines],
+ jira: %i[issues_enabled project_key vulnerabilities_enabled vulnerabilities_issuetype],
+ mattermost: %i[deployment_channel labels_to_be_notified],
+ microsoft_teams: missing_channel_attributes,
+ mock_ci: %i[enable_ssl_verification],
+ prometheus: %i[manual_configuration],
+ slack: %i[alert_events alert_channel deployment_channel labels_to_be_notified],
+ unify_circuit: missing_channel_attributes + %i[branches_to_be_notified notify_only_broken_pipelines],
+ webex_teams: missing_channel_attributes + %i[branches_to_be_notified notify_only_broken_pipelines]
+ }
+ end
+
it "updates #{integration} settings and returns the correct fields" do
- put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user), params: integration_attrs
+ supported_attrs = integration_attrs.without(missing_attributes.fetch(integration.to_sym, []))
+
+ put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user), params: supported_attrs
expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['slug']).to eq(dashed_integration)
current_integration = project.integrations.first
- events = current_integration.event_names.empty? ? ["foo"].freeze : current_integration.event_names
- query_strings = []
- events.map(&:to_sym).each do |event|
- event_value = !current_integration[event]
- query_strings << "#{event}=#{event_value}"
- integration_attrs[event] = event_value if integration_attrs[event].present?
+ expect(current_integration).to have_attributes(supported_attrs)
+ assert_correct_response_fields(json_response['properties'].keys, current_integration)
+
+ # Flip all booleans and verify that we can set these too
+ flipped_attrs = supported_attrs.transform_values do |value|
+ [true, false].include?(value) ? !value : value
end
- query_strings = query_strings.join('&')
- put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}?#{query_strings}", user), params: integration_attrs
+ put api("/projects/#{project.id}/#{endpoint}/#{dashed_integration}", user), params: flipped_attrs
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['slug']).to eq(dashed_integration)
- events.each do |event|
- next if event == "foo"
-
- expect(project.integrations.first[event]).not_to eq(current_integration[event]),
- "expected #{!current_integration[event]} for event #{event} for #{endpoint} #{current_integration.title}, got #{current_integration[event]}"
- end
-
- assert_correct_response_fields(json_response['properties'].keys, current_integration)
+ expect(project.integrations.first).to have_attributes(flipped_attrs)
end
it "returns if required fields missing" do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index acfe476a864..93e4e72f78f 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -51,6 +51,64 @@ RSpec.describe API::Internal::Base do
end
end
+ describe 'GET /internal/error_tracking_allowed' do
+ let_it_be(:project) { create(:project) }
+
+ let(:params) { { project_id: project.id, public_key: 'key' } }
+
+ context 'when the secret header is missing' do
+ it 'responds with unauthorized entity' do
+ post api("/internal/error_tracking_allowed"), params: params
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when some params are missing' do
+ it 'responds with unprocessable entity' do
+ post api("/internal/error_tracking_allowed"), params: params.except(:public_key),
+ headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
+ context 'when the error tracking is disabled' do
+ it 'returns enabled: false' do
+ create(:error_tracking_client_key, project: project, active: false)
+
+ post api("/internal/error_tracking_allowed"), params: params,
+ headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'enabled' => false })
+ end
+
+ context 'when the error tracking record does not exist' do
+ it 'returns enabled: false' do
+ post api("/internal/error_tracking_allowed"), params: params,
+ headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'enabled' => false })
+ end
+ end
+ end
+
+ context 'when the error tracking is enabled' do
+ it 'returns enabled: true' do
+ client_key = create(:error_tracking_client_key, project: project, active: true)
+ params[:public_key] = client_key.public_key
+
+ post api("/internal/error_tracking_allowed"), params: params,
+ headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ 'enabled' => true })
+ end
+ end
+ end
+
describe 'GET /internal/two_factor_recovery_codes' do
let(:key_id) { key.id }
diff --git a/spec/requests/api/internal/mail_room_spec.rb b/spec/requests/api/internal/mail_room_spec.rb
index 67ea617f90d..a0a9c1f9cb3 100644
--- a/spec/requests/api/internal/mail_room_spec.rb
+++ b/spec/requests/api/internal/mail_room_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe API::Internal::MailRoom do
let(:incoming_email_secret) { 'incoming_email_secret' }
let(:service_desk_email_secret) { 'service_desk_email_secret' }
- let(:email_content) { fixture_file("emails/commands_in_reply.eml") }
+ let(:email_content) { fixture_file("emails/service_desk_reply.eml") }
before do
allow(Gitlab::MailRoom::Authenticator).to receive(:secret).with(:incoming_email).and_return(incoming_email_secret)
@@ -117,7 +117,7 @@ RSpec.describe API::Internal::MailRoom do
email = ActionMailer::Base.deliveries.last
expect(email).not_to be_nil
- expect(email.to).to match_array(["jake@adventuretime.ooo"])
+ expect(email.to).to match_array(["alan@adventuretime.ooo"])
expect(email.subject).to include("Rejected")
expect(email.body.parts.last.to_s).to include("We couldn't process your email")
end
@@ -190,5 +190,54 @@ RSpec.describe API::Internal::MailRoom do
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
+
+ context 'when handling invalid UTF-8 email content' do
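+ # the fixture is read with a non-UTF-8 encoding, so the payload contains byte sequences that are invalid UTF-8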
+ let(:email_content) do
+ File.open(expand_fixture_path("emails/service_desk_reply_illegal_utf8.eml"), "r:SHIFT_JIS") { |f| f.read }
+ end
+
+ let(:encoded_email_content) { Gitlab::EncodingHelper.encode_utf8(email_content) }
+ let(:auth_headers) do
+ jwt_token = JWT.encode(auth_payload, incoming_email_secret, 'HS256')
+ { Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token }
+ end
+
+ it 'schedules an EmailReceiverWorker job with the email content forcibly encoded to UTF-8' do
+ Sidekiq::Testing.fake! do
+ expect do
+ post api("/internal/mail_room/incoming_email"), headers: auth_headers, params: email_content
+ end.to change { EmailReceiverWorker.jobs.size }.by(1)
+ end
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ job = EmailReceiverWorker.jobs.last
+ expect(job).to match a_hash_including('args' => [encoded_email_content])
+ end
+ end
+
+ context 'when handling a text/plain request content type' do
+ let(:auth_headers) do
+ jwt_token = JWT.encode(auth_payload, incoming_email_secret, 'HS256')
+ {
+ Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER => jwt_token,
+ 'Content-Type' => 'text/plain'
+ }
+ end
+
+ it 'schedules an EmailReceiverWorker job with the unmodified email content' do
+ Sidekiq::Testing.fake! do
+ expect do
+ post api("/internal/mail_room/incoming_email"), headers: auth_headers, params: email_content
+ end.to change { EmailReceiverWorker.jobs.size }.by(1)
+ end
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.content_type).to eql('application/json')
+
+ job = EmailReceiverWorker.jobs.last
+ expect(job).to match a_hash_including('args' => [email_content])
+ end
+ end
end
end
diff --git a/spec/requests/api/internal/workhorse_spec.rb b/spec/requests/api/internal/workhorse_spec.rb
new file mode 100644
index 00000000000..d40c14cc0fd
--- /dev/null
+++ b/spec/requests/api/internal/workhorse_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Internal::Workhorse, :allow_forgery_protection do
+ include WorkhorseHelpers
+
+ context '/authorize_upload' do
+ let_it_be(:user) { create(:user) }
+
+ let(:headers) { {} }
+
+ subject { post(api('/internal/workhorse/authorize_upload'), headers: headers) }
+
+ def expect_status(status)
+ subject
+ expect(response).to have_gitlab_http_status(status)
+ end
+
+ context 'without workhorse internal header' do
+ it { expect_status(:forbidden) }
+ end
+
+ context 'with workhorse internal header' do
+ let(:headers) { workhorse_internal_api_request_header }
+
+ it { expect_status(:unauthorized) }
+
+ context 'as a logged in user' do
+ before do
+ login_as(user)
+ end
+
+ it { expect_status(:success) }
+ it 'returns the temp upload path' do
+ subject
+ expect(json_response['TempPath']).to eq(Rails.root.join('tmp/tests/public/uploads/tmp').to_s)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index d093894720e..64ad5733c1b 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -59,13 +59,13 @@ RSpec.describe API::Invitations do
context 'when authenticated as a maintainer/owner' do
context 'and new member is already a requester' do
- it 'does not transform the requester into a proper member' do
+ it 'transforms the requester into a proper member' do
expect do
post invitations_url(source, maintainer),
params: { email: access_requester.email, access_level: Member::MAINTAINER }
expect(response).to have_gitlab_http_status(:created)
- end.not_to change { source.members.count }
+ end.to change { source.members.count }.by(1)
end
end
@@ -258,12 +258,13 @@ RSpec.describe API::Invitations do
end
end
- it "returns a message if member already exists" do
+ it "updates an already existing active member" do
post invitations_url(source, maintainer),
params: { email: developer.email, access_level: Member::MAINTAINER }
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['message'][developer.email]).to eq("User already exists in source")
+ expect(json_response['status']).to eq("success")
+ expect(source.members.find_by(user: developer).access_level).to eq Member::MAINTAINER
end
it 'returns 400 when the invite params of email and user_id are not sent' do
@@ -328,7 +329,7 @@ RSpec.describe API::Invitations do
emails = 'email3@example.com,email4@example.com,email5@example.com,email6@example.com,email7@example.com'
- unresolved_n_plus_ones = 44 # old 48 with 12 per new email, currently there are 11 queries added per email
+ unresolved_n_plus_ones = 40 # currently there are 10 queries added per email
expect do
post invitations_url(project, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
@@ -351,7 +352,7 @@ RSpec.describe API::Invitations do
emails = 'email3@example.com,email4@example.com,email5@example.com,email6@example.com,email7@example.com'
- unresolved_n_plus_ones = 67 # currently there are 11 queries added per email
+ unresolved_n_plus_ones = 59 # currently there are 10 queries added per email
expect do
post invitations_url(project, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
@@ -373,7 +374,7 @@ RSpec.describe API::Invitations do
emails = 'email3@example.com,email4@example.com,email5@example.com,email6@example.com,email7@example.com'
- unresolved_n_plus_ones = 36 # old 40 with 10 per new email, currently there are 9 queries added per email
+ unresolved_n_plus_ones = 32 # currently there are 8 queries added per email
expect do
post invitations_url(group, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
@@ -396,7 +397,7 @@ RSpec.describe API::Invitations do
emails = 'email3@example.com,email4@example.com,email5@example.com,email6@example.com,email7@example.com'
- unresolved_n_plus_ones = 62 # currently there are 9 queries added per email
+ unresolved_n_plus_ones = 56 # currently there are 8 queries added per email
expect do
post invitations_url(group, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
diff --git a/spec/requests/api/issue_links_spec.rb b/spec/requests/api/issue_links_spec.rb
index 81dd4c3dfa0..90238c8bf76 100644
--- a/spec/requests/api/issue_links_spec.rb
+++ b/spec/requests/api/issue_links_spec.rb
@@ -156,6 +156,87 @@ RSpec.describe API::IssueLinks do
end
end
+ describe 'GET /links/:issue_link_id' do
+ def perform_request(issue_link_id, user = nil, params = {})
+ get api("/projects/#{project.id}/issues/#{issue.iid}/links/#{issue_link_id}", user), params: params
+ end
+
+ context 'when unauthenticated' do
+ it 'returns 401' do
+ issue_link = create(:issue_link)
+
+ perform_request(issue_link.id)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when authenticated' do
+ context 'when issue link does not exist' do
+ it 'returns 404' do
+ perform_request(non_existing_record_id, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ let_it_be(:target_issue) { create(:issue, project: project) }
+
+ context 'when issue link does not belong to the specified issue' do
+ it 'returns 404' do
+ other_issue = create(:issue, project: project)
+ # the source is different from the issue in the API route
+ issue_link = create(:issue_link, source: other_issue, target: target_issue)
+
+ perform_request(issue_link.id, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user has ability to read the issue link' do
+ it 'returns 200' do
+ issue_link = create(:issue_link, source: issue, target: target_issue)
+
+ perform_request(issue_link.id, user)
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/issue_link')
+ end
+ end
+ end
+
+ context 'when user cannot read issue link' do
+ let(:private_project) { create(:project) }
+ let(:public_project) { create(:project, :public) }
+ let(:public_issue) { create(:issue, project: public_project) }
+
+ context 'when the issue link targets an issue in a non-accessible project' do
+ it 'returns 404' do
+ private_issue = create(:issue, project: private_project)
+ issue_link = create(:issue_link, source: public_issue, target: private_issue)
+
+ perform_request(issue_link.id, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when issue link targets a non-accessible issue' do
+ it 'returns 404' do
+ confidential_issue = create(:issue, :confidential, project: public_project)
+ issue_link = create(:issue_link, source: public_issue, target: confidential_issue)
+
+ perform_request(issue_link.id, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+ end
+
describe 'DELETE /links/:issue_link_id' do
context 'when unauthenticated' do
it 'returns 401' do
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 1419d39981a..480baff6eed 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -575,6 +575,26 @@ RSpec.describe API::Issues do
end
end
+ context 'with issues closed as duplicates' do
+ let_it_be(:dup_issue_1) { create(:issue, :closed_as_duplicate, project: project) }
+
+ it 'avoids N+1 queries' do
+ get api('/issues', user) # warm up
+
+ control = ActiveRecord::QueryRecorder.new do
+ get api('/issues', user)
+ end
+
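+ # creating one more issue closed as a duplicate should not add queries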
+ create(:issue, :closed_as_duplicate, project: project)
+
+ expect do
+ get api('/issues', user)
+ end.not_to exceed_query_limit(control)
+ # 2 pre-existing issues + 4 from the duplications (2 issues closed as duplicates and their 2 new target issues)
+ expect(json_response.count).to eq(6)
+ end
+ end
+
context 'filter by labels or label_name param' do
context 'N+1' do
let(:label_b) { create(:label, title: 'foo', project: project) }
@@ -1101,6 +1121,51 @@ RSpec.describe API::Issues do
expect(json_response['references']['relative']).to eq("##{issue.iid}")
expect(json_response['references']['full']).to eq("#{project.parent.path}/#{project.path}##{issue.iid}")
end
+
+ context 'when issue is closed as duplicate' do
+ let(:new_issue) { create(:issue) }
+ let!(:issue_closed_as_dup) { create(:issue, project: project, duplicated_to: new_issue) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the user does not have permission to view the new issue' do
+ it 'does not return the issue as closed_as_duplicate_of' do
+ get api("/projects/#{project.id}/issues/#{issue_closed_as_dup.iid}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('_links', 'closed_as_duplicate_of')).to eq(nil)
+ end
+ end
+
+ context 'when the user has access to the new issue' do
+ before do
+ new_issue.project.add_guest(user)
+ end
+
+ it 'returns the issue as closed_as_duplicate_of' do
+ get api("/projects/#{project.id}/issues/#{issue_closed_as_dup.iid}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expected_url = expose_url(api_v4_project_issue_path(id: new_issue.project_id, issue_iid: new_issue.iid))
+ expect(json_response.dig('_links', 'closed_as_duplicate_of')).to eq(expected_url)
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(closed_as_duplicate_of_issues_api: false)
+ end
+
+ it 'does not return the issue as closed_as_duplicate_of' do
+ get api("/projects/#{project.id}/issues/#{issue_closed_as_dup.iid}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('_links', 'closed_as_duplicate_of')).to eq(nil)
+ end
+ end
+ end
+ end
end
describe "POST /projects/:id/issues" do
diff --git a/spec/requests/api/markdown_snapshot_spec.rb b/spec/requests/api/markdown_snapshot_spec.rb
new file mode 100644
index 00000000000..37607a4e866
--- /dev/null
+++ b/spec/requests/api/markdown_snapshot_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
+# for documentation on this spec.
+RSpec.describe API::Markdown, 'Snapshot' do
+ glfm_specification_dir = File.expand_path('../../../glfm_specification', __dir__)
+ glfm_example_snapshots_dir = File.expand_path('../../fixtures/glfm/example_snapshots', __dir__)
+ include_context 'with API::Markdown Snapshot shared context', glfm_specification_dir, glfm_example_snapshots_dir
+end
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 63ef8643088..e4c2f17af47 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -4,13 +4,14 @@ require 'spec_helper'
RSpec.describe API::Members do
let(:maintainer) { create(:user, username: 'maintainer_user') }
+ let(:maintainer2) { create(:user, username: 'user-with-maintainer-role') }
let(:developer) { create(:user) }
let(:access_requester) { create(:user) }
let(:stranger) { create(:user) }
let(:user_with_minimal_access) { create(:user) }
let(:project) do
- create(:project, :public, creator_id: maintainer.id, namespace: maintainer.namespace) do |project|
+ create(:project, :public, creator_id: maintainer.id, group: create(:group, :public)) do |project|
project.add_maintainer(maintainer)
project.add_developer(developer, current_user: maintainer)
project.request_access(access_requester)
@@ -253,21 +254,48 @@ RSpec.describe API::Members do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
+
+ context 'adding a member of higher access level' do
+ before do
+ # the other 'maintainer' is in fact an owner of the group!
+ source.add_maintainer(maintainer2)
+ end
+
+ context 'when an access requester' do
+ it 'is not successful' do
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer2),
+ params: { user_id: access_requester.id, access_level: Member::OWNER }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when a totally new user' do
+ it 'is not successful' do
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer2),
+ params: { user_id: stranger.id, access_level: Member::OWNER }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
end
end
- context 'when authenticated as a maintainer/owner' do
+ context 'when authenticated as a member with membership management rights' do
context 'and new member is already a requester' do
- it 'transforms the requester into a proper member' do
- expect do
- post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
- params: { user_id: access_requester.id, access_level: Member::MAINTAINER }
-
- expect(response).to have_gitlab_http_status(:created)
- end.to change { source.members.count }.by(1)
- expect(source.requesters.count).to eq(0)
- expect(json_response['id']).to eq(access_requester.id)
- expect(json_response['access_level']).to eq(Member::MAINTAINER)
+ context 'when the requester is of equal or lower access level' do
+ it 'transforms the requester into a proper member' do
+ expect do
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ params: { user_id: access_requester.id, access_level: Member::MAINTAINER }
+
+ expect(response).to have_gitlab_http_status(:created)
+ end.to change { source.members.count }.by(1)
+ expect(source.requesters.count).to eq(0)
+ expect(json_response['id']).to eq(access_requester.id)
+ expect(json_response['access_level']).to eq(Member::MAINTAINER)
+ end
end
end
@@ -445,7 +473,7 @@ RSpec.describe API::Members do
it 'returns 404 when the user_id is not valid' do
post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
- params: { user_id: 0, access_level: Member::MAINTAINER }
+ params: { user_id: non_existing_record_id, access_level: Member::MAINTAINER }
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 User Not Found')
@@ -515,16 +543,49 @@ RSpec.describe API::Members do
end
end
end
+
+ context 'as a maintainer updating a member to one with higher access level than themselves' do
+ before do
+ # the other 'maintainer' is in fact an owner of the group!
+ source.add_maintainer(maintainer2)
+ end
+
+ it 'returns 403' do
+ put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer2),
+ params: { access_level: Member::OWNER }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
end
context 'when authenticated as a maintainer/owner' do
- it 'updates the member' do
- put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer),
- params: { access_level: Member::MAINTAINER }
+ context 'when updating a member with the same or lower access level' do
+ it 'updates the member' do
+ put api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer),
+ params: { access_level: Member::MAINTAINER }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['id']).to eq(developer.id)
- expect(json_response['access_level']).to eq(Member::MAINTAINER)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(developer.id)
+ expect(json_response['access_level']).to eq(Member::MAINTAINER)
+ end
+ end
+
+ context 'when updating a member with higher access level' do
+ let(:owner) { create(:user) }
+
+ before do
+ source.add_owner(owner)
+ # the other 'maintainer' is in fact an owner of the group!
+ source.add_maintainer(maintainer2)
+ end
+
+ it 'returns 403' do
+ put api("/#{source_type.pluralize}/#{source.id}/members/#{owner.id}", maintainer2),
+ params: { access_level: Member::DEVELOPER }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
@@ -619,6 +680,23 @@ RSpec.describe API::Members do
end
end
+ context 'when attempting to delete a member with higher access level' do
+ let(:owner) { create(:user) }
+
+ before do
+ source.add_owner(owner)
+ # the other 'maintainer' is in fact an owner of the group!
+ source.add_maintainer(maintainer2)
+ end
+
+ it 'returns 403' do
+ delete api("/#{source_type.pluralize}/#{source.id}/members/#{owner.id}", maintainer2),
+ params: { access_level: Member::DEVELOPER }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
it 'deletes the member' do
expect do
delete api("/#{source_type.pluralize}/#{source.id}/members/#{developer.id}", maintainer)
@@ -694,13 +772,11 @@ RSpec.describe API::Members do
end
context 'adding owner to project' do
- it 'returns created status' do
- expect do
- post api("/projects/#{project.id}/members", maintainer),
- params: { user_id: stranger.id, access_level: Member::OWNER }
+ it 'returns 403' do
+ post api("/projects/#{project.id}/members", maintainer),
+ params: { user_id: stranger.id, access_level: Member::OWNER }
- expect(response).to have_gitlab_http_status(:created)
- end.to change { project.members.count }.by(1)
+ expect(response).to have_gitlab_http_status(:forbidden)
end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index a7ede7f4150..695c0ed1749 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -2134,54 +2134,6 @@ RSpec.describe API::MergeRequests do
expect(response).to have_gitlab_http_status(:created)
end
end
-
- describe 'SSE counter' do
- let(:headers) { {} }
- let(:params) do
- {
- title: 'Test merge_request',
- source_branch: 'feature_conflict',
- target_branch: 'master',
- author_id: user.id,
- milestone_id: milestone.id,
- squash: true
- }
- end
-
- subject { post api("/projects/#{project.id}/merge_requests", user), params: params, headers: headers }
-
- it 'does not increase the SSE counter by default' do
- expect(Gitlab::UsageDataCounters::EditorUniqueCounter).not_to receive(:track_sse_edit_action)
-
- subject
-
- expect(response).to have_gitlab_http_status(:created)
- end
-
- context 'when referer is not the SSE' do
- let(:headers) { { 'HTTP_REFERER' => 'https://gitlab.com' } }
-
- it 'does not increase the SSE counter by default' do
- expect(Gitlab::UsageDataCounters::EditorUniqueCounter).not_to receive(:track_sse_edit_action)
-
- subject
-
- expect(response).to have_gitlab_http_status(:created)
- end
- end
-
- context 'when referer is the SSE' do
- let(:headers) { { 'HTTP_REFERER' => project_show_sse_url(project, 'master/README.md') } }
-
- it 'increases the SSE counter by default' do
- expect(Gitlab::UsageDataCounters::EditorUniqueCounter).to receive(:track_sse_edit_action).with(author: user)
-
- subject
-
- expect(response).to have_gitlab_http_status(:created)
- end
- end
- end
end
describe 'PUT /projects/:id/merge_requests/:merge_request_iid' do
@@ -2535,14 +2487,34 @@ RSpec.describe API::MergeRequests do
expect(response).to have_gitlab_http_status(:ok)
end
- it "returns 406 if branch can't be merged" do
- allow_any_instance_of(MergeRequest)
- .to receive(:can_be_merged?).and_return(false)
+ context 'when change_response_code_merge_status is enabled' do
+ it "returns 422 if branch can't be merged" do
+ allow_next_found_instance_of(MergeRequest) do |merge_request|
+ allow(merge_request).to receive(:can_be_merged?).and_return(false)
+ end
- put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq('Branch cannot be merged')
+ end
+ end
+
+ context 'when change_response_code_merge_status is disabled' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: false)
+ end
- expect(response).to have_gitlab_http_status(:not_acceptable)
- expect(json_response['message']).to eq('Branch cannot be merged')
+ it "returns 406 if branch can't be merged" do
+ allow_next_found_instance_of(MergeRequest) do |merge_request|
+ allow(merge_request).to receive(:can_be_merged?).and_return(false)
+ end
+
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+
+ expect(response).to have_gitlab_http_status(:not_acceptable)
+ expect(json_response['message']).to eq('Branch cannot be merged')
+ end
end
it "returns 405 if merge_request is not open" do
@@ -2693,6 +2665,7 @@ RSpec.describe API::MergeRequests do
expect(response).to have_gitlab_http_status(:ok)
expect(source_repository.branch_exists?(source_branch)).to be false
+ expect(merge_request.reload.should_remove_source_branch?).to be true
end
end
@@ -2711,6 +2684,7 @@ RSpec.describe API::MergeRequests do
expect(response).to have_gitlab_http_status(:ok)
expect(source_repository.branch_exists?(source_branch)).to be false
+ expect(merge_request.reload.should_remove_source_branch?).to be nil
end
it 'does not remove the source branch' do
@@ -2721,6 +2695,7 @@ RSpec.describe API::MergeRequests do
expect(response).to have_gitlab_http_status(:ok)
expect(source_repository.branch_exists?(source_branch)).to be_truthy
+ expect(merge_request.reload.should_remove_source_branch?).to be false
end
end
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index 01dbf523071..09b87f41b82 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -325,6 +325,24 @@ RSpec.describe API::Namespaces do
expect(response.body).to eq(expected_json)
end
+ it 'ignores paths of groups present in other hierarchies when making suggestions' do
+ (1..2).each do |suffix|
+ create(:group, name: "mygroup#{suffix}", path: "mygroup#{suffix}", parent: namespace2)
+ end
+
+ create(:group, name: 'mygroup', path: 'mygroup', parent: namespace1)
+
+ get api("/namespaces/mygroup/exists", user), params: { parent_id: namespace1.id }
+
+ # If the paths of groups in other hierarchies weren't ignored, the generated suggestion would have
+ # been `mygroup3`, just because groups with the paths `mygroup1` and `mygroup2` exist somewhere else.
+ # But there is no reason for groups that exist elsewhere to cause a conflict, because
+ # their hierarchies differ. Hence, the correct suggestion to generate is `mygroup1`.
+ expected_json = { exists: true, suggests: ["mygroup1"] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+
it 'ignores top-level namespaces when checking with parent_id' do
get api("/namespaces/#{namespace1.path}/exists", user), params: { parent_id: namespace1.id }
diff --git a/spec/requests/api/personal_access_tokens_spec.rb b/spec/requests/api/personal_access_tokens_spec.rb
index 01f69f0aae2..403c646ee32 100644
--- a/spec/requests/api/personal_access_tokens_spec.rb
+++ b/spec/requests/api/personal_access_tokens_spec.rb
@@ -73,6 +73,61 @@ RSpec.describe API::PersonalAccessTokens do
end
end
+ describe 'GET /personal_access_tokens/:id' do
+ let_it_be(:user_token) { create(:personal_access_token, user: current_user) }
+ let_it_be(:user_token_path) { "/personal_access_tokens/#{user_token.id}" }
+ let_it_be(:invalid_path) { "/personal_access_tokens/#{non_existing_record_id}" }
+
+ context 'when current_user is an administrator', :enable_admin_mode do
+ let_it_be(:admin_user) { create(:admin) }
+ let_it_be(:admin_token) { create(:personal_access_token, user: admin_user) }
+ let_it_be(:admin_path) { "/personal_access_tokens/#{admin_token.id}" }
+
+ it "returns the admin's own PAT by id" do
+ get api(admin_path, admin_user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(admin_token.id)
+ end
+
+ it "returns a different user's PAT by id" do
+ get api(user_token_path, admin_user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(user_token.id)
+ end
+
+ it 'fails to return a PAT because no PAT with this id exists' do
+ get api(invalid_path, admin_user)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when current_user is not an administrator' do
+ let_it_be(:other_users_path) { "/personal_access_tokens/#{token1.id}" }
+
+ it "returns the user's own PAT by id" do
+ get api(user_token_path, current_user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(user_token.id)
+ end
+
+ it "fails to return another user's PAT by id" do
+ get api(other_users_path, current_user)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'fails to return a PAT because no PAT with this id exists' do
+ get api(invalid_path, current_user)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
describe 'DELETE /personal_access_tokens/self' do
let(:path) { '/personal_access_tokens/self' }
let(:token) { create(:personal_access_token, user: current_user) }
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index eb6f81c2810..35844631287 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -122,6 +122,7 @@ project_feature:
- id
- created_at
- metrics_dashboard_access_level
+ - package_registry_access_level
- project_id
- updated_at
computed_attributes:
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index b5aedde2b2e..26e0adc11b3 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -44,6 +44,8 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response.first['releases_events']).to eq(true)
expect(json_response.first['enable_ssl_verification']).to eq(true)
expect(json_response.first['push_events_branch_filter']).to eq('master')
+ expect(json_response.first['alert_status']).to eq('executable')
+ expect(json_response.first['disabled_until']).to be_nil
end
end
@@ -76,6 +78,8 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['releases_events']).to eq(hook.releases_events)
expect(json_response['deployment_events']).to eq(true)
expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
+ expect(json_response['alert_status']).to eq(hook.alert_status.to_s)
+ expect(json_response['disabled_until']).to be_nil
end
it "returns a 404 error if hook id is not available" do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index d2189ab02ea..431d2e56cb5 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -3106,6 +3106,13 @@ RSpec.describe API::Projects do
expect(json_response['error']).to eq 'group_access does not have a valid value'
end
+ it "returns a 400 error when the project-group share is created with an OWNER access level" do
+ post api("/projects/#{project.id}/share", user), params: { group_id: group.id, group_access: Gitlab::Access::OWNER }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'group_access does not have a valid value'
+ end
+
it "returns a 409 error when link is not saved" do
allow(::Projects::GroupLinks::CreateService).to receive_message_chain(:new, :execute)
.and_return({ status: :error, http_status: 409, message: 'error' })
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index 8fa5f409298..a24b852cdac 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -17,7 +17,68 @@ RSpec.describe API::PypiPackages do
let(:headers) { {} }
- context 'simple API endpoint' do
+ context 'simple index API endpoint' do
+ let_it_be(:package) { create(:pypi_package, project: project) }
+ let_it_be(:package2) { create(:pypi_package, project: project) }
+
+ subject { get api(url), headers: headers }
+
+ describe 'GET /api/v4/groups/:id/-/packages/pypi/simple' do
+ let(:url) { "/groups/#{group.id}/-/packages/pypi/simple" }
+ let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace } }
+
+ it_behaves_like 'pypi simple index API endpoint'
+ it_behaves_like 'rejects PyPI access with unknown group id'
+
+ context 'deploy tokens' do
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token, group: group) }
+
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
+ group.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ it_behaves_like 'deploy token for package GET requests'
+
+ context 'with group path as id' do
+ let(:url) { "/groups/#{CGI.escape(group.full_path)}/-/packages/pypi/simple"}
+
+ it_behaves_like 'deploy token for package GET requests'
+ end
+ end
+
+ context 'job token' do
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
+ group.update_column(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
+ group.add_developer(user)
+ end
+
+ it_behaves_like 'job token for package GET requests'
+ end
+
+ it_behaves_like 'a pypi user namespace endpoint'
+ end
+
+ describe 'GET /api/v4/projects/:id/packages/pypi/simple' do
+ let(:package_name) { package.name }
+ let(:url) { "/projects/#{project.id}/packages/pypi/simple" }
+ let(:snowplow_gitlab_standard_context) { { project: nil, namespace: group } }
+
+ it_behaves_like 'pypi simple index API endpoint'
+ it_behaves_like 'rejects PyPI access with unknown project id'
+ it_behaves_like 'deploy token for package GET requests'
+ it_behaves_like 'job token for package GET requests'
+
+ context 'with project path as id' do
+ let(:url) { "/projects/#{CGI.escape(project.full_path)}/packages/pypi/simple" }
+
+ it_behaves_like 'deploy token for package GET requests'
+ end
+ end
+ end
+
+ context 'simple package API endpoint' do
let_it_be(:package) { create(:pypi_package, project: project) }
subject { get api(url), headers: headers }
@@ -25,7 +86,7 @@ RSpec.describe API::PypiPackages do
describe 'GET /api/v4/groups/:id/-/packages/pypi/simple/:package_name' do
let(:package_name) { package.name }
let(:url) { "/groups/#{group.id}/-/packages/pypi/simple/#{package_name}" }
- let(:snowplow_gitlab_standard_context) { {} }
+ let(:snowplow_gitlab_standard_context) { { project: nil, namespace: group } }
it_behaves_like 'pypi simple API endpoint'
it_behaves_like 'rejects PyPI access with unknown group id'
diff --git a/spec/requests/api/release/links_spec.rb b/spec/requests/api/release/links_spec.rb
index 00326426af5..2345c0063dd 100644
--- a/spec/requests/api/release/links_spec.rb
+++ b/spec/requests/api/release/links_spec.rb
@@ -49,6 +49,20 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/links')
end
+
+ context 'when using JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: maintainer) }
+
+ it 'returns releases links' do
+ get api("/projects/#{project.id}/releases/v0.1/assets/links", job_token: job.token)
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('release/links')
+ expect(json_response.count).to eq(2)
+ end
+ end
+ end
end
context 'when release does not exist' do
@@ -116,6 +130,20 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/link')
end
+ context 'when using JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: maintainer) }
+
+ it 'returns releases link' do
+ get api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}", job_token: job.token)
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('release/link')
+ expect(json_response['name']).to eq(release_link.name)
+ end
+ end
+ end
+
context 'when specified tag is not found in the project' do
it_behaves_like '404 response' do
let(:request) { get api("/projects/#{project.id}/releases/non_existing_tag/assets/links/#{release_link.id}", maintainer) }
@@ -198,6 +226,25 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/link')
end
+ context 'when using JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: maintainer) }
+
+ it 'creates a new release link' do
+ expect do
+ post api("/projects/#{project.id}/releases/v0.1/assets/links"), params: params.merge(job_token: job.token)
+ end.to change { Releases::Link.count }.by(1)
+
+ release.reload
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(last_release_link.name).to eq('awesome-app.dmg')
+ expect(last_release_link.filepath).to eq('/binaries/awesome-app.dmg')
+ expect(last_release_link.url).to eq('https://example.com/download/awesome-app.dmg')
+ end
+ end
+ end
+
context 'with protected tag' do
context 'when user has access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
@@ -314,6 +361,20 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/link')
end
+ context 'when using JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: maintainer) }
+
+ it 'updates the release link' do
+ put api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}"), params: params.merge(job_token: job.token)
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('release/link')
+ expect(json_response['name']).to eq('awesome-app.msi')
+ end
+ end
+ end
+
context 'with protected tag' do
context 'when user has access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
@@ -411,6 +472,21 @@ RSpec.describe API::Release::Links do
expect(response).to match_response_schema('release/link')
end
+ context 'when using JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: maintainer) }
+
+ it 'deletes the release link' do
+ expect do
+ delete api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}", job_token: job.token)
+ end.to change { Releases::Link.count }.by(-1)
+
+ aggregate_failures "testing response" do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('release/link')
+ end
+ end
+ end
+
context 'with protected tag' do
context 'when user has access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
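Each of the five Release Links hunks above adds a near-identical 'when using JOB-TOKEN auth' context. A hedged consolidation sketch follows; the shared-example name and the request_path helper are illustrative assumptions, not part of this commit:

# Hypothetical consolidation of the repeated JOB-TOKEN contexts above.
# 'request_path' and the shared-example name are illustrative only.
RSpec.shared_examples 'accepts JOB-TOKEN auth for GET requests' do |schema|
  let(:job) { create(:ci_build, :running, user: maintainer) }

  it 'authorizes the request with the job token' do
    get api(request_path, job_token: job.token)

    expect(response).to have_gitlab_http_status(:ok)
    expect(response).to match_response_schema(schema)
  end
end

# Usage at each call site would then reduce to, for example:
#   it_behaves_like 'accepts JOB-TOKEN auth for GET requests', 'release/links' do
#     let(:request_path) { "/projects/#{project.id}/releases/v0.1/assets/links" }
#   end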
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 3c0f3a75f10..c050214ff50 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -227,6 +227,7 @@ RSpec.describe API::Releases do
get api("/projects/#{project.id}/releases", maintainer)
expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response[0]['tag_path']).to include('%2F') # properly escape the slash
end
end
@@ -928,6 +929,22 @@ RSpec.describe API::Releases do
expect(json_response['message']).to eq('Tag name invalid')
end
end
+
+ context 'when tag_message is provided' do
+ let(:tag_message) { 'Annotated tag message created by Release API' }
+
+ before do
+ params.merge!(tag_message: tag_message)
+ end
+
+ it 'creates an annotated tag with the tag message' do
+ expect do
+ post api("/projects/#{project.id}/releases", maintainer), params: params
+ end.to change { Project.find_by_id(project.id).repository.tag_count }.by(1)
+
+ expect(project.repository.find_tag(tag_name).message).to eq(tag_message)
+ end
+ end
end
context 'when release already exists' do
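The tag_message example above merges into a params hash that is defined outside this hunk. A minimal sketch of the full request, with the base params assumed rather than copied from the spec:

# Sketch only; the base params live elsewhere in the file and are assumed here.
params = {
  name: 'New release',                 # assumed
  tag_name: 'v0.1',                    # assumed
  description: 'Release description',  # assumed
  tag_message: 'Annotated tag message created by Release API'
}

post api("/projects/#{project.id}/releases", maintainer), params: params
# With tag_message present, the API creates an annotated tag carrying that message,
# which is what project.repository.find_tag(tag_name).message asserts above.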
diff --git a/spec/requests/api/system_hooks_spec.rb b/spec/requests/api/system_hooks_spec.rb
index d94b70ec0f9..2460a98129f 100644
--- a/spec/requests/api/system_hooks_spec.rb
+++ b/spec/requests/api/system_hooks_spec.rb
@@ -44,6 +44,8 @@ RSpec.describe API::SystemHooks do
expect(json_response.first['merge_requests_events']).to be false
expect(json_response.first['repository_update_events']).to be true
expect(json_response.first['enable_ssl_verification']).to be true
+ expect(json_response.first['disabled_until']).to be nil
+ expect(json_response.first['alert_status']).to eq 'executable'
end
end
end
@@ -79,10 +81,43 @@ RSpec.describe API::SystemHooks do
'tag_push_events' => be(hook.tag_push_events),
'merge_requests_events' => be(hook.merge_requests_events),
'repository_update_events' => be(hook.repository_update_events),
- 'enable_ssl_verification' => be(hook.enable_ssl_verification)
+ 'enable_ssl_verification' => be(hook.enable_ssl_verification),
+ 'alert_status' => eq(hook.alert_status.to_s),
+ 'disabled_until' => eq(hook.disabled_until&.iso8601(3))
)
end
+ context 'when the hook is disabled' do
+ before do
+ hook.disable!
+ end
+
+ it "has the correct alert status", :aggregate_failures do
+ get api("/hooks/#{hook.id}", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/system_hook')
+ expect(json_response).to include('alert_status' => 'disabled')
+ end
+ end
+
+ context 'when the hook is backed off' do
+ before do
+ hook.backoff!
+ end
+
+ it "has the correct alert status", :aggregate_failures do
+ get api("/hooks/#{hook.id}", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/system_hook')
+ expect(json_response).to include(
+ 'alert_status' => 'temporarily_disabled',
+ 'disabled_until' => hook.disabled_until.iso8601(3)
+ )
+ end
+ end
+
it 'returns 404 if the system hook does not exist' do
get api("/hooks/#{non_existing_record_id}", admin)
diff --git a/spec/requests/api/terraform/modules/v1/packages_spec.rb b/spec/requests/api/terraform/modules/v1/packages_spec.rb
index 7d86244cb1b..12bce4da011 100644
--- a/spec/requests/api/terraform/modules/v1/packages_spec.rb
+++ b/spec/requests/api/terraform/modules/v1/packages_spec.rb
@@ -17,12 +17,14 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
let(:headers) { {} }
+ let(:token) { tokens[token_type] }
let(:tokens) do
{
personal_access_token: personal_access_token.token,
deploy_token: deploy_token.token,
- job_token: job.token
+ job_token: job.token,
+ invalid: 'invalid-token123'
}
end
@@ -48,44 +50,43 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
end
context 'with valid namespace' do
- where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do
- :public | :developer | true | :personal_access_token | true | 'returns terraform module packages' | :success
- :public | :guest | true | :personal_access_token | true | 'returns terraform module packages' | :success
- :public | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | :personal_access_token | true | 'returns no terraform module packages' | :success
- :public | :guest | false | :personal_access_token | true | 'returns no terraform module packages' | :success
- :public | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :anonymous | false | :personal_access_token | true | 'returns no terraform module packages' | :success
- :private | :developer | true | :personal_access_token | true | 'returns terraform module packages' | :success
- :private | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | :job_token | true | 'returns terraform module packages' | :success
- :public | :guest | true | :job_token | true | 'returns no terraform module packages' | :success
- :public | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | :job_token | true | 'returns no terraform module packages' | :success
- :public | :guest | false | :job_token | true | 'returns no terraform module packages' | :success
- :public | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | :job_token | true | 'returns terraform module packages' | :success
- :private | :guest | true | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :guest | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | 'returns terraform module packages' | :success
+ :public | :guest | true | :personal_access_token | 'returns terraform module packages' | :success
+ :public | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | 'returns no terraform module packages' | :success
+ :public | :guest | false | :personal_access_token | 'returns no terraform module packages' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | 'returns no terraform module packages' | :success
+ :private | :developer | true | :personal_access_token | 'returns terraform module packages' | :success
+ :private | :guest | true | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | 'returns terraform module packages' | :success
+ :public | :guest | true | :job_token | 'returns no terraform module packages' | :success
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | 'returns no terraform module packages' | :success
+ :public | :guest | false | :job_token | 'returns no terraform module packages' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | 'returns terraform module packages' | :success
+ :private | :guest | true | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
end
with_them do
- let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
let(:headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
before do
@@ -104,48 +105,48 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
subject { get(url, headers: headers) }
context 'with valid namespace' do
- where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do
- :public | :developer | true | :personal_access_token | true | 'grants terraform module download' | :success
- :public | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :public | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :public | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :public | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :private | :developer | true | :personal_access_token | true | 'grants terraform module download' | :success
- :private | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | :job_token | true | 'grants terraform module download' | :success
- :public | :guest | true | :job_token | true | 'rejects terraform module packages access' | :not_found
- :public | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | :job_token | true | 'rejects terraform module packages access' | :not_found
- :public | :guest | false | :job_token | true | 'rejects terraform module packages access' | :not_found
- :public | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | :job_token | true | 'grants terraform module download' | :success
- :private | :guest | true | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :guest | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | 'grants terraform module download' | :success
+ :public | :guest | true | :personal_access_token | 'grants terraform module download' | :success
+ :public | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | 'grants terraform module download' | :success
+ :public | :guest | false | :personal_access_token | 'grants terraform module download' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | 'grants terraform module download' | :success
+ :private | :developer | true | :personal_access_token | 'grants terraform module download' | :success
+ :private | :guest | true | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | 'grants terraform module download' | :success
+ :public | :guest | true | :job_token | 'grants terraform module download' | :success
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | 'grants terraform module download' | :success
+ :public | :guest | false | :job_token | 'grants terraform module download' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | 'grants terraform module download' | :success
+ :private | :guest | true | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
end
with_them do
- let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
let(:headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
before do
group.update!(visibility: visibility.to_s)
+ project.update!(visibility: visibility.to_s)
end
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
@@ -158,55 +159,62 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
let(:tokens) do
{
personal_access_token: ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = personal_access_token.id }.encoded,
- job_token: ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = job.token }.encoded
+ job_token: ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = job.token }.encoded,
+ invalid: 'invalid-token123'
}
end
subject { get(url, headers: headers) }
context 'with valid namespace' do
- where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do
- :public | :developer | true | :personal_access_token | true | 'grants terraform module package file access' | :success
- :public | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :public | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :public | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :public | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :private | :developer | true | :personal_access_token | true | 'grants terraform module package file access' | :success
- :private | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | :job_token | true | 'grants terraform module package file access' | :success
- :public | :guest | true | :job_token | true | 'rejects terraform module packages access' | :not_found
- :public | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | :job_token | true | 'rejects terraform module packages access' | :not_found
- :public | :guest | false | :job_token | true | 'rejects terraform module packages access' | :not_found
- :public | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | :job_token | true | 'grants terraform module package file access' | :success
- :private | :guest | true | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :guest | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ where(:visibility, :user_role, :member, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :public | :guest | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :public | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | 'grants terraform module package file access' | :success
+ :public | :guest | false | :personal_access_token | 'grants terraform module package file access' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | 'grants terraform module package file access' | :success
+ :private | :developer | true | :personal_access_token | 'grants terraform module package file access' | :success
+ :private | :guest | true | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | 'grants terraform module package file access' | :success
+ :public | :guest | true | :job_token | 'grants terraform module package file access' | :success
+ :public | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | 'grants terraform module package file access' | :success
+ :public | :guest | false | :job_token | 'grants terraform module package file access' | :success
+ :public | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | 'grants terraform module package file access' | :success
+ :private | :guest | true | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :invalid | 'rejects terraform module packages access' | :unauthorized
end
with_them do
- let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
- let(:snowplow_gitlab_standard_context) { { project: project, user: user, namespace: project.namespace } }
+ let(:snowplow_gitlab_standard_context) do
+ {
+ project: project,
+ user: user_role == :anonymous ? nil : user,
+ namespace: project.namespace
+ }
+ end
before do
group.update!(visibility: visibility.to_s)
+ project.update!(visibility: visibility.to_s)
end
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
@@ -244,49 +252,48 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
subject { put(url, headers: headers) }
context 'with valid project' do
- where(:visibility, :user_role, :member, :token_header, :token_type, :valid_token, :shared_examples_name, :expected_status) do
- :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module workhorse authorization' | :success
- :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module workhorse authorization' | :success
- :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module workhorse authorization' | :success
- :public | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module workhorse authorization' | :success
- :private | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module workhorse authorization' | :success
- :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module workhorse authorization' | :success
- :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
+ where(:visibility, :user_role, :member, :token_header, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module workhorse authorization' | :success
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module workhorse authorization' | :success
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module workhorse authorization' | :success
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module workhorse authorization' | :success
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module workhorse authorization' | :success
+ :public | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module workhorse authorization' | :success
+ :private | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
end
with_them do
- let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
let(:headers) { user_headers.merge(workhorse_headers) }
let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
@@ -322,49 +329,48 @@ RSpec.describe API::Terraform::Modules::V1::Packages do
end
context 'with valid project' do
- where(:visibility, :user_role, :member, :token_header, :token_type, :valid_token, :shared_examples_name, :expected_status) do
- :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module upload' | :created
- :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module upload' | :created
- :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module upload' | :created
- :public | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :public | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module upload' | :created
- :private | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
- :private | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
- :private | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
- :private | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
- :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module upload' | :created
- :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
- :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module upload' | :created
- :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
+ where(:visibility, :user_role, :member, :token_header, :token_type, :shared_examples_name, :expected_status) do
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module upload' | :created
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | 'process terraform module upload' | :created
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'PRIVATE-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | nil | nil | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module upload' | :created
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | 'process terraform module upload' | :created
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'JOB-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module upload' | :created
+ :public | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | 'process terraform module upload' | :created
+ :private | :developer | true | 'DEPLOY-TOKEN' | :invalid | 'rejects terraform module packages access' | :unauthorized
end
with_them do
- let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
let(:headers) { user_headers.merge(workhorse_headers) }
let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, user: snowplow_user } }
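All four tables above make the same change: the boolean valid_token column is dropped and invalid credentials become a first-class :invalid entry in the tokens hash, so a single let(:token) { tokens[token_type] } resolves every row, including anonymous rows whose token_type is nil. A minimal sketch of the refactored pattern with the rows abbreviated:

# Minimal sketch of the refactored parameterized pattern (rows abbreviated).
let(:tokens) do
  {
    personal_access_token: personal_access_token.token,
    deploy_token: deploy_token.token,
    job_token: job.token,
    invalid: 'invalid-token123'
  }
end
let(:token) { tokens[token_type] }  # nil token_type resolves to nil for anonymous rows

where(:token_type, :expected_status) do
  :personal_access_token | :success
  :invalid               | :unauthorized
end

with_them do
  let(:headers) { token ? { 'Authorization' => "Bearer #{token}" } : {} }
end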
diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb
index ae1e461d433..e8458db4a4d 100644
--- a/spec/requests/api/terraform/state_spec.rb
+++ b/spec/requests/api/terraform/state_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Terraform::State do
+RSpec.describe API::Terraform::State, :snowplow do
include HttpBasicAuthHelpers
let_it_be(:project) { create(:project) }
@@ -25,11 +25,17 @@ RSpec.describe API::Terraform::State do
context 'without authentication' do
let(:auth_header) { basic_auth_header('bad', 'token') }
- it 'does not track unique event' do
+ it 'does not track unique hll event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
request
end
+
+ it 'does not track Snowplow event' do
+ request
+
+ expect_no_snowplow_event
+ end
end
context 'with maintainer permissions' do
@@ -39,6 +45,41 @@ RSpec.describe API::Terraform::State do
let(:target_event) { 'p_terraform_state_api_unique_users' }
let(:expected_value) { instance_of(Integer) }
end
+
+ it 'tracks Snowplow event' do
+ request
+
+ expect_snowplow_event(
+ category: described_class.to_s,
+ action: 'p_terraform_state_api_unique_users',
+ namespace: project.namespace.reload,
+ user: current_user
+ )
+ end
+
+ context 'when route_hll_to_snowplow_phase2 FF is disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ end
+
+ it 'does not track Snowplow event' do
+ request
+
+ expect_no_snowplow_event
+ end
+ end
+ end
+ end
+
+ shared_examples 'cannot access a state that is scheduled for deletion' do
+ before do
+ state.update!(deleted_at: Time.current)
+ end
+
+ it 'returns unprocessable entity' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
@@ -77,6 +118,8 @@ RSpec.describe API::Terraform::State do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ it_behaves_like 'cannot access a state that is scheduled for deletion'
end
context 'with developer permissions' do
@@ -162,6 +205,8 @@ RSpec.describe API::Terraform::State do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
+
+ it_behaves_like 'cannot access a state that is scheduled for deletion'
end
context 'without body' do
@@ -240,13 +285,19 @@ RSpec.describe API::Terraform::State do
context 'with maintainer permissions' do
let(:current_user) { maintainer }
+ let(:deletion_service) { instance_double(Terraform::States::TriggerDestroyService) }
+
+ it 'schedules the state for deletion and returns empty body' do
+ expect(Terraform::States::TriggerDestroyService).to receive(:new).and_return(deletion_service)
+ expect(deletion_service).to receive(:execute).once
- it 'deletes the state and returns empty body' do
- expect { request }.to change { Terraform::State.count }.by(-1)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(Gitlab::Json.parse(response.body)).to be_empty
end
+
+ it_behaves_like 'cannot access a state that is scheduled for deletion'
end
context 'with developer permissions' do
@@ -276,6 +327,7 @@ RSpec.describe API::Terraform::State do
subject(:request) { post api("#{state_path}/lock"), headers: auth_header, params: params }
it_behaves_like 'endpoint with unique user tracking'
+ it_behaves_like 'cannot access a state that is scheduled for deletion'
it 'locks the terraform state' do
request
@@ -330,6 +382,10 @@ RSpec.describe API::Terraform::State do
let(:lock_id) { 'irrelevant to this test, just needs to be present' }
end
+ it_behaves_like 'cannot access a state that is scheduled for deletion' do
+ let(:lock_id) { 'irrelevant to this test, just needs to be present' }
+ end
+
context 'with the correct lock id' do
let(:lock_id) { '123-456' }
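The state_spec hunks above change two behaviours: DELETE now delegates to Terraform::States::TriggerDestroyService instead of destroying the row, and any state with deleted_at set is rejected across GET, lock, unlock, and DELETE. The guard itself reduces to this sketch, reusing the spec's existing request subject:

# Sketch of the 'scheduled for deletion' guard shared by the endpoints above.
state.update!(deleted_at: Time.current)

request

expect(response).to have_gitlab_http_status(:unprocessable_entity)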
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 2c5a734a0e1..d4dc7375e9e 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1694,6 +1694,111 @@ RSpec.describe API::Users do
end
end
+ describe 'GET /users/:id/project_deploy_keys' do
+ let(:project) { create(:project) }
+
+ before do
+ project.add_maintainer(user)
+
+ deploy_key = create(:deploy_key, user: user)
+ create(:deploy_keys_project, project: project, deploy_key_id: deploy_key.id)
+ end
+
+ it 'returns 404 for non-existing user' do
+ get api("/users/#{non_existing_record_id}/project_deploy_keys")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
+ end
+
+ it 'returns array of project deploy keys with pagination' do
+ get api("/users/#{user.id}/project_deploy_keys", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.first['title']).to eq(user.deploy_keys.first.title)
+ end
+
+ it 'returns forbidden when a developer fetches maintainer keys' do
+ dev_user = create(:user)
+ project.add_developer(dev_user)
+
+ get api("/users/#{user.id}/project_deploy_keys", dev_user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden - No common authorized project found')
+ end
+
+ context 'with multiple projects' do
+ let(:second_project) { create(:project) }
+ let(:second_user) { create(:user) }
+
+ before do
+ second_project.add_maintainer(second_user)
+
+ deploy_key = create(:deploy_key, user: second_user)
+ create(:deploy_keys_project, project: second_project, deploy_key_id: deploy_key.id)
+ end
+
+ context 'when no common projects for user and current_user' do
+ it 'returns forbidden' do
+ get api("/users/#{user.id}/project_deploy_keys", second_user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden - No common authorized project found')
+ end
+ end
+
+ context 'when there are common projects for user and current_user' do
+ before do
+ project.add_maintainer(second_user)
+ end
+
+ it 'lists only common project keys' do
+ expect(second_user.project_deploy_keys).to contain_exactly(
+ project.deploy_keys.first, second_project.deploy_keys.first)
+
+ get api("/users/#{second_user.id}/project_deploy_keys", user)
+
+ expect(json_response.count).to eq(1)
+ expect(json_response.first['key']).to eq(project.deploy_keys.first.key)
+ end
+
+ it 'lists only project_deploy_keys and not user deploy_keys' do
+ third_user = create(:user)
+
+ project.add_maintainer(third_user)
+ second_project.add_maintainer(third_user)
+
+ create(:deploy_key, user: second_user)
+ create(:deploy_key, user: third_user)
+
+ get api("/users/#{second_user.id}/project_deploy_keys", third_user)
+
+ expect(json_response.count).to eq(2)
+ expect([json_response.first['key'], json_response.second['key']]).to contain_exactly(
+ project.deploy_keys.first.key, second_project.deploy_keys.first.key)
+ end
+
+ it 'avoids N+1 queries' do
+ second_project.add_maintainer(user)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ get api("/users/#{second_user.id}/project_deploy_keys", user)
+ end.count
+
+ deploy_key = create(:deploy_key, user: second_user)
+ create(:deploy_keys_project, project: second_project, deploy_key_id: deploy_key.id)
+
+ expect do
+ get api("/users/#{second_user.id}/project_deploy_keys", user)
+ end.not_to exceed_query_limit(control_count)
+ end
+ end
+ end
+ end
+
describe 'GET /user/:id/keys' do
it 'returns 404 for non-existing user' do
get api("/users/#{non_existing_record_id}/keys")
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index 06ae61ca5eb..f4096eef8d0 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -256,6 +256,24 @@ RSpec.describe API::Wikis do
include_examples 'wiki API 404 Wiki Page Not Found'
end
end
+
+ context 'when content contains a reference' do
+ let(:issue) { create(:issue, project: project) }
+ let(:params) { { render_html: true } }
+ let(:page) { create(:wiki_page, wiki: project.wiki, title: 'page_with_ref', content: issue.to_reference) }
+ let(:expected_content) { %r{<a href=".*#{issue.iid}".*>#{issue.to_reference}</a>} }
+
+ before do
+ project.add_developer(user)
+
+ request
+ end
+
+ it 'expands the reference in the content' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['content']).to match(expected_content)
+ end
+ end
end
end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 38c8d43376e..05b16119a0e 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -245,6 +245,16 @@ RSpec.describe 'Git HTTP requests' do
end
end
+ context 'when project name is missing' do
+ let(:path) { "/#{user.namespace.path}/info/refs" }
+
+ it 'does not redirect to the incorrect path' do
+ get path
+
+ expect(response).not_to redirect_to("/#{user.namespace.path}.git/info/refs")
+ end
+ end
+
it_behaves_like 'project path without .git suffix' do
let(:repository_path) { "#{user.namespace.path}/project.git-project" }
end
diff --git a/spec/requests/groups/crm/contacts_controller_spec.rb b/spec/requests/groups/crm/contacts_controller_spec.rb
index 0ee72233418..70086ddbbba 100644
--- a/spec/requests/groups/crm/contacts_controller_spec.rb
+++ b/spec/requests/groups/crm/contacts_controller_spec.rb
@@ -42,14 +42,6 @@ RSpec.describe Groups::Crm::ContactsController do
it_behaves_like 'response with 404 status'
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it_behaves_like 'response with 404 status'
- end
-
context 'when subgroup' do
let(:group) { create(:group, :private, :crm_enabled, parent: create(:group)) }
diff --git a/spec/requests/groups/crm/organizations_controller_spec.rb b/spec/requests/groups/crm/organizations_controller_spec.rb
index 410fc979262..e841dd80b67 100644
--- a/spec/requests/groups/crm/organizations_controller_spec.rb
+++ b/spec/requests/groups/crm/organizations_controller_spec.rb
@@ -42,14 +42,6 @@ RSpec.describe Groups::Crm::OrganizationsController do
it_behaves_like 'response with 404 status'
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it_behaves_like 'response with 404 status'
- end
-
context 'when subgroup' do
let(:group) { create(:group, :private, :crm_enabled, parent: create(:group)) }
diff --git a/spec/requests/ide_controller_spec.rb b/spec/requests/ide_controller_spec.rb
index 4bf1e43ba40..151fa89b819 100644
--- a/spec/requests/ide_controller_spec.rb
+++ b/spec/requests/ide_controller_spec.rb
@@ -208,6 +208,31 @@ RSpec.describe IdeController do
it_behaves_like 'user access rights check'
end
+
+ describe 'Snowplow view event', :snowplow do
+ it 'is tracked' do
+ subject
+
+ expect_snowplow_event(
+ category: described_class.to_s,
+ action: 'web_ide_views',
+ namespace: project.namespace,
+ user: user
+ )
+ end
+
+ context 'when route_hll_to_snowplow_phase2 FF is disabled' do
+ before do
+ stub_feature_flags(route_hll_to_snowplow_phase2: false)
+ end
+
+ it 'does not track Snowplow event' do
+ subject
+
+ expect_no_snowplow_event
+ end
+ end
+ end
end
end
end
diff --git a/spec/requests/jira_connect/oauth_application_ids_controller_spec.rb b/spec/requests/jira_connect/oauth_application_ids_controller_spec.rb
new file mode 100644
index 00000000000..ffeaf1075f3
--- /dev/null
+++ b/spec/requests/jira_connect/oauth_application_ids_controller_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::OauthApplicationIdsController do
+ describe 'GET /-/jira_connect/oauth_application_id' do
+ before do
+ stub_application_setting(jira_connect_application_key: '123456')
+ end
+
+ it 'renders the jira connect application id' do
+ get '/-/jira_connect/oauth_application_id'
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({ "application_id" => "123456" })
+ end
+
+ context 'when the application ID is empty' do
+ before do
+ stub_application_setting(jira_connect_application_key: '')
+ end
+
+ it 'renders not found' do
+ get '/-/jira_connect/oauth_application_id'
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when jira_connect_oauth_self_managed is disabled' do
+ before do
+ stub_feature_flags(jira_connect_oauth_self_managed: false)
+ end
+
+ it 'renders not found' do
+ get '/-/jira_connect/oauth_application_id'
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/mailgun/webhooks_controller_spec.rb b/spec/requests/mailgun/webhooks_controller_spec.rb
new file mode 100644
index 00000000000..ae6dc89d003
--- /dev/null
+++ b/spec/requests/mailgun/webhooks_controller_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mailgun::WebhooksController do
+ let(:mailgun_signing_key) { 'abc123' }
+ let(:valid_signature) do
+ {
+ timestamp: "1625056677",
+ token: "eb944d0ace7227667a1b97d2d07276ae51d2b849ed2cfa68f3",
+ signature: "9790cc6686eb70f0b1f869180d906870cdfd496d27fee81da0aa86b9e539e790"
+ }
+ end
+
+ let(:event_data) { {} }
+
+ before do
+ stub_application_setting(mailgun_events_enabled: true, mailgun_signing_key: mailgun_signing_key)
+ end
+
+ def post_request(override_params = {})
+ post mailgun_webhooks_path, params: standard_params.merge(override_params)
+ end
+
+ describe '#process_webhook' do
+ it 'returns 406 when integration is not enabled' do
+ stub_application_setting(mailgun_events_enabled: false)
+
+ post_request
+
+ expect(response).to have_gitlab_http_status(:not_acceptable)
+ end
+
+ it 'returns 404 when signing key is not configured' do
+ stub_application_setting(mailgun_signing_key: nil)
+
+ post_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 404 when the signature is invalid' do
+ post_request(
+ 'signature' => valid_signature.merge('signature' => 'xxx')
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 200 when signature is valid' do
+ post_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with invite email failures' do
+ let_it_be(:member) { create(:project_member, :invited) }
+
+ let(:event_data) do
+ {
+ 'event': 'failed',
+ 'severity': 'permanent',
+ 'tags': [Members::Mailgun::INVITE_EMAIL_TAG],
+ 'user-variables': {
+ Members::Mailgun::INVITE_EMAIL_TOKEN_KEY => member.raw_invite_token
+ }
+ }
+ end
+
+ it 'marks the member invite email success as false' do
+ expect { post_request }.to change { member.reload.invite_email_success }.from(true).to(false)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'supports legacy URL' do
+ expect do
+ post members_mailgun_permanent_failures_path, params: {
+ 'signature' => valid_signature,
+ 'event-data' => event_data
+ }
+ end.to change { member.reload.invite_email_success }.from(true).to(false)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'does not change the invite status if failure is temporary' do
+ expect do
+ post_request({ 'event-data' => event_data.merge(severity: 'temporary') })
+ end.not_to change { member.reload.invite_email_success }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ def standard_params
+ {
+ "signature": valid_signature,
+ "event-data": {
+ "severity": "permanent",
+ "tags": ["invite_email"],
+ "timestamp": 1521233195.375624,
+ "storage": {
+ "url": "_anything_",
+ "key": "_anything_"
+ },
+ "log-level": "error",
+ "id": "_anything_",
+ "campaigns": [],
+ "reason": "suppress-bounce",
+ "user-variables": {
+ "invite_token": '12345'
+ },
+ "flags": {
+ "is-routed": false,
+ "is-authenticated": true,
+ "is-system-test": false,
+ "is-test-mode": false
+ },
+ "recipient-domain": "example.com",
+ "envelope": {
+ "sender": "bob@mg.gitlab.com",
+ "transport": "smtp",
+ "targets": "alice@example.com"
+ },
+ "message": {
+ "headers": {
+ "to": "Alice <alice@example.com>",
+ "message-id": "20130503192659.13651.20287@mg.gitlab.com",
+ "from": "Bob <bob@mg.gitlab.com>",
+ "subject": "Test permanent_fail webhook"
+ },
+ "attachments": [],
+ "size": 111
+ },
+ "recipient": "alice@example.com",
+ "event": "failed",
+ "delivery-status": {
+ "attempt-no": 1,
+ "message": "",
+ "code": 605,
+ "description": "Not delivering to previously bounced address",
+ "session-seconds": 0
+ }
+ }.merge(event_data)
+ }
+ end
+end
diff --git a/spec/requests/members/mailgun/permanent_failure_spec.rb b/spec/requests/members/mailgun/permanent_failure_spec.rb
deleted file mode 100644
index e47aedf8e94..00000000000
--- a/spec/requests/members/mailgun/permanent_failure_spec.rb
+++ /dev/null
@@ -1,128 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'receive a permanent failure' do
- describe 'POST /members/mailgun/permanent_failures', :aggregate_failures do
- let_it_be(:member) { create(:project_member, :invited) }
-
- let(:raw_invite_token) { member.raw_invite_token }
- let(:mailgun_events) { true }
- let(:mailgun_signing_key) { 'abc123' }
-
- subject(:post_request) { post members_mailgun_permanent_failures_path(standard_params) }
-
- before do
- stub_application_setting(mailgun_events_enabled: mailgun_events, mailgun_signing_key: mailgun_signing_key)
- end
-
- it 'marks the member invite email success as false' do
- expect { post_request }.to change { member.reload.invite_email_success }.from(true).to(false)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- context 'when the change to a member is not made' do
- context 'with incorrect signing key' do
- context 'with incorrect signing key' do
- let(:mailgun_signing_key) { '_foobar_' }
-
- it 'does not change member status and responds as not_found' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with nil signing key' do
- let(:mailgun_signing_key) { nil }
-
- it 'does not change member status and responds as not_found' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'when the feature is not enabled' do
- let(:mailgun_events) { false }
-
- it 'does not change member status and responds as expected' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_acceptable)
- end
- end
-
- context 'when it is not an invite email' do
- before do
- stub_const('::Members::Mailgun::INVITE_EMAIL_TAG', '_foobar_')
- end
-
- it 'does not change member status and responds as expected' do
- expect { post_request }.not_to change { member.reload.invite_email_success }
-
- expect(response).to have_gitlab_http_status(:not_acceptable)
- end
- end
- end
-
- def standard_params
- {
- "signature": {
- "timestamp": "1625056677",
- "token": "eb944d0ace7227667a1b97d2d07276ae51d2b849ed2cfa68f3",
- "signature": "9790cc6686eb70f0b1f869180d906870cdfd496d27fee81da0aa86b9e539e790"
- },
- "event-data": {
- "severity": "permanent",
- "tags": ["invite_email"],
- "timestamp": 1521233195.375624,
- "storage": {
- "url": "_anything_",
- "key": "_anything_"
- },
- "log-level": "error",
- "id": "_anything_",
- "campaigns": [],
- "reason": "suppress-bounce",
- "user-variables": {
- "invite_token": raw_invite_token
- },
- "flags": {
- "is-routed": false,
- "is-authenticated": true,
- "is-system-test": false,
- "is-test-mode": false
- },
- "recipient-domain": "example.com",
- "envelope": {
- "sender": "bob@mg.gitlab.com",
- "transport": "smtp",
- "targets": "alice@example.com"
- },
- "message": {
- "headers": {
- "to": "Alice <alice@example.com>",
- "message-id": "20130503192659.13651.20287@mg.gitlab.com",
- "from": "Bob <bob@mg.gitlab.com>",
- "subject": "Test permanent_fail webhook"
- },
- "attachments": [],
- "size": 111
- },
- "recipient": "alice@example.com",
- "event": "failed",
- "delivery-status": {
- "attempt-no": 1,
- "message": "",
- "code": 605,
- "description": "Not delivering to previously bounced address",
- "session-seconds": 0
- }
- }
- }
- end
- end
-end
diff --git a/spec/requests/oauth/authorizations_controller_spec.rb b/spec/requests/oauth/authorizations_controller_spec.rb
new file mode 100644
index 00000000000..8d19c92865e
--- /dev/null
+++ b/spec/requests/oauth/authorizations_controller_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Oauth::AuthorizationsController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:application) { create(:oauth_application, redirect_uri: 'custom://test') }
+ let_it_be(:oauth_authorization_path) do
+ Gitlab::Routing.url_helpers.oauth_authorization_url(
+ client_id: application.uid,
+ response_type: 'code',
+ scope: application.scopes,
+ redirect_uri: application.redirect_uri,
+ state: SecureRandom.hex
+ )
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET #new' do
+ context 'when application redirect URI has a custom scheme' do
+ context 'when CSP is disabled' do
+ before do
+ allow_next_instance_of(ActionDispatch::Request) do |instance|
+ allow(instance).to receive(:content_security_policy).and_return(nil)
+ end
+ end
+
+ it 'does not add a CSP' do
+ get oauth_authorization_path
+
+ expect(response.headers['Content-Security-Policy']).to be_nil
+ end
+ end
+
+ context 'when CSP contains form-action' do
+ before do
+ csp = ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.form_action "'self'"
+ end
+
+ allow_next_instance_of(ActionDispatch::Request) do |instance|
+ allow(instance).to receive(:content_security_policy).and_return(csp)
+ end
+ end
+
+ it 'adds custom scheme to CSP form-action' do
+ get oauth_authorization_path
+
+ expect(response.headers['Content-Security-Policy']).to include("form-action 'self' custom:")
+ end
+ end
+
+ context 'when CSP does not contain form-action' do
+ before do
+ csp = ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.script_src :self, 'https://some-cdn.test'
+ p.style_src :self, 'https://some-cdn.test'
+ end
+
+ allow_next_instance_of(ActionDispatch::Request) do |instance|
+ allow(instance).to receive(:content_security_policy).and_return(csp)
+ end
+ end
+
+ it 'does not add form-action to the CSP' do
+ get oauth_authorization_path
+
+ expect(response.headers['Content-Security-Policy']).not_to include('form-action')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/oauth/tokens_controller_spec.rb b/spec/requests/oauth/tokens_controller_spec.rb
index 1967d0ba8b1..3895304dbde 100644
--- a/spec/requests/oauth/tokens_controller_spec.rb
+++ b/spec/requests/oauth/tokens_controller_spec.rb
@@ -6,11 +6,12 @@ RSpec.describe Oauth::TokensController do
let(:cors_request_headers) { { 'Origin' => 'http://notgitlab.com' } }
let(:other_headers) { {} }
let(:headers) { cors_request_headers.merge(other_headers)}
+ let(:allowed_methods) { 'POST, OPTIONS' }
shared_examples 'cross-origin POST request' do
it 'allows cross-origin requests' do
expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
- expect(response.headers['Access-Control-Allow-Methods']).to eq 'POST'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq allowed_methods
expect(response.headers['Access-Control-Allow-Headers']).to be_nil
expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
end
@@ -23,7 +24,7 @@ RSpec.describe Oauth::TokensController do
it 'allows cross-origin requests' do
expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
- expect(response.headers['Access-Control-Allow-Methods']).to eq 'POST'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq allowed_methods
expect(response.headers['Access-Control-Allow-Headers']).to eq 'Authorization'
expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
end
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index 70a310ba0d5..c647fee1564 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe 'OpenID Connect requests' do
shared_examples 'cross-origin GET and POST request' do
it 'allows cross-origin request' do
expect(response.headers['Access-Control-Allow-Origin']).to eq '*'
- expect(response.headers['Access-Control-Allow-Methods']).to eq 'GET, HEAD, POST'
+ expect(response.headers['Access-Control-Allow-Methods']).to eq 'GET, HEAD, POST, OPTIONS'
expect(response.headers['Access-Control-Allow-Headers']).to be_nil
expect(response.headers['Access-Control-Allow-Credentials']).to be_nil
end
diff --git a/spec/requests/projects/environments_controller_spec.rb b/spec/requests/projects/environments_controller_spec.rb
new file mode 100644
index 00000000000..5cdf507abef
--- /dev/null
+++ b/spec/requests/projects/environments_controller_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::EnvironmentsController do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:environment) { create(:environment, name: 'production', project: project) }
+
+ describe 'GET #show' do
+ subject { get project_environment_path(project, environment) }
+
+ before do
+ sign_in(project.owner)
+ end
+
+ include_examples 'avoids N+1 queries on environment detail page'
+ end
+
+ def environment_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace,
+ project_id: project,
+ id: environment.id)
+ end
+
+ def create_deployment_with_associations(commit_depth:)
+ commit = project.commit("HEAD~#{commit_depth}")
+ create(:user, email: commit.author_email) unless User.find_by(email: commit.author_email)
+
+ deployer = create(:user)
+ pipeline = create(:ci_pipeline, project: environment.project)
+ build = create(:ci_build, environment: environment.name, pipeline: pipeline, user: deployer)
+ create(:deployment, :success, environment: environment, deployable: build, user: deployer,
+ project: project, sha: commit.sha)
+ end
+end
diff --git a/spec/requests/projects/issue_links_controller_spec.rb b/spec/requests/projects/issue_links_controller_spec.rb
index 3447ff83ed8..81fd1adb1fd 100644
--- a/spec/requests/projects/issue_links_controller_spec.rb
+++ b/spec/requests/projects/issue_links_controller_spec.rb
@@ -32,7 +32,10 @@ RSpec.describe Projects::IssueLinksController do
get namespace_project_issue_links_path(issue_links_params)
expect(json_response.count).to eq(1)
- expect(json_response.first).to include('path' => project_work_items_path(issue_b.project, issue_b.id))
+ expect(json_response.first).to include(
+ 'path' => project_work_items_path(issue_b.project, issue_b.id),
+ 'type' => 'TASK'
+ )
end
end
end
diff --git a/spec/requests/projects/merge_requests_controller_spec.rb b/spec/requests/projects/merge_requests_controller_spec.rb
index 3b1ce569033..6580fc8b80f 100644
--- a/spec/requests/projects/merge_requests_controller_spec.rb
+++ b/spec/requests/projects/merge_requests_controller_spec.rb
@@ -3,6 +3,70 @@
require 'spec_helper'
RSpec.describe Projects::MergeRequestsController do
+ describe 'GET #discussions' do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:project) { merge_request.project }
+ let_it_be(:user) { merge_request.author }
+ let_it_be(:discussion) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+ let_it_be(:discussion_reply) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project, in_reply_to: discussion) }
+ let_it_be(:state_event) { create(:resource_state_event, merge_request: merge_request) }
+ let_it_be(:discussion_2) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
+ let_it_be(:discussion_3) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) }
+
+ before do
+ login_as(user)
+ end
+
+ context 'pagination' do
+ def get_discussions(**params)
+ get discussions_project_merge_request_path(project, merge_request, params: params.merge(format: :json))
+ end
+
+ it 'returns paginated notes and cursor based on per_page param' do
+ get_discussions(per_page: 2)
+
+ discussions = Gitlab::Json.parse(response.body)
+ notes = discussions.flat_map { |d| d['notes'] }
+
+ expect(discussions.count).to eq(2)
+ expect(notes).to match([
+ a_hash_including('id' => discussion.id.to_s),
+ a_hash_including('id' => discussion_reply.id.to_s),
+ a_hash_including('type' => 'StateNote')
+ ])
+
+ cursor = response.header['X-Next-Page-Cursor']
+ expect(cursor).to be_present
+
+ get_discussions(per_page: 1, cursor: cursor)
+
+ discussions = Gitlab::Json.parse(response.body)
+ notes = discussions.flat_map { |d| d['notes'] }
+
+ expect(discussions.count).to eq(1)
+ expect(notes).to match([
+ a_hash_including('id' => discussion_2.id.to_s)
+ ])
+ end
+
+ context 'when paginated_mr_discussions is disabled' do
+ before do
+ stub_feature_flags(paginated_mr_discussions: false)
+ end
+
+ it 'returns all discussions and ignores per_page param' do
+ get_discussions(per_page: 2)
+
+ discussions = Gitlab::Json.parse(response.body)
+ notes = discussions.flat_map { |d| d['notes'] }
+
+ expect(discussions.count).to eq(4)
+ expect(notes.count).to eq(5)
+ end
+ end
+ end
+ end
+
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :public) }
diff --git a/spec/requests/pwa_controller_spec.rb b/spec/requests/pwa_controller_spec.rb
index f74f37ea9d0..7a295b17231 100644
--- a/spec/requests/pwa_controller_spec.rb
+++ b/spec/requests/pwa_controller_spec.rb
@@ -3,6 +3,15 @@
require 'spec_helper'
RSpec.describe PwaController do
+ describe 'GET #manifest' do
+ it 'responds with json' do
+ get manifest_path(format: :json)
+
+ expect(response.body).to include('The complete DevOps platform.')
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
describe 'GET #offline' do
it 'responds with static HTML page' do
get offline_path
diff --git a/spec/requests/robots_txt_spec.rb b/spec/requests/robots_txt_spec.rb
index f6c9b018c68..7c0b7d8117a 100644
--- a/spec/requests/robots_txt_spec.rb
+++ b/spec/requests/robots_txt_spec.rb
@@ -37,6 +37,9 @@ RSpec.describe 'Robots.txt Requests', :aggregate_failures do
'/dashboard',
'/users',
'/users/foo',
+ '/users/foo@email.com/captcha_check',
+ '/users/foo/captcha_check',
+ '/api/v1/users/foo/captcha_check',
'/help',
'/s/',
'/-/profile',
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 21012399edf..47fd1622306 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -772,6 +772,58 @@ RSpec.describe 'project routing' do
end
end
+ describe Projects::Settings::IntegrationsController, 'routing' do
+ it 'to #index' do
+ expect(get('/gitlab/gitlabhq/-/settings/integrations')).to route_to('projects/settings/integrations#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+
+ it 'to #edit' do
+ expect(get('/gitlab/gitlabhq/-/settings/integrations/acme/edit')).to route_to('projects/settings/integrations#edit', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
+ end
+
+ it 'to #update' do
+ expect(put('/gitlab/gitlabhq/-/settings/integrations/acme')).to route_to('projects/settings/integrations#update', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
+ end
+
+ it 'to #test' do
+ expect(put('/gitlab/gitlabhq/-/settings/integrations/acme/test')).to route_to('projects/settings/integrations#test', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
+ end
+
+ context 'legacy routes' do
+ it 'to #edit' do
+ expect(get('/gitlab/gitlabhq/-/integrations/acme/edit')).to route_to('projects/settings/integrations#edit', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
+ end
+
+ it 'to #update' do
+ expect(put('/gitlab/gitlabhq/-/integrations/acme')).to route_to('projects/settings/integrations#update', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
+ end
+
+ it 'to #test' do
+ expect(put('/gitlab/gitlabhq/-/integrations/acme/test')).to route_to('projects/settings/integrations#test', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'acme')
+ end
+ end
+ end
+
+ describe Projects::Settings::IntegrationHookLogsController do
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/-/settings/integrations/acme/hook_logs/log')).to route_to('projects/settings/integration_hook_logs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', integration_id: 'acme', id: 'log')
+ end
+
+ it 'to #retry' do
+ expect(post('/gitlab/gitlabhq/-/settings/integrations/acme/hook_logs/log/retry')).to route_to('projects/settings/integration_hook_logs#retry', namespace_id: 'gitlab', project_id: 'gitlabhq', integration_id: 'acme', id: 'log')
+ end
+
+ context 'legacy routes' do
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/-/integrations/acme/hook_logs/log')).to route_to('projects/settings/integration_hook_logs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', integration_id: 'acme', id: 'log')
+ end
+
+ it 'to #retry' do
+ expect(post('/gitlab/gitlabhq/-/integrations/acme/hook_logs/log/retry')).to route_to('projects/settings/integration_hook_logs#retry', namespace_id: 'gitlab', project_id: 'gitlabhq', integration_id: 'acme', id: 'log')
+ end
+ end
+ end
+
describe Projects::TemplatesController, 'routing' do
describe '#show' do
def show_with_template_type(template_type)
@@ -808,15 +860,6 @@ RSpec.describe 'project routing' do
end
end
- describe Projects::StaticSiteEditorController, 'routing' do
- it 'routes to static_site_editor#show', :aggregate_failures do
- expect(get('/gitlab/gitlabhq/-/sse/master%2FCONTRIBUTING.md')).to route_to('projects/static_site_editor#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/CONTRIBUTING.md')
- expect(get('/gitlab/gitlabhq/-/sse/master%2FCONTRIBUTING.md/')).to route_to('projects/static_site_editor#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/CONTRIBUTING.md')
- expect(get('/gitlab/gitlabhq/-/sse/master%2FREADME/unsupported/error')).to route_to('projects/static_site_editor#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/README', vueroute: 'unsupported/error')
- expect(get('/gitlab/gitlabhq/-/sse/master%2Flib%2FREADME/success')).to route_to('projects/static_site_editor#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master/lib/README', vueroute: 'success')
- end
- end
-
describe Projects::EnvironmentsController, 'routing' do
describe 'legacy routing' do
it_behaves_like 'redirecting a legacy path', "/gitlab/gitlabhq/environments", "/gitlab/gitlabhq/-/environments"
diff --git a/spec/rubocop/cop/migration/background_migrations_spec.rb b/spec/rubocop/cop/migration/background_migrations_spec.rb
new file mode 100644
index 00000000000..3242211ab47
--- /dev/null
+++ b/spec/rubocop/cop/migration/background_migrations_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/migration/background_migrations'
+
+RSpec.describe RuboCop::Cop::Migration::BackgroundMigrations do
+ let(:cop) { described_class.new }
+
+ context 'when queue_background_migration_jobs_by_range_at_intervals is used' do
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ def up
+ queue_background_migration_jobs_by_range_at_intervals('example', 'example', 1, batch_size: 1, track_jobs: true)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Background migrations are deprecated. Please use a Batched Background Migration instead[...]
+ end
+ RUBY
+ end
+ end
+
+ context 'when requeue_background_migration_jobs_by_range_at_intervals is used' do
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ def up
+ requeue_background_migration_jobs_by_range_at_intervals('example', 1)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Background migrations are deprecated. Please use a Batched Background Migration instead[...]
+ end
+ RUBY
+ end
+ end
+
+ context 'when migrate_in is used' do
+ it 'registers an offense' do
+ expect_offense(<<~RUBY)
+ def up
+ migrate_in(1, 'example', 1, ['example'])
+ ^^^^^^^^^^ Background migrations are deprecated. Please use a Batched Background Migration instead[...]
+ end
+ RUBY
+ end
+ end
+end
diff --git a/spec/rubocop/cop/migration/migration_record_spec.rb b/spec/rubocop/cop/migration/migration_record_spec.rb
index bab0ca469df..bfe6228c421 100644
--- a/spec/rubocop/cop/migration/migration_record_spec.rb
+++ b/spec/rubocop/cop/migration/migration_record_spec.rb
@@ -6,53 +6,55 @@ require_relative '../../../../rubocop/cop/migration/migration_record'
RSpec.describe RuboCop::Cop::Migration::MigrationRecord do
subject(:cop) { described_class.new }
- shared_examples 'a disabled cop' do
+ shared_examples 'a disabled cop' do |klass|
it 'does not register any offenses' do
expect_no_offenses(<<~SOURCE)
class MyMigration < Gitlab::Database::Migration[2.0]
- class Project < ActiveRecord::Base
+ class Project < #{klass}
end
end
SOURCE
end
end
- context 'outside of a migration' do
- it_behaves_like 'a disabled cop'
- end
-
- context 'in migration' do
- before do
- allow(cop).to receive(:in_migration?).and_return(true)
+ %w(ActiveRecord::Base ApplicationRecord).each do |klass|
+ context 'outside of a migration' do
+ it_behaves_like 'a disabled cop', klass
end
- context 'in an old migration' do
+ context 'in migration' do
before do
- allow(cop).to receive(:version).and_return(described_class::ENFORCED_SINCE - 5)
+ allow(cop).to receive(:in_migration?).and_return(true)
end
- it_behaves_like 'a disabled cop'
- end
+ context 'in an old migration' do
+ before do
+ allow(cop).to receive(:version).and_return(described_class::ENFORCED_SINCE - 5)
+ end
- context 'that is recent' do
- before do
- allow(cop).to receive(:version).and_return(described_class::ENFORCED_SINCE)
+ it_behaves_like 'a disabled cop', klass
end
- it 'adds an offense if inheriting from ActiveRecord::Base' do
- expect_offense(<<~RUBY)
- class Project < ActiveRecord::Base
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't inherit from ActiveRecord::Base but use MigrationRecord instead.[...]
+ context 'that is recent' do
+ before do
+ allow(cop).to receive(:version).and_return(described_class::ENFORCED_SINCE)
+ end
+
+ it "adds an offense if inheriting from #{klass}" do
+ expect_offense(<<~RUBY)
+ class Project < #{klass}
+ ^^^^^^^^^^^^^^^^#{'^' * klass.length} Don't inherit from ActiveRecord::Base or ApplicationRecord but use MigrationRecord instead.[...]
end
- RUBY
- end
+ RUBY
+ end
- it 'adds an offense if inheriting from ::ActiveRecord::Base' do
- expect_offense(<<~RUBY)
- class Project < ::ActiveRecord::Base
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Don't inherit from ActiveRecord::Base but use MigrationRecord instead.[...]
+ it "adds an offense if inheriting from ::#{klass}" do
+ expect_offense(<<~RUBY)
+ class Project < ::#{klass}
+ ^^^^^^^^^^^^^^^^^^#{'^' * klass.length} Don't inherit from ActiveRecord::Base or ApplicationRecord but use MigrationRecord instead.[...]
end
- RUBY
+ RUBY
+ end
end
end
end
diff --git a/spec/rubocop/cop/static_translation_definition_spec.rb b/spec/rubocop/cop/static_translation_definition_spec.rb
index 554a7c17a4b..372fc194c56 100644
--- a/spec/rubocop/cop/static_translation_definition_spec.rb
+++ b/spec/rubocop/cop/static_translation_definition_spec.rb
@@ -9,11 +9,7 @@ require_relative '../../../rubocop/cop/static_translation_definition'
RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
using RSpec::Parameterized::TableSyntax
- let(:msg) do
- "The text you're translating will be already in the translated form when it's assigned to the constant. " \
- "When a users changes the locale, these texts won't be translated again. " \
- "Consider moving the translation logic to a method."
- end
+ let(:msg) { described_class::MSG }
subject(:cop) { described_class.new }
@@ -62,7 +58,7 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
}
end
CODE
- <<~CODE
+ <<~CODE,
class MyClass
B = [
[
@@ -72,6 +68,26 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
]
end
CODE
+ <<~CODE,
+ class MyClass
+ field :foo, title: _('A title')
+ ^^^^^^^^^^^^ #{msg}
+ end
+ CODE
+ <<~CODE
+ included do
+ _('a')
+ ^^^^^^ #{msg}
+ end
+ prepended do
+ self.var = _('a')
+ ^^^^^^ #{msg}
+ end
+ class_methods do
+ _('a')
+ ^^^^^^ #{msg}
+ end
+ CODE
]
end
@@ -95,6 +111,13 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
CODE
<<~CODE,
class MyClass
+ def self.method
+ @cache ||= { hello: proc { _("hello") } }
+ end
+ end
+ CODE
+ <<~CODE,
+ class MyClass
def method
@cache ||= { hello: _("hello") }
end
@@ -128,13 +151,30 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
end
end
CODE
- <<~CODE
+ <<~CODE,
Struct.new('SomeClass') do
def text
_('Some translated text')
end
end
CODE
+ <<~CODE,
+ class MyClass
+ field :foo, title: -> { _('A title') }
+ end
+ CODE
+ <<~CODE
+ included do
+ put do
+ _('b')
+ end
+ end
+ class_methods do
+ expose do
+ _('b')
+ end
+ end
+ CODE
]
end
diff --git a/spec/rubocop/formatter/todo_formatter_spec.rb b/spec/rubocop/formatter/todo_formatter_spec.rb
index e1b1de33bfe..fcff028f07d 100644
--- a/spec/rubocop/formatter/todo_formatter_spec.rb
+++ b/spec/rubocop/formatter/todo_formatter_spec.rb
@@ -14,17 +14,18 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
let(:real_tmp_dir) { File.join(tmp_dir, 'real') }
let(:symlink_tmp_dir) { File.join(tmp_dir, 'symlink') }
let(:rubocop_todo_dir) { "#{symlink_tmp_dir}/.rubocop_todo" }
- let(:options) { { rubocop_todo_dir: rubocop_todo_dir } }
let(:todo_dir) { RuboCop::TodoDir.new(rubocop_todo_dir) }
- subject(:formatter) { described_class.new(stdout, options) }
+ subject(:formatter) { described_class.new(stdout) }
around do |example|
FileUtils.mkdir(real_tmp_dir)
FileUtils.symlink(real_tmp_dir, symlink_tmp_dir)
Dir.chdir(symlink_tmp_dir) do
- example.run
+ described_class.with_base_directory(rubocop_todo_dir) do
+ example.run
+ end
end
end
@@ -38,8 +39,6 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
let(:offense_autocorrect) { fake_offense('B/AutoCorrect') }
before do
- stub_const("#{described_class}::MAX_OFFENSE_COUNT", 1)
-
stub_rubocop_registry(
'A/Offense' => { autocorrectable: false },
'B/AutoCorrect' => { autocorrectable: true }
diff --git a/spec/rubocop/todo_dir_spec.rb b/spec/rubocop/todo_dir_spec.rb
index ae59def885d..a5c12e23896 100644
--- a/spec/rubocop/todo_dir_spec.rb
+++ b/spec/rubocop/todo_dir_spec.rb
@@ -42,12 +42,6 @@ RSpec.describe RuboCop::TodoDir do
end
end
- describe '#directory' do
- subject { todo_dir.directory }
-
- it { is_expected.to eq(directory) }
- end
-
describe '#read' do
let(:content) { 'a' }
diff --git a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
index 169f5d1c5a6..82ed8563c3a 100644
--- a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
+++ b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
@@ -2,6 +2,31 @@
require 'fast_spec_helper'
require_relative '../../../../scripts/lib/glfm/update_example_snapshots'
+# IMPORTANT NOTE: See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/
+# for details on the implementation and usage of the `update_example_snapshots` script being tested.
+# This developer guide contains diagrams and documentation of the script,
+# including explanations and examples of all files it reads and writes.
+#
+# Note that this test is not structured in a traditional way, with multiple examples
+# to cover all the different scenarios. Instead, the contents of the stubbed test fixture
+# files are crafted to cover multiple scenarios within a single example run.
+#
+# This is because invoking the full script is slow: it executes two subshells for
+# processing, one which runs a full Rails environment, and one which runs a Jest test
+# environment. As a result, each full run of the script takes between 30 and 60 seconds,
+# the majority of which is spent loading the Rails environment.
+#
+# However, only the `writing html.yml and prosemirror_json.yml` context is used
+# to test these slow sub-processes, and it only contains a single example.
+#
+# All other tests currently in the file pass the `skip_static_and_wysiwyg: true`
+# flag to `#process`, which skips the slow sub-processes. All of these tests
+# should run in sub-second time when the Spring pre-loader is used. This allows
+# logic which is not directly related to the slow sub-processes to be TDD'd with a
+# very rapid feedback cycle.
+#
+# The textual content of the individual fixture file entries is also crafted to help
+# indicate which scenarios they cover.
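+#
+# A minimal sketch of the fast feedback path described above, assuming only the
+# `skip_static_and_wysiwyg` option that the specs below already pass to `#process`:
+#
+#   Glfm::UpdateExampleSnapshots.new.process(skip_static_and_wysiwyg: true)
+#
+# This still writes examples_index.yml and markdown.yml while skipping the slow
+# static HTML and WYSIWYG sub-processes.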
RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
subject { described_class.new }
@@ -17,8 +42,10 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
let(:es_markdown_yml_path) { described_class::ES_MARKDOWN_YML_PATH }
let(:es_markdown_yml_io) { StringIO.new }
let(:es_html_yml_path) { described_class::ES_HTML_YML_PATH }
+ let(:es_html_yml_io_existing) { StringIO.new(es_html_yml_io_existing_contents) }
let(:es_html_yml_io) { StringIO.new }
let(:es_prosemirror_json_yml_path) { described_class::ES_PROSEMIRROR_JSON_YML_PATH }
+ let(:es_prosemirror_json_yml_io_existing) { StringIO.new(es_prosemirror_json_yml_io_existing_contents) }
let(:es_prosemirror_json_yml_io) { StringIO.new }
# Internal tempfiles
@@ -44,7 +71,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
<p><strong>bold</strong></p>
````````````````````````````````
- ```````````````````````````````` example strikethrough
+ ```````````````````````````````` example strong
__bold with more text__
.
<p><strong>bold with more text</strong></p>
@@ -90,6 +117,24 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
</strong></p>
````````````````````````````````
+ # Third GitLab-Specific Section with skipped Examples
+
+ ## Strong but skipped
+
+ ```````````````````````````````` example gitlab strong
+ **this example will be skipped**
+ .
+ <p><strong>this example will be skipped</strong></p>
+ ````````````````````````````````
+
+ ## Strong but manually modified and skipped
+
+ ```````````````````````````````` example gitlab strong
+ **This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved**
+ .
+ <p><strong>This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved</strong></p>
+ ````````````````````````````````
+
<!-- END TESTS -->
# Appendix
@@ -99,25 +144,90 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
end
let(:glfm_example_status_yml_contents) do
+ # language=YAML
<<~GLFM_EXAMPLE_STATUS_YML_CONTENTS
---
- - 07_01_first_gitlab_specific_section_with_examples_strong_but_with_two_asterisks:
- skip_update_example_snapshots: false
- skip_running_snapshot_static_html_tests: false
- skip_running_snapshot_wysiwyg_html_tests: false
- skip_running_snapshot_prosemirror_json_tests: false
- skip_running_conformance_static_tests: false
- skip_running_conformance_wysiwyg_tests: false
- - 07_02_first_gitlab_specific_section_with_examples_strong_but_with_html:
+ 02_01__inlines__strong__001:
+ # The skip_update_example_snapshots key is present, but false, so this example is not skipped
skip_update_example_snapshots: false
- skip_running_snapshot_static_html_tests: false
- skip_running_snapshot_wysiwyg_html_tests: false
- skip_running_snapshot_prosemirror_json_tests: false
- skip_running_conformance_static_tests: false
- skip_running_conformance_wysiwyg_tests: false
+ 02_01__inlines__strong__002:
+ # It is OK to have an empty (nil) value for an example status entry; it means all of its statuses will be false.
+ 05_01__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
+ # Always skip this example
+ skip_update_example_snapshots: 'skipping this example because it is very bad'
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
+ # Always skip this example, but preserve the existing manual modifications
+ skip_update_example_snapshots: 'skipping this example because we have manually modified it'
GLFM_EXAMPLE_STATUS_YML_CONTENTS
end
+ let(:es_html_yml_io_existing_contents) do
+ # language=YAML
+ <<~ES_HTML_YML_IO_EXISTING_CONTENTS
+ ---
+ 00_00__obsolete_entry_to_be_deleted__001:
+ canonical: |
+ This entry no longer exists in the spec.txt, and is not skipped, so it will be deleted.
+ static: |-
+ This entry no longer exists in the spec.txt, and is not skipped, so it will be deleted.
+ wysiwyg: |-
+ This entry no longer exists in the spec.txt, and is not skipped, so it will be deleted.
+ 02_01__inlines__strong__001:
+ canonical: |
+ This entry exists, but is not skipped, so it will be overwritten.
+ static: |-
+ This entry exists, but is not skipped, so it will be overwritten.
+ wysiwyg: |-
+ This entry exists, but is not skipped, so it will be overwritten.
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
+ canonical: |
+ <p><strong>This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved</strong></p>
+ static: |-
+ <p>This is the manually modified static HTML which will be preserved</p>
+ wysiwyg: |-
+ <p>This is the manually modified WYSIWYG HTML which will be preserved</p>
+ ES_HTML_YML_IO_EXISTING_CONTENTS
+ end
+
+ let(:es_prosemirror_json_yml_io_existing_contents) do
+ # language=YAML
+ <<~ES_PROSEMIRROR_JSON_YML_IO_EXISTING_CONTENTS
+ ---
+ 00_00__obsolete_entry_to_be_deleted__001:
+ {
+ "obsolete": "This entry is no longer exists in the spec.txt, and is not skipped, so it will be deleted."
+ }
+ 02_01__inlines__strong__001: |-
+ {
+ "existing": "This entry is existing, but not skipped, so it will be overwritten."
+ }
+ # 02_01__inlines__strong__002: is omitted from the existing file and skipped, to test that scenario.
+ 02_02__inlines__strikethrough_extension__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "text": "~~Hi~~ Hello, world!"
+ }
+ ]
+ }
+ ]
+ }
+ 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__001: |-
+ {
+ "existing": "This entry is manually modified and preserved because skip_update_example_snapshot_prosemirror_json will be truthy"
+ }
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001: |-
+ {
+ "existing": "This entry is manually modified and preserved because skip_update_example_snapshots will be truthy"
+ }
+ ES_PROSEMIRROR_JSON_YML_IO_EXISTING_CONTENTS
+ end
+
before do
# We mock out the URI and local file IO objects with real StringIO, instead of just mock
# objects. This gives better and more realistic coverage, while still avoiding
@@ -129,12 +239,14 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
# output files
allow(File).to receive(:open).with(es_examples_index_yml_path, 'w') { es_examples_index_yml_io }
- allow(File).to receive(:open).with(es_html_yml_path, 'w') { es_html_yml_io }
- allow(File).to receive(:open).with(es_prosemirror_json_yml_path, 'w') { es_prosemirror_json_yml_io }
# output files which are also input files
allow(File).to receive(:open).with(es_markdown_yml_path, 'w') { es_markdown_yml_io }
allow(File).to receive(:open).with(es_markdown_yml_path) { es_markdown_yml_io }
+ allow(File).to receive(:open).with(es_html_yml_path, 'w') { es_html_yml_io }
+ allow(File).to receive(:open).with(es_html_yml_path) { es_html_yml_io_existing }
+ allow(File).to receive(:open).with(es_prosemirror_json_yml_path, 'w') { es_prosemirror_json_yml_io }
+ allow(File).to receive(:open).with(es_prosemirror_json_yml_path) { es_prosemirror_json_yml_io_existing }
# Allow normal opening of Tempfile files created during script execution.
tempfile_basenames = [
@@ -152,59 +264,110 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
allow(subject).to receive(:output)
end
+ describe 'when skip_update_example_snapshots is truthy' do
+ let(:es_examples_index_yml_contents) { reread_io(es_examples_index_yml_io) }
+ let(:es_markdown_yml_contents) { reread_io(es_markdown_yml_io) }
+ let(:expected_unskipped_example) do
+ /05_01__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001/
+ end
+
+ it 'still writes the example to examples_index.yml' do
+ subject.process(skip_static_and_wysiwyg: true)
+
+ expect(es_examples_index_yml_contents).to match(expected_unskipped_example)
+ end
+
+ it 'still writes the example to markdown.yml' do
+ subject.process(skip_static_and_wysiwyg: true)
+
+ expect(es_markdown_yml_contents).to match(expected_unskipped_example)
+ end
+
+ describe 'when any other skip_update_example_* is also truthy' do
+ let(:glfm_example_status_yml_contents) do
+ # language=YAML
+ <<~GLFM_EXAMPLE_STATUS_YML_CONTENTS
+ ---
+ 02_01__inlines__strong__001:
+ skip_update_example_snapshots: 'if the skip_update_example_snapshots key is truthy...'
+ skip_update_example_snapshot_html_static: '...then no other skip_update_example_* keys can be truthy'
+ GLFM_EXAMPLE_STATUS_YML_CONTENTS
+ end
+
+ it 'raises an error' do
+ expected_msg = "Error: '02_01__inlines__strong__001' must not have any 'skip_update_example_snapshot_*' " \
+ "values specified if 'skip_update_example_snapshots' is truthy"
+ expect { subject.process }.to raise_error(/#{Regexp.escape(expected_msg)}/)
+ end
+ end
+ end
+
describe 'writing examples_index.yml' do
let(:es_examples_index_yml_contents) { reread_io(es_examples_index_yml_io) }
+ let(:expected_examples_index_yml_contents) do
+ # language=YAML
+ <<~ES_EXAMPLES_INDEX_YML_CONTENTS
+ ---
+ 02_01__inlines__strong__001:
+ spec_txt_example_position: 1
+ source_specification: commonmark
+ 02_01__inlines__strong__002:
+ spec_txt_example_position: 2
+ source_specification: github
+ 02_02__inlines__strikethrough_extension__001:
+ spec_txt_example_position: 3
+ source_specification: github
+ 03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
+ spec_txt_example_position: 4
+ source_specification: gitlab
+ 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
+ spec_txt_example_position: 5
+ source_specification: gitlab
+ 05_01__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
+ spec_txt_example_position: 6
+ source_specification: gitlab
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
+ spec_txt_example_position: 7
+ source_specification: gitlab
+ ES_EXAMPLES_INDEX_YML_CONTENTS
+ end
it 'writes the correct content' do
subject.process(skip_static_and_wysiwyg: true)
- expected =
- <<~ES_EXAMPLES_INDEX_YML_CONTENTS
- ---
- 02_01__inlines__strong__01:
- spec_txt_example_position: 1
- source_specification: commonmark
- 02_01__inlines__strong__02:
- spec_txt_example_position: 2
- source_specification: github
- 02_02__inlines__strikethrough_extension__01:
- spec_txt_example_position: 3
- source_specification: github
- 03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01:
- spec_txt_example_position: 4
- source_specification: gitlab
- 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__01:
- spec_txt_example_position: 5
- source_specification: gitlab
- ES_EXAMPLES_INDEX_YML_CONTENTS
- expect(es_examples_index_yml_contents).to eq(expected)
+ expect(es_examples_index_yml_contents).to eq(expected_examples_index_yml_contents)
end
end
describe 'writing markdown.yml' do
let(:es_markdown_yml_contents) { reread_io(es_markdown_yml_io) }
+ let(:expected_markdown_yml_contents) do
+ # language=YAML
+ <<~ES_MARKDOWN_YML_CONTENTS
+ ---
+ 02_01__inlines__strong__001: |
+ __bold__
+ 02_01__inlines__strong__002: |
+ __bold with more text__
+ 02_02__inlines__strikethrough_extension__001: |
+ ~~Hi~~ Hello, world!
+ 03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001: |
+ **bold**
+ 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__001: |
+ <strong>
+ bold
+ </strong>
+ 05_01__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001: |
+ **this example will be skipped**
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001: |
+ **This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved**
+ ES_MARKDOWN_YML_CONTENTS
+ end
it 'writes the correct content' do
subject.process(skip_static_and_wysiwyg: true)
- expected =
- <<~ES_MARKDOWN_YML_CONTENTS
- ---
- 02_01__inlines__strong__01: |
- __bold__
- 02_01__inlines__strong__02: |
- __bold with more text__
- 02_02__inlines__strikethrough_extension__01: |
- ~~Hi~~ Hello, world!
- 03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01: |
- **bold**
- 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__01: |
- <strong>
- bold
- </strong>
- ES_MARKDOWN_YML_CONTENTS
-
- expect(es_markdown_yml_contents).to eq(expected)
+ expect(es_markdown_yml_contents).to eq(expected_markdown_yml_contents)
end
end
@@ -212,42 +375,152 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
let(:es_html_yml_contents) { reread_io(es_html_yml_io) }
let(:es_prosemirror_json_yml_contents) { reread_io(es_prosemirror_json_yml_io) }
+ # NOTE: This example_status.yml is crafted in conjunction with expected_html_yml_contents
+ # to test the behavior of the `skip_update_*` flags
let(:glfm_example_status_yml_contents) do
+ # language=YAML
<<~GLFM_EXAMPLE_STATUS_YML_CONTENTS
---
- - 02_01_gitlab_specific_section_with_examples_strong_but_with_two_asterisks:
- skip_update_example_snapshots: false
- skip_running_snapshot_static_html_tests: false
- skip_running_snapshot_wysiwyg_html_tests: false
- skip_running_snapshot_prosemirror_json_tests: false
- skip_running_conformance_static_tests: false
- skip_running_conformance_wysiwyg_tests: false
+ 02_01__inlines__strong__002:
+ skip_update_example_snapshot_prosemirror_json: "skipping because JSON isn't cool enough"
+ 03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
+ skip_update_example_snapshot_html_static: "skipping because there's too much static"
+ 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
+ skip_update_example_snapshot_html_wysiwyg: 'skipping because what you see is NOT what you get'
+ skip_update_example_snapshot_prosemirror_json: "skipping because JSON still isn't cool enough"
+ 05_01__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
+ skip_update_example_snapshots: 'skipping this example because it is very bad'
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
+ skip_update_example_snapshots: 'skipping this example because we have manually modified it'
GLFM_EXAMPLE_STATUS_YML_CONTENTS
end
- let(:glfm_spec_txt_contents) do
- <<~GLFM_SPEC_TXT_CONTENTS
+ let(:expected_html_yml_contents) do
+ # language=YAML
+ <<~ES_HTML_YML_CONTENTS
---
- title: GitLab Flavored Markdown Spec
- ...
-
- # Introduction
-
- # GitLab-Specific Section with Examples
-
- ## Strong but with two asterisks
-
- ```````````````````````````````` example gitlab strong
- **bold**
- .
- <p><strong>bold</strong></p>
- ````````````````````````````````
- <!-- END TESTS -->
-
- # Appendix
+ 02_01__inlines__strong__001:
+ canonical: |
+ <p><strong>bold</strong></p>
+ static: |-
+ <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
+ wysiwyg: |-
+ <p><strong>bold</strong></p>
+ 02_01__inlines__strong__002:
+ canonical: |
+ <p><strong>bold with more text</strong></p>
+ static: |-
+ <p data-sourcepos="1:1-1:23" dir="auto"><strong>bold with more text</strong></p>
+ wysiwyg: |-
+ <p><strong>bold with more text</strong></p>
+ 02_02__inlines__strikethrough_extension__001:
+ canonical: |
+ <p><del>Hi</del> Hello, world!</p>
+ static: |-
+ <p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
+ wysiwyg: |-
+ <p><s>Hi</s> Hello, world!</p>
+ 03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
+ canonical: |
+ <p><strong>bold</strong></p>
+ wysiwyg: |-
+ <p><strong>bold</strong></p>
+ 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
+ canonical: |
+ <p><strong>
+ bold
+ </strong></p>
+ static: |-
+ <strong>
+ bold
+ </strong>
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
+ canonical: |
+ <p><strong>This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved</strong></p>
+ static: |-
+ <p>This is the manually modified static HTML which will be preserved</p>
+ wysiwyg: |-
+ <p>This is the manually modified WYSIWYG HTML which will be preserved</p>
+ ES_HTML_YML_CONTENTS
+ end
- Appendix text.
- GLFM_SPEC_TXT_CONTENTS
+ let(:expected_prosemirror_json_contents) do
+ # language=YAML
+ <<~ES_PROSEMIRROR_JSON_YML_CONTENTS
+ ---
+ 02_01__inlines__strong__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
+ "text": "bold"
+ }
+ ]
+ }
+ ]
+ }
+ 02_02__inlines__strikethrough_extension__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "strike"
+ }
+ ],
+ "text": "Hi"
+ },
+ {
+ "type": "text",
+ "text": " Hello, world!"
+ }
+ ]
+ }
+ ]
+ }
+ 03_01__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
+ "text": "bold"
+ }
+ ]
+ }
+ ]
+ }
+ 04_01__second_gitlab_specific_section_with_examples__strong_but_with_html__001: |-
+ {
+ "existing": "This entry is manually modified and preserved because skip_update_example_snapshot_prosemirror_json will be truthy"
+ }
+ 05_02__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001: |-
+ {
+ "existing": "This entry is manually modified and preserved because skip_update_example_snapshots will be truthy"
+ }
+ ES_PROSEMIRROR_JSON_YML_CONTENTS
end
before do
@@ -263,48 +536,14 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
# avoid slower tests, because generating the static HTML is slow due to the need to invoke
# the rails environment. We could have separate sections, but this would require an extra flag
# to the `process` method to independently skip static vs. WYSIWYG, which is not worth the effort.
- it 'writes the correct content' do
- subject.process
+ it 'writes the correct content', :unlimited_max_formatted_output_length do
+ # expect the skipping message to be output only once per example
+ expect(subject).to receive(:output).once.with(/reason.*skipping this example because it is very bad/i)
- expected_html =
- <<~ES_HTML_YML_CONTENTS
- ---
- 02_01__gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01:
- canonical: |
- <p><strong>bold</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
- wysiwyg: |-
- <p><strong>bold</strong></p>
- ES_HTML_YML_CONTENTS
-
- expected_prosemirror_json =
- <<~ES_PROSEMIRROR_JSON_YML_CONTENTS
- ---
- 02_01__gitlab_specific_section_with_examples__strong_but_with_two_asterisks__01: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bold"
- }
- ]
- }
- ]
- }
- ES_PROSEMIRROR_JSON_YML_CONTENTS
+ subject.process
- expect(es_html_yml_contents).to eq(expected_html)
- expect(es_prosemirror_json_yml_contents).to eq(expected_prosemirror_json)
+ expect(es_html_yml_contents).to eq(expected_html_yml_contents)
+ expect(es_prosemirror_json_yml_contents).to eq(expected_prosemirror_json_contents)
end
end
diff --git a/spec/serializers/analytics_issue_entity_spec.rb b/spec/serializers/analytics_issue_entity_spec.rb
index bc5cab638cd..ca1e0705d77 100644
--- a/spec/serializers/analytics_issue_entity_spec.rb
+++ b/spec/serializers/analytics_issue_entity_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe AnalyticsIssueEntity do
iid: "1",
id: "1",
created_at: "2016-11-12 15:04:02.948604",
+ end_event_timestamp: "2022-05-24 14:33:01.529701",
author: user,
project_path: project.path,
namespace_path: project.namespace.route.path
@@ -40,10 +41,34 @@ RSpec.describe AnalyticsIssueEntity do
expect(subject).to include(:namespace_full_path)
end
+ it 'contains the end event timestamp' do
+ expect(entity.as_json[:end_event_timestamp]).to match(/ ago$/)
+ end
+
it 'does not contain sensitive information' do
expect(subject).not_to include(/token/)
expect(subject).not_to include(/variables/)
end
+
+ context 'when end_event_timestamp is nil' do
+ let(:entity_hash) do
+ {
+ total_time: "172802.724419",
+ title: "Eos voluptatem inventore in sed.",
+ iid: "1",
+ id: "1",
+ created_at: "2016-11-12 15:04:02.948604",
+ end_event_timestamp: nil,
+ author: user,
+ project_path: project.path,
+ namespace_path: project.namespace.route.path
+ }
+ end
+
+ it 'contains a nil end_event_timestamp' do
+ expect(entity.as_json[:end_event_timestamp]).to be_nil
+ end
+ end
end
context 'without subgroup' do
diff --git a/spec/serializers/deploy_key_entity_spec.rb b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
index e8d9701be67..c39eb14e339 100644
--- a/spec/serializers/deploy_key_entity_spec.rb
+++ b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe DeployKeyEntity do
+RSpec.describe DeployKeys::BasicDeployKeyEntity do
include RequestAwareEntity
let(:user) { create(:user) }
@@ -18,7 +18,7 @@ RSpec.describe DeployKeyEntity do
project_private.deploy_keys << deploy_key
end
- describe 'returns deploy keys with projects a user can read' do
+ describe 'returns deploy keys' do
let(:expected_result) do
{
id: deploy_key.id,
@@ -30,19 +30,7 @@ RSpec.describe DeployKeyEntity do
almost_orphaned: false,
created_at: deploy_key.created_at,
updated_at: deploy_key.updated_at,
- can_edit: false,
- deploy_keys_projects: [
- {
- can_push: false,
- project:
- {
- id: project.id,
- name: project.name,
- full_path: project_path(project),
- full_name: project.full_name
- }
- }
- ]
+ can_edit: false
}
end
diff --git a/spec/serializers/deploy_keys/deploy_key_entity_spec.rb b/spec/serializers/deploy_keys/deploy_key_entity_spec.rb
new file mode 100644
index 00000000000..e989aa8656c
--- /dev/null
+++ b/spec/serializers/deploy_keys/deploy_key_entity_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DeployKeys::DeployKeyEntity do
+ include RequestAwareEntity
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :internal)}
+ let(:project_private) { create(:project, :private)}
+ let(:deploy_key) { create(:deploy_key) }
+ let(:options) { { user: user } }
+
+ let(:entity) { described_class.new(deploy_key, options) }
+
+ before do
+ project.deploy_keys << deploy_key
+ project_private.deploy_keys << deploy_key
+ end
+
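+ # Unlike DeployKeys::BasicDeployKeyEntity, this entity also exposes deploy_keys_projects, limited to projects the user can read.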
+ describe 'returns deploy keys with projects a user can read' do
+ let(:expected_result) do
+ {
+ id: deploy_key.id,
+ user_id: deploy_key.user_id,
+ title: deploy_key.title,
+ fingerprint: deploy_key.fingerprint,
+ fingerprint_sha256: deploy_key.fingerprint_sha256,
+ destroyed_when_orphaned: true,
+ almost_orphaned: false,
+ created_at: deploy_key.created_at,
+ updated_at: deploy_key.updated_at,
+ can_edit: false,
+ deploy_keys_projects: [
+ {
+ can_push: false,
+ project:
+ {
+ id: project.id,
+ name: project.name,
+ full_path: project_path(project),
+ full_name: project.full_name
+ }
+ }
+ ]
+ }
+ end
+
+ it { expect(entity.as_json).to eq(expected_result) }
+ end
+end
diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb
index 500d5718bf1..a017f7523e9 100644
--- a/spec/serializers/deployment_entity_spec.rb
+++ b/spec/serializers/deployment_entity_spec.rb
@@ -60,12 +60,16 @@ RSpec.describe DeploymentEntity do
end
context 'when the pipeline has another manual action' do
- let(:other_build) { create(:ci_build, :manual, name: 'another deploy', pipeline: pipeline) }
- let!(:other_deployment) { create(:deployment, deployable: other_build) }
+ let!(:other_build) do
+ create(:ci_build, :manual, name: 'another deploy',
+ pipeline: pipeline, environment: build.environment)
+ end
+
+ let!(:other_deployment) { create(:deployment, deployable: build) }
it 'returns another manual action' do
- expect(subject[:manual_actions].count).to eq(1)
- expect(subject[:manual_actions].first[:name]).to eq('another deploy')
+ expect(subject[:manual_actions].count).to eq(2)
+ expect(subject[:manual_actions].pluck(:name)).to match_array(['test', 'another deploy'])
end
context 'when user is a reporter' do
diff --git a/spec/serializers/diff_file_entity_spec.rb b/spec/serializers/diff_file_entity_spec.rb
index ebfb21c4311..48099cb1fdf 100644
--- a/spec/serializers/diff_file_entity_spec.rb
+++ b/spec/serializers/diff_file_entity_spec.rb
@@ -91,5 +91,38 @@ RSpec.describe DiffFileEntity do
end
end
+ describe '#highlighted_diff_lines' do
+ context 'file without a conflict' do
+ let(:options) { { conflicts: {} } }
+
+ it 'calls diff_lines_for_serializer on diff_file' do
+ # #diff_lines_for_serializer is also called in #fully_expanded?, so we expect it to be called twice
+ expect(diff_file).to receive(:diff_lines_for_serializer).twice.and_return([])
+ expect(subject[:highlighted_diff_lines]).to eq([])
+ end
+ end
+
+ context 'file with a conflict' do
+ let(:conflict_file) { instance_double(Gitlab::Conflict::File, conflict_type: :both_modified) }
+ let(:options) { { conflicts: { diff_file.new_path => conflict_file } } }
+
+ it 'calls diff_lines_for_serializer on matching conflict file' do
+ expect(conflict_file).to receive(:diff_lines_for_serializer).and_return([])
+ expect(subject[:highlighted_diff_lines]).to eq([])
+ end
+
+ context 'when Gitlab::Git::Conflict::Parser::UnmergeableFile gets raised' do
+ before do
+ allow(conflict_file).to receive(:diff_lines_for_serializer).and_raise(Gitlab::Git::Conflict::Parser::UnmergeableFile)
+ end
+
+ it 'falls back to diff_file diff_lines_for_serializer' do
+ expect(diff_file).to receive(:diff_lines_for_serializer).and_return([])
+ expect(subject[:highlighted_diff_lines]).to eq([])
+ end
+ end
+ end
+ end
+
it_behaves_like 'diff file with conflict_type'
end
diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb
index fe6278084f9..05644dad151 100644
--- a/spec/serializers/environment_serializer_spec.rb
+++ b/spec/serializers/environment_serializer_spec.rb
@@ -212,7 +212,10 @@ RSpec.describe EnvironmentSerializer do
upcoming_deployment = nil
create(:environment, project: project).tap do |environment|
create(:deployment, :success, environment: environment, project: project)
- last_deployment = create(:deployment, :success, environment: environment, project: project)
+
+ create(:ci_build, :success, project: project).tap do |build|
+ last_deployment = create(:deployment, :success, environment: environment, project: project, deployable: build)
+ end
create(:deployment, :running, environment: environment, project: project)
upcoming_deployment = create(:deployment, :running, environment: environment, project: project)
@@ -227,8 +230,22 @@ RSpec.describe EnvironmentSerializer do
def create_environment_with_associations(project)
create(:environment, project: project).tap do |environment|
- create(:deployment, :success, environment: environment, project: project)
- create(:deployment, :running, environment: environment, project: project)
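+ # Set up an environment with a manual stop action, a running deployment from a scheduled build, and a successful deployment wired to that stop action.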
+ create(:ci_pipeline, project: project).tap do |pipeline|
+ create(:ci_build, :manual, project: project, pipeline: pipeline, name: 'stop-action',
+ environment: environment.name)
+
+ create(:ci_build, :scheduled, project: project, pipeline: pipeline,
+ environment: environment.name).tap do |scheduled_build|
+ create(:deployment, :running, environment: environment, project: project,
+ deployable: scheduled_build)
+ end
+
+ create(:ci_build, :success, :manual, project: project, pipeline: pipeline,
+ environment: environment.name).tap do |manual_build|
+ create(:deployment, :success, environment: environment, project: project,
+ deployable: manual_build, on_stop: 'stop-action')
+ end
+ end
end
end
end
diff --git a/spec/serializers/service_event_entity_spec.rb b/spec/serializers/integrations/event_entity_spec.rb
index db82e84fcf8..07281248f5b 100644
--- a/spec/serializers/service_event_entity_spec.rb
+++ b/spec/serializers/integrations/event_entity_spec.rb
@@ -2,17 +2,17 @@
require 'spec_helper'
-RSpec.describe ServiceEventEntity do
- let(:request) { double('request') }
+RSpec.describe Integrations::EventEntity do
+ let(:request) { EntityRequest.new(integration: integration) }
- subject { described_class.new(event, request: request, service: integration).as_json }
+ subject { described_class.new(event, request: request, integration: integration).as_json }
before do
- allow(request).to receive(:service).and_return(integration)
+ allow(request).to receive(:integration).and_return(integration)
end
describe '#as_json' do
- context 'integration without fields' do
+ context 'with integration without fields' do
let(:integration) { create(:emails_on_push_integration, push_events: true) }
let(:event) { 'push' }
@@ -24,7 +24,7 @@ RSpec.describe ServiceEventEntity do
end
end
- context 'integration with fields' do
+ context 'with integration with fields' do
let(:integration) { create(:integrations_slack, note_events: false, note_channel: 'note-channel') }
let(:event) { 'note' }
diff --git a/spec/serializers/service_field_entity_spec.rb b/spec/serializers/integrations/field_entity_spec.rb
index 3a574c522b0..e75dc051f5e 100644
--- a/spec/serializers/service_field_entity_spec.rb
+++ b/spec/serializers/integrations/field_entity_spec.rb
@@ -2,20 +2,20 @@
require 'spec_helper'
-RSpec.describe ServiceFieldEntity do
- let(:request) { double('request') }
+RSpec.describe Integrations::FieldEntity do
+ let(:request) { EntityRequest.new(integration: integration) }
- subject { described_class.new(field, request: request, service: integration).as_json }
+ subject { described_class.new(field, request: request, integration: integration).as_json }
before do
- allow(request).to receive(:service).and_return(integration)
+ allow(request).to receive(:integration).and_return(integration)
end
describe '#as_json' do
- context 'Jira Service' do
+ context 'with Jira integration' do
let(:integration) { create(:jira_integration) }
- context 'field with type text' do
+ context 'with field with type text' do
let(:field) { integration_field('username') }
it 'exposes correct attributes' do
@@ -36,7 +36,7 @@ RSpec.describe ServiceFieldEntity do
end
end
- context 'field with type password' do
+ context 'with field with type password' do
let(:field) { integration_field('password') }
it 'exposes correct attributes but hides password' do
@@ -58,10 +58,10 @@ RSpec.describe ServiceFieldEntity do
end
end
- context 'EmailsOnPush Service' do
+ context 'with EmailsOnPush integration' do
let(:integration) { create(:emails_on_push_integration, send_from_committer_email: '1') }
- context 'field with type checkbox' do
+ context 'with field with type checkbox' do
let(:field) { integration_field('send_from_committer_email') }
it 'exposes correct attributes and casts value to Boolean' do
@@ -78,11 +78,14 @@ RSpec.describe ServiceFieldEntity do
}
is_expected.to include(expected_hash)
- expect(subject[:help]).to include("Send notifications from the committer's email address if the domain matches the domain used by your GitLab instance")
+ expect(subject[:help]).to include(
+ "Send notifications from the committer's email address if the domain " \
+ "matches the domain used by your GitLab instance"
+ )
end
end
- context 'field with type select' do
+ context 'with field with type select' do
let(:field) { integration_field('branches_to_be_notified') }
it 'exposes correct attributes' do
@@ -93,7 +96,12 @@ RSpec.describe ServiceFieldEntity do
title: 'Branches for which notifications are to be sent',
placeholder: nil,
required: nil,
- choices: [['All branches', 'all'], ['Default branch', 'default'], ['Protected branches', 'protected'], ['Default branch and protected branches', 'default_and_protected']],
+ choices: [
+ ['All branches', 'all'],
+ ['Default branch', 'default'],
+ ['Protected branches', 'protected'],
+ ['Default branch and protected branches', 'default_and_protected']
+ ],
help: nil,
value: nil,
checkbox_label: nil
diff --git a/spec/serializers/issue_board_entity_spec.rb b/spec/serializers/issue_board_entity_spec.rb
index b8e2bfeaa3d..7a6a496912f 100644
--- a/spec/serializers/issue_board_entity_spec.rb
+++ b/spec/serializers/issue_board_entity_spec.rb
@@ -43,6 +43,12 @@ RSpec.describe IssueBoardEntity do
expect(subject).to include(labels: array_including(hash_including(:id, :title, :color, :description, :text_color, :priority)))
end
+ describe 'type' do
+ it 'has an issue type' do
+ expect(subject[:type]).to eq('ISSUE')
+ end
+ end
+
describe 'real_path' do
it 'has an issue path' do
expect(subject[:real_path]).to eq(project_issue_path(project, resource.iid))
diff --git a/spec/serializers/issue_entity_spec.rb b/spec/serializers/issue_entity_spec.rb
index 6ccb3dbc657..9525ed02314 100644
--- a/spec/serializers/issue_entity_spec.rb
+++ b/spec/serializers/issue_entity_spec.rb
@@ -24,6 +24,12 @@ RSpec.describe IssueEntity do
end
end
+ describe 'type' do
+ it 'has an issue type' do
+ expect(subject[:type]).to eq('ISSUE')
+ end
+ end
+
it 'has Issuable attributes' do
expect(subject).to include(:id, :iid, :author_id, :description, :lock_version, :milestone_id,
:title, :updated_by_id, :created_at, :updated_at, :milestone, :labels)
diff --git a/spec/serializers/issue_sidebar_basic_entity_spec.rb b/spec/serializers/issue_sidebar_basic_entity_spec.rb
index 564ffb1aea9..64a271e359a 100644
--- a/spec/serializers/issue_sidebar_basic_entity_spec.rb
+++ b/spec/serializers/issue_sidebar_basic_entity_spec.rb
@@ -59,16 +59,6 @@ RSpec.describe IssueSidebarBasicEntity do
expect(entity[:current_user][:can_update_escalation_status]).to be(false)
end
end
-
- context 'with :incident_escalations feature flag disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it 'is not present' do
- expect(entity[:current_user]).not_to include(:can_update_escalation_status)
- end
- end
end
end
end
diff --git a/spec/serializers/linked_project_issue_entity_spec.rb b/spec/serializers/linked_project_issue_entity_spec.rb
index b28b00bd8e1..c4646754f16 100644
--- a/spec/serializers/linked_project_issue_entity_spec.rb
+++ b/spec/serializers/linked_project_issue_entity_spec.rb
@@ -25,6 +25,22 @@ RSpec.describe LinkedProjectIssueEntity do
it { is_expected.to include(link_type: 'relates_to') }
end
+ describe 'type' do
+ it 'returns the issue type' do
+ expect(serialized_entity).to include(type: 'ISSUE')
+ end
+
+ context 'when related issue is a task' do
+ before do
+ related_issue.update!(issue_type: :task, work_item_type: WorkItems::Type.default_by_type(:task))
+ end
+
+ it 'returns a work item issue type' do
+ expect(serialized_entity).to include(type: 'TASK')
+ end
+ end
+ end
+
describe 'path' do
it 'returns an issue path' do
expect(serialized_entity).to include(path: project_issue_path(related_issue.project, related_issue.iid))
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 581efd331ef..409585e52f1 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -182,4 +182,40 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
end
end
+
+ describe '#mergeable_discussions_state?' do
+ context 'when change_response_code_merge_status is true' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: true)
+ end
+
+ it 'returns mergeable discussions state' do
+ expect(subject[:mergeable_discussions_state]).to eq(true)
+ end
+ end
+
+ context 'when change_response_code_merge_status is false' do
+ context 'when merge request is in a mergeable state' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: false)
+ allow(resource).to receive(:mergeable_discussions_state?).and_return(true)
+ end
+
+ it 'returns mergeable discussions state' do
+ expect(subject[:mergeable_discussions_state]).to eq(true)
+ end
+ end
+
+ context 'when merge request is not in a mergeable state' do
+ before do
+ stub_feature_flags(change_response_code_merge_status: false)
+ allow(resource).to receive(:mergeable_state?).and_return(false)
+ end
+
+ it 'returns mergeable discussions state' do
+ expect(subject[:mergeable_discussions_state]).to eq(false)
+ end
+ end
+ end
+ end
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index f0779f1c57c..292f1c395f5 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -77,46 +77,6 @@ RSpec.describe MergeRequestWidgetEntity do
.to eq("/#{resource.project.full_path}/-/merge_requests/#{resource.iid}.diff")
end
- describe 'codequality report artifacts', :request_store do
- let(:merge_base_pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
-
- before do
- project.add_developer(user)
-
- allow(resource).to receive_messages(
- merge_base_pipeline: merge_base_pipeline,
- base_pipeline: pipeline,
- head_pipeline: pipeline
- )
- end
-
- context 'with report artifacts' do
- let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
- let(:generic_job_id) { pipeline.builds.first.id }
- let(:merge_base_job_id) { merge_base_pipeline.builds.first.id }
-
- it 'has head_path and base_path entries' do
- expect(subject[:codeclimate][:head_path]).to include("/jobs/#{generic_job_id}/artifacts/download?file_type=codequality")
- expect(subject[:codeclimate][:base_path]).to include("/jobs/#{generic_job_id}/artifacts/download?file_type=codequality")
- end
-
- context 'on pipelines for merged results' do
- let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_codequality_reports, project: project) }
-
- it 'returns URLs from the head_pipeline and merge_base_pipeline' do
- expect(subject[:codeclimate][:head_path]).to include("/jobs/#{generic_job_id}/artifacts/download?file_type=codequality")
- expect(subject[:codeclimate][:base_path]).to include("/jobs/#{merge_base_job_id}/artifacts/download?file_type=codequality")
- end
- end
- end
-
- context 'without artifacts' do
- it 'does not have data entry' do
- expect(subject).not_to include(:codeclimate)
- end
- end
- end
-
describe 'merge_request_add_ci_config_path' do
let!(:project_auto_devops) { create(:project_auto_devops, :disabled, project: project) }
diff --git a/spec/serializers/prometheus_alert_entity_spec.rb b/spec/serializers/prometheus_alert_entity_spec.rb
index ae8c97401f8..91a1e3377c2 100644
--- a/spec/serializers/prometheus_alert_entity_spec.rb
+++ b/spec/serializers/prometheus_alert_entity_spec.rb
@@ -18,9 +18,5 @@ RSpec.describe PrometheusAlertEntity do
it 'exposes prometheus_alert attributes' do
expect(subject).to include(:id, :title, :query, :operator, :threshold, :runbook_url)
end
-
- it 'exposes alert_path' do
- expect(subject).to include(:alert_path)
- end
end
end
diff --git a/spec/services/alert_management/alerts/update_service_spec.rb b/spec/services/alert_management/alerts/update_service_spec.rb
index f02607b8174..9bdc9970807 100644
--- a/spec/services/alert_management/alerts/update_service_spec.rb
+++ b/spec/services/alert_management/alerts/update_service_spec.rb
@@ -291,14 +291,6 @@ RSpec.describe AlertManagement::Alerts::UpdateService do
it_behaves_like 'does not sync with the incident status'
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it_behaves_like 'does not sync with the incident status'
- end
end
end
end
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 68c5af33fd8..22bb1736f9f 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe BulkCreateIntegrationService do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
before_all do
stub_jira_integration_test
@@ -21,7 +21,7 @@ RSpec.describe BulkCreateIntegrationService do
]
end
- shared_examples 'creates integration from batch ids' do
+ shared_examples 'creates integration successfully' do
def attributes(record)
record.reload.attributes.except(*excluded_attributes)
end
@@ -41,15 +41,31 @@ RSpec.describe BulkCreateIntegrationService do
expect(attributes(created_integration.data_fields))
.to eq attributes(integration.data_fields)
end
+
+ it 'sets created_at and updated_at timestamps', :freeze_time do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.data_fields.reload).to have_attributes(
+ created_at: eq(Time.current),
+ updated_at: eq(Time.current)
+ )
+ end
end
- end
- shared_examples 'updates inherit_from_id' do
it 'updates inherit_from_id attributes' do
described_class.new(integration, batch, association).execute
expect(created_integration.reload.inherit_from_id).to eq(inherit_from_id)
end
+
+ it 'sets created_at and updated_at timestamps', :freeze_time do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.reload).to have_attributes(
+ created_at: eq(Time.current),
+ updated_at: eq(Time.current)
+ )
+ end
end
context 'passing an instance-level integration' do
@@ -62,8 +78,7 @@ RSpec.describe BulkCreateIntegrationService do
let(:batch) { Project.where(id: project.id) }
let(:association) { 'project' }
- it_behaves_like 'creates integration from batch ids'
- it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'creates integration successfully'
end
context 'with a group association' do
@@ -72,8 +87,7 @@ RSpec.describe BulkCreateIntegrationService do
let(:batch) { Group.where(id: group.id) }
let(:association) { 'group' }
- it_behaves_like 'creates integration from batch ids'
- it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'creates integration successfully'
end
end
@@ -88,15 +102,13 @@ RSpec.describe BulkCreateIntegrationService do
let(:association) { 'project' }
let(:inherit_from_id) { integration.id }
- it_behaves_like 'creates integration from batch ids'
- it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'creates integration successfully'
context 'with different foreign key of data_fields' do
let(:integration) { create(:zentao_integration, :group, group: group) }
let(:created_integration) { project.zentao_integration }
- it_behaves_like 'creates integration from batch ids'
- it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'creates integration successfully'
end
end
@@ -108,14 +120,12 @@ RSpec.describe BulkCreateIntegrationService do
let(:association) { 'group' }
let(:inherit_from_id) { instance_integration.id }
- it_behaves_like 'creates integration from batch ids'
- it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'creates integration successfully'
context 'with different foreign key of data_fields' do
let(:integration) { create(:zentao_integration, :group, group: group, inherit_from_id: instance_integration.id) }
- it_behaves_like 'creates integration from batch ids'
- it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'creates integration successfully'
end
end
end
diff --git a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
new file mode 100644
index 00000000000..d7b00ba04ab
--- /dev/null
+++ b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::CreatePipelineTrackersService do
+ describe '#execute!' do
+ context 'when entity is group' do
+ it 'creates trackers for group entity' do
+ bulk_import = create(:bulk_import)
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ described_class.new(entity).execute!
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
+ )
+ )
+ end
+ end
+
+ context 'when entity is project' do
+ it 'creates trackers for project entity' do
+ bulk_import = create(:bulk_import)
+ entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
+
+ described_class.new(entity).execute!
+
+ expect(entity.trackers.to_a).to include(
+ have_attributes(
+ stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
+ ),
+ have_attributes(
+ stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
+ )
+ )
+ end
+ end
+
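+ # Pipelines that declare minimum_source_version or maximum_source_version only get :created trackers when the source instance version satisfies that bound; otherwise their trackers are :skipped.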
+ context 'when tracker configuration has a minimum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is older than the tracker minimum version' do
+ let_it_be(:bulk_import) { create(:bulk_import, source_version: '15.0.0') }
+ let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
+
+ it 'creates trackers as skipped if the version requirement is not met' do
+ described_class.new(entity).execute!
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:skipped, 'PipelineClass4'],
+ [:skipped, 'PipelineClass5']
+ )
+ end
+
+ it 'logs an info message for the skipped pipelines' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:info).with({
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ entity_id: entity.id,
+ pipeline_name: 'PipelineClass4',
+ minimum_source_version: '15.1.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ })
+
+ expect(logger).to receive(:info).with({
+ message: 'Pipeline skipped as source instance version not compatible with pipeline',
+ entity_id: entity.id,
+ pipeline_name: 'PipelineClass5',
+ minimum_source_version: '16.0.0',
+ maximum_source_version: nil,
+ source_version: '15.0.0'
+ })
+ end
+
+ described_class.new(entity).execute!
+ end
+ end
+
+ context 'when the source instance version is undefined' do
+ it 'creates trackers as created' do
+ bulk_import = create(:bulk_import, source_version: nil)
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ described_class.new(entity).execute!
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:created, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+ end
+
+ context 'when tracker configuration has a maximum version defined' do
+ before do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
+ pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
+ pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
+ pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
+ pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
+ }
+ )
+ end
+ end
+
+ context 'when the source instance version is newer than the tracker maximum version' do
+ it 'creates trackers as skipped if the version requirement is not met' do
+ bulk_import = create(:bulk_import, source_version: '15.0.0')
+ entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
+
+ described_class.new(entity).execute!
+
+ expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
+ [:created, 'PipelineClass1'],
+ [:skipped, 'PipelineClass2'],
+ [:created, 'PipelineClass3'],
+ [:created, 'PipelineClass4'],
+ [:created, 'PipelineClass5']
+ )
+ end
+ end
+
+ context 'when the source instance version is a patch version' do
+ it 'creates trackers with the same status as the non-patch source version' do
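+ # The patch component of the source version should not affect which trackers are skipped.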
+ bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
+ entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
+
+ bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
+ entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
+
+ described_class.new(entity_1).execute!
+ described_class.new(entity_2).execute!
+
+ trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+ trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
+
+ expect(trackers_1).to eq(trackers_2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/file_export_service_spec.rb b/spec/services/bulk_imports/file_export_service_spec.rb
index 94efceff6c6..453fc1d0c0d 100644
--- a/spec/services/bulk_imports/file_export_service_spec.rb
+++ b/spec/services/bulk_imports/file_export_service_spec.rb
@@ -4,39 +4,34 @@ require 'spec_helper'
RSpec.describe BulkImports::FileExportService do
let_it_be(:project) { create(:project) }
- let_it_be(:export_path) { Dir.mktmpdir }
- let_it_be(:relation) { BulkImports::FileTransfer::BaseConfig::UPLOADS_RELATION }
-
- subject(:service) { described_class.new(project, export_path, relation) }
describe '#execute' do
- it 'executes export service and archives exported data' do
- expect_next_instance_of(BulkImports::UploadsExportService) do |service|
- expect(service).to receive(:execute)
- end
+ it 'executes export service and archives exported data for each file relation' do
+ relations = {
+ 'uploads' => BulkImports::UploadsExportService,
+ 'lfs_objects' => BulkImports::LfsObjectsExportService,
+ 'repository' => BulkImports::RepositoryBundleExportService,
+ 'design' => BulkImports::RepositoryBundleExportService
+ }
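+ # Each supported relation delegates to its dedicated export service and is archived as <relation>.tar.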
- expect(subject).to receive(:tar_cf).with(archive: File.join(export_path, 'uploads.tar'), dir: export_path)
+ relations.each do |relation, klass|
+ Dir.mktmpdir do |export_path|
+ service = described_class.new(project, export_path, relation)
- subject.execute
- end
+ expect_next_instance_of(klass) do |service|
+ expect(service).to receive(:execute)
+ end
- context 'when relation is lfs objects' do
- let_it_be(:relation) { BulkImports::FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION }
+ expect(service).to receive(:tar_cf).with(archive: File.join(export_path, "#{relation}.tar"), dir: export_path)
- it 'executes lfs objects export service' do
- expect_next_instance_of(BulkImports::LfsObjectsExportService) do |service|
- expect(service).to receive(:execute)
+ service.execute
end
-
- expect(subject).to receive(:tar_cf).with(archive: File.join(export_path, 'lfs_objects.tar'), dir: export_path)
-
- subject.execute
end
end
context 'when unsupported relation is passed' do
it 'raises an error' do
- service = described_class.new(project, export_path, 'unsupported')
+ service = described_class.new(project, nil, 'unsupported')
expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
end
@@ -45,7 +40,9 @@ RSpec.describe BulkImports::FileExportService do
describe '#exported_filename' do
it 'returns filename of the exported file' do
- expect(subject.exported_filename).to eq('uploads.tar')
+ service = described_class.new(project, nil, 'uploads')
+
+ expect(service.exported_filename).to eq('uploads.tar')
end
end
end
diff --git a/spec/services/bulk_imports/lfs_objects_export_service_spec.rb b/spec/services/bulk_imports/lfs_objects_export_service_spec.rb
index 5ae54ed309b..894789c7941 100644
--- a/spec/services/bulk_imports/lfs_objects_export_service_spec.rb
+++ b/spec/services/bulk_imports/lfs_objects_export_service_spec.rb
@@ -53,6 +53,18 @@ RSpec.describe BulkImports::LfsObjectsExportService do
)
end
+ context 'when the lfs object file is missing on disk' do
+ it 'does not attempt to copy the non-existent file' do
+ FileUtils.rm(lfs_object.file.path)
+
+ expect(service).not_to receive(:copy_files)
+
+ service.execute
+
+ expect(File).not_to exist(File.join(export_path, lfs_object.oid))
+ end
+ end
+
context 'when lfs object is remotely stored' do
let(:lfs_object) { create(:lfs_object, :object_storage) }
diff --git a/spec/services/bulk_imports/repository_bundle_export_service_spec.rb b/spec/services/bulk_imports/repository_bundle_export_service_spec.rb
new file mode 100644
index 00000000000..a7d98a7474a
--- /dev/null
+++ b/spec/services/bulk_imports/repository_bundle_export_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::RepositoryBundleExportService do
+ let(:project) { create(:project) }
+ let(:export_path) { Dir.mktmpdir }
+
+ subject(:service) { described_class.new(repository, export_path, export_filename) }
+
+ after do
+ FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
+ end
+
+ describe '#execute' do
+ shared_examples 'repository export' do
+ context 'when repository exists' do
+ it 'bundles repository to disk' do
+ allow(repository).to receive(:exists?).and_return(true)
+ expect(repository).to receive(:bundle_to_disk).with(File.join(export_path, "#{export_filename}.bundle"))
+
+ service.execute
+ end
+ end
+
+ context 'when repository does not exist' do
+ it 'does not bundle repository to disk' do
+ allow(repository).to receive(:exists?).and_return(false)
+ expect(repository).not_to receive(:bundle_to_disk)
+
+ service.execute
+ end
+ end
+ end
+
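+ # The shared examples run for both the project repository and the design repository.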
+ include_examples 'repository export' do
+ let(:repository) { project.repository }
+ let(:export_filename) { 'repository' }
+ end
+
+ include_examples 'repository export' do
+ let(:repository) { project.design_repository }
+ let(:export_filename) { 'design' }
+ end
+ end
+end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index dcc8d2df36d..e3e38aacaa2 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe BulkUpdateIntegrationService do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
before_all do
stub_jira_integration_test
@@ -55,6 +55,20 @@ RSpec.describe BulkUpdateIntegrationService do
.not_to eq(subgroup_integration.attributes.except(*excluded_attributes))
end
+ it 'does not change the created_at timestamp' do
+ subgroup_integration.update_column(:created_at, Time.utc('2022-01-01'))
+
+ expect do
+ described_class.new(subgroup_integration, batch).execute
+ end.not_to change { integration.reload.created_at }
+ end
+
+ it 'sets the updated_at timestamp to the current time', time_travel_to: Time.utc('2022-01-01') do
+ expect do
+ described_class.new(subgroup_integration, batch).execute
+ end.to change { integration.reload.updated_at }.to(Time.current)
+ end
+
context 'with integration with data fields' do
let(:excluded_attributes) do
%w[id service_id created_at updated_at encrypted_properties encrypted_properties_iv]
@@ -69,6 +83,20 @@ RSpec.describe BulkUpdateIntegrationService do
expect(integration.data_fields.attributes.except(*excluded_attributes))
.not_to eq(excluded_integration.data_fields.attributes.except(*excluded_attributes))
end
+
+ it 'does not change the created_at timestamp' do
+ subgroup_integration.data_fields.update_column(:created_at, Time.utc('2022-01-02'))
+
+ expect do
+ described_class.new(subgroup_integration, batch).execute
+ end.not_to change { integration.data_fields.reload.created_at }
+ end
+
+ it 'sets the updated_at timestamp to the current time', time_travel_to: Time.utc('2022-01-01') do
+ expect do
+ described_class.new(subgroup_integration, batch).execute
+ end.to change { integration.data_fields.reload.updated_at }.to(Time.current)
+ end
end
end
diff --git a/spec/services/ci/abort_pipelines_service_spec.rb b/spec/services/ci/abort_pipelines_service_spec.rb
index db25faff70f..9f9519d6829 100644
--- a/spec/services/ci/abort_pipelines_service_spec.rb
+++ b/spec/services/ci/abort_pipelines_service_spec.rb
@@ -94,28 +94,5 @@ RSpec.describe Ci::AbortPipelinesService do
end
end
end
-
- context 'with user pipelines' do
- def abort_user_pipelines
- described_class.new.execute(user.pipelines, :user_blocked)
- end
-
- it 'fails all running pipelines and related jobs' do
- expect(abort_user_pipelines).to be_success
-
- expect_correct_cancellations
-
- expect(other_users_pipeline.status).not_to eq('failed')
- end
-
- it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new { abort_user_pipelines }.count
-
- pipelines = create_list(:ci_pipeline, 5, :running, project: project, user: user)
- create_list(:ci_build, 5, :running, pipeline: pipelines.first)
-
- expect { abort_user_pipelines }.not_to exceed_query_limit(control_count)
- end
- end
end
end
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
index c9bd44f78e2..fb67ee18fb2 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -26,6 +26,11 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
script: exit 0
needs: [a1]
+ a3:
+ stage: a
+ script: exit 0
+ needs: [a2]
+
b1:
stage: b
script: exit 0
@@ -59,6 +64,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
check_jobs_statuses(
a1: 'pending',
a2: 'created',
+ a3: 'created',
b1: 'pending',
b2: 'created',
c1: 'created',
@@ -69,6 +75,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
check_jobs_statuses(
a1: 'pending',
a2: 'created',
+ a3: 'created',
b1: 'success',
b2: 'created',
c1: 'created',
@@ -79,6 +86,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
check_jobs_statuses(
a1: 'failed',
a2: 'skipped',
+ a3: 'skipped',
b1: 'success',
b2: 'skipped',
c1: 'skipped',
@@ -90,6 +98,7 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
check_jobs_statuses(
a1: 'pending',
a2: 'skipped',
+ a3: 'skipped',
b1: 'success',
b2: 'skipped',
c1: 'skipped',
@@ -103,12 +112,42 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
check_jobs_statuses(
a1: 'pending',
a2: 'created',
+ a3: 'skipped',
b1: 'success',
b2: 'created',
c1: 'created',
c2: 'created'
)
end
+
+ context 'when executed by a different user than the original owner' do
+ let(:retryer) { create(:user).tap { |u| project.add_maintainer(u) } }
+ let(:service) { described_class.new(project, retryer) }
+
+ it 'reassigns jobs with updated statuses to the retryer' do
+ expect(jobs_name_status_owner_needs).to contain_exactly(
+ { 'name' => 'a1', 'status' => 'pending', 'user_id' => user.id, 'needs' => [] },
+ { 'name' => 'a2', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a1'] },
+ { 'name' => 'a3', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a2'] },
+ { 'name' => 'b1', 'status' => 'success', 'user_id' => user.id, 'needs' => [] },
+ { 'name' => 'b2', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a2'] },
+ { 'name' => 'c1', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['b2'] },
+ { 'name' => 'c2', 'status' => 'skipped', 'user_id' => user.id, 'needs' => [] }
+ )
+
+ execute_after_requeue_service(a1)
+
+ expect(jobs_name_status_owner_needs).to contain_exactly(
+ { 'name' => 'a1', 'status' => 'pending', 'user_id' => user.id, 'needs' => [] },
+ { 'name' => 'a2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a1'] },
+ { 'name' => 'a3', 'status' => 'skipped', 'user_id' => user.id, 'needs' => ['a2'] },
+ { 'name' => 'b1', 'status' => 'success', 'user_id' => user.id, 'needs' => [] },
+ { 'name' => 'b2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['a2'] },
+ { 'name' => 'c1', 'status' => 'created', 'user_id' => retryer.id, 'needs' => ['b2'] },
+ { 'name' => 'c2', 'status' => 'created', 'user_id' => retryer.id, 'needs' => [] }
+ )
+ end
+ end
end
context 'stage-dag mixed pipeline with some same-stage needs' do
@@ -212,6 +251,12 @@ RSpec.describe Ci::AfterRequeueJobService, :sidekiq_inline do
pipeline.processables.latest
end
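+ # Returns each job's name, status, owner and needs so specs can assert which jobs get reassigned to the retrying user.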
+ def jobs_name_status_owner_needs
+ processables.reload.map do |job|
+ job.attributes.slice('name', 'status', 'user_id').merge('needs' => job.needs.map(&:name))
+ end
+ end
+
def execute_after_requeue_service(processable)
service.execute(processable)
end
diff --git a/spec/services/ci/create_pipeline_service/rate_limit_spec.rb b/spec/services/ci/create_pipeline_service/rate_limit_spec.rb
index caea165cc6c..0000296230f 100644
--- a/spec/services/ci/create_pipeline_service/rate_limit_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rate_limit_spec.rb
@@ -10,10 +10,8 @@ RSpec.describe Ci::CreatePipelineService, :freeze_time, :clean_gitlab_redis_rate
before do
stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
- stub_feature_flags(ci_throttle_pipelines_creation_dry_run: false)
-
- allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
- .and_return(pipelines_create: { threshold: 1, interval: 1.minute })
+ stub_application_setting(pipeline_limit_per_project_user_sha: 1)
+ stub_feature_flags(ci_enforce_throttle_pipelines_creation_override: false)
end
context 'when user is under the limit' do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index c39a76ad2fc..aac059f2104 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -12,10 +12,6 @@ RSpec.describe Ci::CreatePipelineService do
before do
stub_ci_pipeline_to_return_yaml_file
-
- # Disable rate limiting for pipeline creation
- allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
- .and_return(pipelines_create: { threshold: 0, interval: 1.minute })
end
describe '#execute' do
@@ -1541,11 +1537,12 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline.target_sha).to be_nil
end
- it 'schedules update for the head pipeline of the merge request', :sidekiq_inline do
- expect(UpdateHeadPipelineForMergeRequestWorker)
- .to receive(:perform_async).with(merge_request.id)
+ it 'schedules update for the head pipeline of the merge request' do
+ allow(MergeRequests::UpdateHeadPipelineWorker).to receive(:perform_async)
pipeline
+
+ expect(MergeRequests::UpdateHeadPipelineWorker).to have_received(:perform_async).with('Ci::PipelineCreatedEvent', { 'pipeline_id' => pipeline.id })
end
it 'schedules a namespace onboarding create action worker' do
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index 1c6963e4a31..4f7663d7996 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -99,6 +99,16 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
expect { subject }.not_to change { artifact.file.exists? }
end
end
+
+ context 'when the project the artifact belongs to is undergoing stats refresh' do
+ before do
+ create(:project_build_artifacts_size_refresh, :pending, project: artifact.project)
+ end
+
+ it 'does not destroy job artifact' do
+ expect { subject }.not_to change { Ci::JobArtifact.count }
+ end
+ end
end
context 'when artifact is locked' do
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 5e77041a632..3a04a3af03e 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -4,7 +4,14 @@ require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyBatchService do
let(:artifacts) { Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_without_file.id, trace_artifact.id]) }
- let(:service) { described_class.new(artifacts, pick_up_at: Time.current) }
+ let(:skip_projects_on_refresh) { false }
+ let(:service) do
+ described_class.new(
+ artifacts,
+ pick_up_at: Time.current,
+ skip_projects_on_refresh: skip_projects_on_refresh
+ )
+ end
let_it_be(:artifact_with_file, refind: true) do
create(:ci_job_artifact, :zip)
@@ -52,6 +59,128 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
.and not_change { Ci::JobArtifact.exists?(trace_artifact.id) }
end
+ context 'when artifact belongs to a project that is undergoing stats refresh' do
+ let!(:artifact_under_refresh_1) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:artifact_under_refresh_2) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:artifact_under_refresh_3) do
+ create(:ci_job_artifact, :zip, project: artifact_under_refresh_2.project)
+ end
+
+ let(:artifacts) do
+ Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_under_refresh_1.id, artifact_under_refresh_2.id,
+ artifact_under_refresh_3.id])
+ end
+
+ before do
+ create(:project_build_artifacts_size_refresh, :created, project: artifact_with_file.project)
+ create(:project_build_artifacts_size_refresh, :pending, project: artifact_under_refresh_1.project)
+ create(:project_build_artifacts_size_refresh, :running, project: artifact_under_refresh_2.project)
+ end
+
+ shared_examples 'avoiding N+1 queries' do
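+ # The control service destroys artifacts spanning one project under refresh; the main service spans two, so the query count must not grow with the number of projects under refresh.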
+ let!(:control_artifact_on_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:control_artifact_non_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:other_artifact_on_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:other_artifact_on_refresh_2) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:other_artifact_non_refresh) do
+ create(:ci_job_artifact, :zip)
+ end
+
+ let!(:control_artifacts) do
+ Ci::JobArtifact.where(
+ id: [
+ control_artifact_on_refresh.id,
+ control_artifact_non_refresh.id
+ ]
+ )
+ end
+
+ let!(:artifacts) do
+ Ci::JobArtifact.where(
+ id: [
+ other_artifact_on_refresh.id,
+ other_artifact_on_refresh_2.id,
+ other_artifact_non_refresh.id
+ ]
+ )
+ end
+
+ let(:control_service) do
+ described_class.new(
+ control_artifacts,
+ pick_up_at: Time.current,
+ skip_projects_on_refresh: skip_projects_on_refresh
+ )
+ end
+
+ before do
+ create(:project_build_artifacts_size_refresh, :pending, project: control_artifact_on_refresh.project)
+ create(:project_build_artifacts_size_refresh, :pending, project: other_artifact_on_refresh.project)
+ create(:project_build_artifacts_size_refresh, :pending, project: other_artifact_on_refresh_2.project)
+ end
+
+ it 'does not make multiple queries when fetching multiple project refresh records' do
+ control = ActiveRecord::QueryRecorder.new { control_service.execute }
+
+ expect { subject }.not_to exceed_query_limit(control)
+ end
+ end
+
+ context 'and skip_projects_on_refresh is set to false (default)' do
+ it 'logs the projects undergoing refresh and continues with the delete', :aggregate_failures do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_id: artifact_under_refresh_1.project.id
+ ).once
+
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_id: artifact_under_refresh_2.project.id
+ ).once
+
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-4)
+ end
+
+ it_behaves_like 'avoiding N+1 queries'
+ end
+
+ context 'and skip_projects_on_refresh is set to true' do
+ let(:skip_projects_on_refresh) { true }
+
+ it 'logs the projects undergoing refresh and excludes the artifacts from deletion', :aggregate_failures do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).to receive(:warn_skipped_artifact_deletion_during_stats_refresh).with(
+ method: 'Ci::JobArtifacts::DestroyBatchService#execute',
+ project_ids: match_array([artifact_under_refresh_1.project.id, artifact_under_refresh_2.project.id])
+ )
+
+ expect { subject }.to change { Ci::JobArtifact.count }.by(-1)
+ expect(Ci::JobArtifact.where(id: artifact_under_refresh_1.id)).to exist
+ expect(Ci::JobArtifact.where(id: artifact_under_refresh_2.id)).to exist
+ expect(Ci::JobArtifact.where(id: artifact_under_refresh_3.id)).to exist
+ end
+
+ it_behaves_like 'avoiding N+1 queries'
+ end
+ end
+
context 'ProjectStatistics' do
it 'resets project statistics' do
expect(ProjectStatistics).to receive(:increment_statistic).once
diff --git a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
index 98b01e2b303..403afde5da3 100644
--- a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
@@ -4,17 +4,14 @@ require 'spec_helper'
RSpec.describe ::Ci::PipelineArtifacts::CoverageReportService do
describe '#execute' do
- subject { described_class.new.execute(pipeline) }
+ let_it_be(:project) { create(:project, :repository) }
- context 'when pipeline has coverage reports' do
- let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project) }
+ subject { described_class.new(pipeline).execute }
+ shared_examples 'creating a pipeline coverage report' do
context 'when pipeline is finished' do
it 'creates a pipeline artifact' do
- subject
-
- expect(Ci::PipelineArtifact.count).to eq(1)
+ expect { subject }.to change { Ci::PipelineArtifact.count }.from(0).to(1)
end
it 'persists the default file name' do
@@ -37,21 +34,32 @@ RSpec.describe ::Ci::PipelineArtifacts::CoverageReportService do
end
context 'when pipeline artifact has already been created' do
- it 'do not raise an error and do not persist the same artifact twice' do
- expect { 2.times { described_class.new.execute(pipeline) } }.not_to raise_error
+ it 'does not raise an error and does not persist the same artifact twice' do
+ expect { 2.times { described_class.new(pipeline).execute } }.not_to raise_error
expect(Ci::PipelineArtifact.count).to eq(1)
end
end
end
+ context 'when pipeline has coverage report' do
+ let!(:pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project) }
+
+ it_behaves_like 'creating a pipeline coverage report'
+ end
+
+ context 'when pipeline has coverage report from child pipeline' do
+ let!(:pipeline) { create(:ci_pipeline, :success, project: project) }
+ let!(:child_pipeline) { create(:ci_pipeline, :with_coverage_reports, project: project, child_of: pipeline) }
+
+ it_behaves_like 'creating a pipeline coverage report'
+ end
+
context 'when pipeline is running and coverage report does not exist' do
let(:pipeline) { create(:ci_pipeline, :running) }
it 'does not persist data' do
- subject
-
- expect(Ci::PipelineArtifact.count).to eq(0)
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
end
end
end
diff --git a/spec/services/ci/process_sync_events_service_spec.rb b/spec/services/ci/process_sync_events_service_spec.rb
index 6b9717fe57d..241ac4995ff 100644
--- a/spec/services/ci/process_sync_events_service_spec.rb
+++ b/spec/services/ci/process_sync_events_service_spec.rb
@@ -137,10 +137,9 @@ RSpec.describe Ci::ProcessSyncEventsService do
end
end
- context 'when the FFs sync_traversal_ids, use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
+ context 'when the FFs use_traversal_ids and use_traversal_ids_for_ancestors are disabled' do
before do
- stub_feature_flags(sync_traversal_ids: false,
- use_traversal_ids: false,
+ stub_feature_flags(use_traversal_ids: false,
use_traversal_ids_for_ancestors: false)
end
diff --git a/spec/services/clusters/applications/schedule_update_service_spec.rb b/spec/services/clusters/applications/schedule_update_service_spec.rb
deleted file mode 100644
index 2cbcb861938..00000000000
--- a/spec/services/clusters/applications/schedule_update_service_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::ScheduleUpdateService do
- describe '#execute' do
- let(:project) { create(:project) }
-
- around do |example|
- freeze_time { example.run }
- end
-
- context 'when the application is a Clusters::Integrations::Prometheus' do
- let(:application) { create(:clusters_integrations_prometheus) }
-
- it 'does nothing' do
- service = described_class.new(application, project)
-
- expect(::ClusterUpdateAppWorker).not_to receive(:perform_in)
- expect(::ClusterUpdateAppWorker).not_to receive(:perform_async)
-
- service.execute
- end
- end
-
- context 'when the application is externally installed' do
- let(:application) { create(:clusters_applications_prometheus, :externally_installed) }
-
- it 'does nothing' do
- service = described_class.new(application, project)
-
- expect(::ClusterUpdateAppWorker).not_to receive(:perform_in)
- expect(::ClusterUpdateAppWorker).not_to receive(:perform_async)
-
- service.execute
- end
- end
-
- context 'when application is able to be updated' do
- context 'when the application was recently scheduled' do
- it 'schedules worker with a backoff delay' do
- application = create(:clusters_applications_prometheus, :installed, last_update_started_at: Time.current + 5.minutes)
- service = described_class.new(application, project)
-
- expect(::ClusterUpdateAppWorker).to receive(:perform_in).with(described_class::BACKOFF_DELAY, application.name, application.id, project.id, Time.current).once
-
- service.execute
- end
- end
-
- context 'when the application has not been recently updated' do
- it 'schedules worker' do
- application = create(:clusters_applications_prometheus, :installed)
- service = described_class.new(application, project)
-
- expect(::ClusterUpdateAppWorker).to receive(:perform_async).with(application.name, application.id, project.id, Time.current).once
-
- service.execute
- end
- end
- end
- end
-end
diff --git a/spec/services/deployments/create_service_spec.rb b/spec/services/deployments/create_service_spec.rb
index 0f2a6ce32e1..f6f4c68a6f1 100644
--- a/spec/services/deployments/create_service_spec.rb
+++ b/spec/services/deployments/create_service_spec.rb
@@ -21,11 +21,34 @@ RSpec.describe Deployments::CreateService do
expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- expect(Deployments::HooksWorker).to receive(:perform_async)
+ expect_next_instance_of(Deployment) do |deployment|
+ expect(deployment).to receive(:execute_hooks)
+ end
expect(service.execute).to be_persisted
end
+ context 'when `deployment_hooks_skip_worker` flag is disabled' do
+ before do
+ stub_feature_flags(deployment_hooks_skip_worker: false)
+ end
+
+ it 'executes Deployments::HooksWorker asynchronously' do
+ service = described_class.new(
+ environment,
+ user,
+ sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
+ ref: 'master',
+ tag: false,
+ status: 'success'
+ )
+
+ expect(Deployments::HooksWorker).to receive(:perform_async)
+
+ service.execute
+ end
+ end
+
it 'does not change the status if no status is given' do
service = described_class.new(
environment,
@@ -37,7 +60,9 @@ RSpec.describe Deployments::CreateService do
expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
- expect(Deployments::HooksWorker).not_to receive(:perform_async)
+ expect_next_instance_of(Deployment) do |deployment|
+ expect(deployment).not_to receive(:execute_hooks)
+ end
expect(service.execute).to be_persisted
end
@@ -55,11 +80,9 @@ RSpec.describe Deployments::CreateService do
it 'does not create a new deployment' do
described_class.new(environment, user, params).execute
- expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
- expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
- expect(Deployments::HooksWorker).not_to receive(:perform_async)
-
- described_class.new(environment.reload, user, params).execute
+ expect do
+ described_class.new(environment.reload, user, params).execute
+ end.not_to change { Deployment.count }
end
end
end
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 0859aa2c9d1..e2d7a80fde3 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(Deployments::HooksWorker).to receive(:perform_async)
+ allow(deployment).to receive(:execute_hooks)
job.success! # Create/Succeed deployment
end
@@ -84,7 +84,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
context 'and environment is stopped' do
before do
- environment.stop
+ environment.stop_complete
end
it 'makes environment available' do
diff --git a/spec/services/emails/confirm_service_spec.rb b/spec/services/emails/confirm_service_spec.rb
index d3a745bc744..e8d3c0d673b 100644
--- a/spec/services/emails/confirm_service_spec.rb
+++ b/spec/services/emails/confirm_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Emails::ConfirmService do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
subject(:service) { described_class.new(user) }
@@ -11,7 +11,9 @@ RSpec.describe Emails::ConfirmService do
it 'enqueues a background job to send confirmation email again' do
email = user.emails.create!(email: 'new@email.com')
- expect { service.execute(email) }.to have_enqueued_job.on_queue('mailers')
+ travel_to(10.minutes.from_now) do
+ expect { service.execute(email) }.to have_enqueued_job.on_queue('mailers')
+ end
end
end
end
diff --git a/spec/services/environments/auto_stop_service_spec.rb b/spec/services/environments/auto_stop_service_spec.rb
index 8dad59cbefd..d688690c376 100644
--- a/spec/services/environments/auto_stop_service_spec.rb
+++ b/spec/services/environments/auto_stop_service_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Environments::AutoStopService, :clean_gitlab_redis_shared_state,
it 'stops environments and play stop jobs' do
expect { subject }
.to change { Environment.all.map(&:state).uniq }
- .from(['available']).to(['stopped'])
+ .from(['available']).to(['stopping'])
expect(Ci::Build.where(name: 'stop_review_app').map(&:status).uniq).to eq(['pending'])
end
diff --git a/spec/services/environments/stop_service_spec.rb b/spec/services/environments/stop_service_spec.rb
index afbc0ba70f9..3ed8a0b1da0 100644
--- a/spec/services/environments/stop_service_spec.rb
+++ b/spec/services/environments/stop_service_spec.rb
@@ -29,14 +29,27 @@ RSpec.describe Environments::StopService do
review_job.success!
end
- it 'stops the environment' do
- expect { subject }.to change { environment.reload.state }.from('available').to('stopped')
+ context 'without stop action' do
+ let!(:environment) { create(:environment, :available, project: project) }
+
+ it 'stops the environment' do
+ expect { subject }.to change { environment.reload.state }.from('available').to('stopped')
+ end
end
it 'plays the stop action' do
expect { subject }.to change { stop_review_job.reload.status }.from('manual').to('pending')
end
+ context 'force option' do
+ let(:service) { described_class.new(project, user, { force: true }) }
+
+ it 'does not play the stop action when forced' do
+ expect { subject }.to change { environment.reload.state }.from('available').to('stopped')
+ expect(stop_review_job.reload.status).to eq('manual')
+ end
+ end
+
context 'when an environment has already been stopped' do
let!(:environment) { create(:environment, :stopped, project: project) }
@@ -65,11 +78,6 @@ RSpec.describe Environments::StopService do
describe '#execute_for_branch' do
context 'when environment with review app exists' do
- before do
- create(:environment, :with_review_app, project: project,
- ref: 'feature')
- end
-
context 'when user has permission to stop environment' do
before do
project.add_developer(user)
@@ -77,25 +85,25 @@ RSpec.describe Environments::StopService do
context 'when environment is associated with removed branch' do
it 'stops environment' do
- expect_environment_stopped_on('feature')
+ expect_environment_stopping_on('feature', feature_environment)
end
end
context 'when environment is associated with different branch' do
it 'does not stop environment' do
- expect_environment_not_stopped_on('master')
+ expect_environment_not_stopped_on('master', feature_environment)
end
end
context 'when specified branch does not exist' do
it 'does not stop environment' do
- expect_environment_not_stopped_on('non/existent/branch')
+ expect_environment_not_stopped_on('non/existent/branch', feature_environment)
end
end
context 'when no branch is specified' do
it 'does not stop environment' do
- expect_environment_not_stopped_on(nil)
+ expect_environment_not_stopped_on(nil, feature_environment)
end
end
@@ -107,7 +115,7 @@ RSpec.describe Environments::StopService do
end
it 'does not stop environment' do
- expect_environment_not_stopped_on('feature')
+ expect_environment_not_stopped_on('feature', feature_environment)
end
end
end
@@ -119,7 +127,7 @@ RSpec.describe Environments::StopService do
end
it 'does not stop environment' do
- expect_environment_not_stopped_on('master')
+ expect_environment_not_stopped_on('master', feature_environment)
end
end
end
@@ -132,7 +140,7 @@ RSpec.describe Environments::StopService do
end
it 'does not stop environment' do
- expect_environment_not_stopped_on('master')
+ expect_environment_not_stopped_on('master', feature_environment)
end
end
end
@@ -148,7 +156,7 @@ RSpec.describe Environments::StopService do
end
it 'does not stop environment' do
- expect_environment_not_stopped_on('master')
+ expect_environment_not_stopped_on('master', feature_environment)
end
end
end
@@ -177,7 +185,7 @@ RSpec.describe Environments::StopService do
merge_requests_as_head_pipeline: [merge_request])
end
- let!(:review_job) { create(:ci_build, :with_deployment, :start_review_app, pipeline: pipeline, project: project) }
+ let!(:review_job) { create(:ci_build, :with_deployment, :start_review_app, :success, pipeline: pipeline, project: project) }
let!(:stop_review_job) { create(:ci_build, :with_deployment, :stop_review_app, :manual, pipeline: pipeline, project: project) }
before do
@@ -195,8 +203,7 @@ RSpec.describe Environments::StopService do
it 'stops the active environment' do
subject
-
- expect(pipeline.environments_in_self_and_descendants.first).to be_stopped
+ expect(pipeline.environments_in_self_and_descendants.first).to be_stopping
end
context 'when pipeline is a branch pipeline for merge request' do
@@ -263,13 +270,22 @@ RSpec.describe Environments::StopService do
end
end
- def expect_environment_stopped_on(branch)
+ def expect_environment_stopped_on(branch, environment)
+ expect { service.execute_for_branch(branch) }
+ .to change { environment.reload.state }.from('available').to('stopped')
+ end
+
+ def expect_environment_stopping_on(branch, environment)
expect { service.execute_for_branch(branch) }
- .to change { Environment.last.state }.from('available').to('stopped')
+ .to change { environment.reload.state }.from('available').to('stopping')
end
- def expect_environment_not_stopped_on(branch)
+ def expect_environment_not_stopped_on(branch, environment)
expect { service.execute_for_branch(branch) }
- .not_to change { Environment.last.state }
+ .not_to change { environment.reload.state }.from('available')
+ end
+
+ def feature_environment
+ create(:environment, :with_review_app, project: project, ref: 'feature')
end
end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index c22099fe410..56da85cc4a0 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -21,8 +21,10 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
end
shared_examples 'Snowplow event' do
+ let(:label) { nil }
+
it 'is not emitted if FF is disabled' do
- stub_feature_flags(route_hll_to_snowplow: false)
+ stub_feature_flags(feature_flag_name => false)
subject
@@ -30,15 +32,18 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
end
it 'is emitted' do
+ params = {
+ category: category,
+ action: action,
+ namespace: namespace,
+ user: user,
+ project: project,
+ label: label
+ }.compact
+
subject
- expect_snowplow_event(
- category: described_class.to_s,
- action: 'action_active_users_project_repo',
- namespace: project.namespace,
- user: user,
- project: project
- )
+ expect_snowplow_event(**params)
end
end
@@ -74,7 +79,7 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
end
end
- describe 'Merge Requests' do
+ describe 'Merge Requests', :snowplow do
describe '#open_mr' do
subject(:open_mr) { service.open_mr(merge_request, merge_request.author) }
@@ -89,6 +94,16 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION }
end
+
+ it_behaves_like 'Snowplow event' do
+ let(:category) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION.to_s }
+ let(:label) { 'merge_requests_users' }
+ let(:action) { 'create' }
+ let(:namespace) { project.namespace }
+ let(:project) { merge_request.project }
+ let(:user) { merge_request.author }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ end
end
describe '#close_mr' do
@@ -105,6 +120,16 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION }
end
+
+ it_behaves_like 'Snowplow event' do
+ let(:category) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION.to_s }
+ let(:label) { 'merge_requests_users' }
+ let(:action) { 'close' }
+ let(:namespace) { project.namespace }
+ let(:project) { merge_request.project }
+ let(:user) { merge_request.author }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ end
end
describe '#merge_mr' do
@@ -121,6 +146,16 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
it_behaves_like "it records the event in the event counter" do
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION }
end
+
+ it_behaves_like 'Snowplow event' do
+ let(:category) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION.to_s }
+ let(:label) { 'merge_requests_users' }
+ let(:action) { 'merge' }
+ let(:namespace) { project.namespace }
+ let(:project) { merge_request.project }
+ let(:user) { merge_request.author }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ end
end
describe '#reopen_mr' do
@@ -295,7 +330,12 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::PUSH_ACTION }
end
- it_behaves_like 'Snowplow event'
+ it_behaves_like 'Snowplow event' do
+ let(:category) { described_class.to_s }
+ let(:action) { 'action_active_users_project_repo' }
+ let(:namespace) { project.namespace }
+ let(:feature_flag_name) { :route_hll_to_snowplow }
+ end
end
describe '#bulk_push', :snowplow do
@@ -315,7 +355,12 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
let(:event_action) { Gitlab::UsageDataCounters::TrackUniqueEvents::PUSH_ACTION }
end
- it_behaves_like 'Snowplow event'
+ it_behaves_like 'Snowplow event' do
+ let(:category) { described_class.to_s }
+ let(:action) { 'action_active_users_project_repo' }
+ let(:namespace) { project.namespace }
+ let(:feature_flag_name) { :route_hll_to_snowplow }
+ end
end
describe 'Project' do
@@ -392,7 +437,7 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
end
end
- describe '#leave_note' do
+ describe '#leave_note', :snowplow do
subject(:leave_note) { service.leave_note(note, author) }
let(:note) { create(:note) }
@@ -409,6 +454,17 @@ RSpec.describe EventCreateService, :clean_gitlab_redis_cache, :clean_gitlab_redi
it_behaves_like "it records the event in the event counter" do
let(:note) { create(:diff_note_on_merge_request) }
end
+
+ it_behaves_like 'Snowplow event' do
+ let(:note) { create(:diff_note_on_merge_request) }
+ let(:category) { Gitlab::UsageDataCounters::TrackUniqueEvents::MERGE_REQUEST_ACTION.to_s }
+ let(:label) { 'merge_requests_users' }
+ let(:action) { 'comment' }
+ let(:project) { note.project }
+ let(:namespace) { project.namespace }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:user) { author }
+ end
end
context 'when it is not a diff note' do
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 57c130f76a4..befa9598964 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -410,7 +410,7 @@ RSpec.describe Git::BranchPushService, services: true do
end
context "for jira issue tracker" do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let(:jira_tracker) { project.create_jira_integration if project.jira_integration.nil? }
diff --git a/spec/services/import/fogbugz_service_spec.rb b/spec/services/import/fogbugz_service_spec.rb
new file mode 100644
index 00000000000..c9477dba7a5
--- /dev/null
+++ b/spec/services/import/fogbugz_service_spec.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::FogbugzService do
+ let_it_be(:user) { create(:user) }
+
+ let(:base_uri) { "https://test:7990" }
+ let(:token) { "asdasd12345" }
+ let(:repo_id) { "fogbugz_id" }
+ let(:repo) { instance_double(Gitlab::FogbugzImport::Repository, name: 'test', raw_data: nil) }
+
+ let(:client) { instance_double(Gitlab::FogbugzImport::Client) }
+ let(:credentials) { { uri: base_uri, token: token } }
+ let(:params) { { repo_id: repo_id } }
+
+ subject { described_class.new(client, user, params) }
+
+ before do
+ allow(subject).to receive(:authorized?).and_return(true)
+ end
+
+ context 'when no repo is found' do
+ before do
+ allow(client).to receive(:repo).with(repo_id).and_return(nil)
+ end
+
+ it 'returns an error' do
+ result = subject.execute(credentials)
+
+ expect(result).to include(
+ message: "Project #{repo_id} could not be found",
+ status: :error,
+ http_status: :unprocessable_entity
+ )
+ end
+ end
+
+ context 'when import source is disabled' do
+ before do
+ stub_application_setting(import_sources: nil)
+ allow(client).to receive(:repo).with(repo_id).and_return(repo)
+ end
+
+ it 'returns forbidden' do
+ result = subject.execute(credentials)
+
+ expect(result).to include(
+ status: :error,
+ http_status: :forbidden
+ )
+ end
+ end
+
+ context 'when user is unauthorized' do
+ before do
+ allow(subject).to receive(:authorized?).and_return(false)
+ end
+
+ it 'returns an error' do
+ result = subject.execute(credentials)
+
+ expect(result).to include(
+ message: "You don't have permissions to create this project",
+ status: :error,
+ http_status: :unauthorized
+ )
+ end
+ end
+
+ context 'verify url' do
+ shared_examples 'denies local request' do
+ before do
+ allow(client).to receive(:repo).with(repo_id).and_return(repo)
+ end
+
+ it 'does not allow requests' do
+ result = subject.execute(credentials)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to include("Invalid URL:")
+ end
+ end
+
+ context 'when host is localhost' do
+ let(:base_uri) { 'http://localhost:3000' }
+
+ include_examples 'denies local request'
+ end
+
+ context 'when host is on local network' do
+ let(:base_uri) { 'https://192.168.0.191' }
+
+ include_examples 'denies local request'
+ end
+
+ context 'when host is ftp protocol' do
+ let(:base_uri) { 'ftp://testing' }
+
+ include_examples 'denies local request'
+ end
+ end
+
+ context 'when import starts successfully' do
+ before do
+ allow(client).to receive(:repo).with(repo_id).and_return(
+ instance_double(Gitlab::FogbugzImport::Repository, name: 'test', raw_data: nil)
+ )
+ end
+
+ it 'returns success' do
+ result = subject.execute(credentials)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:project].name).to eq('test')
+ end
+ end
+
+ context 'when import fails to start' do
+ let(:error_messages_array) { instance_double(Array, join: "something went wrong") }
+ let(:errors_double) { instance_double(ActiveModel::Errors, full_messages: error_messages_array, :[] => nil) }
+ let(:project_double) { instance_double(Project, persisted?: false, errors: errors_double) }
+ let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double )}
+
+ before do
+ allow(Gitlab::FogbugzImport::ProjectCreator).to receive(:new).and_return(project_creator)
+ allow(client).to receive(:repo).with(repo_id).and_return(
+ instance_double(Gitlab::FogbugzImport::Repository, name: 'test', raw_data: nil)
+ )
+ end
+
+ it 'returns error' do
+ result = subject.execute(credentials)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq("something went wrong")
+ end
+ end
+
+ it 'returns error for unknown error causes' do
+ message = 'Not Implemented'
+ exception = StandardError.new(message)
+
+ allow(client).to receive(:repo).and_raise(exception)
+
+ expect(subject.execute(credentials)).to include({
+ status: :error,
+ message: "Fogbugz import failed due to an error: #{message}"
+ })
+ end
+end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
index 25164df40ca..6c99631fcb0 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
@@ -42,14 +42,6 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateServ
it_behaves_like 'successful response', { status_event: :acknowledge }
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it_behaves_like 'availability error response'
- end
-
context 'when user is anonymous' do
let(:current_user) { nil }
diff --git a/spec/services/incident_management/timeline_events/create_service_spec.rb b/spec/services/incident_management/timeline_events/create_service_spec.rb
index 38ce15e74f1..133a644f243 100644
--- a/spec/services/incident_management/timeline_events/create_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/create_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
}
end
+ let(:editable) { false }
let(:current_user) { user_with_permissions }
let(:service) { described_class.new(incident, current_user, args) }
@@ -32,6 +33,8 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
expect(execute).to be_error
expect(execute.message).to eq(message)
end
+
+ it_behaves_like 'does not track incident management event', :incident_management_timeline_event_created
end
shared_examples 'success response' do
@@ -45,7 +48,10 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
expect(result.project).to eq(project)
expect(result.note).to eq(args[:note])
expect(result.promoted_from_note).to eq(comment)
+ expect(result.editable).to eq(editable)
end
+
+ it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_created
end
subject(:execute) { service.execute }
@@ -90,6 +96,30 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
end
end
+ context 'with editable param' do
+ let(:args) do
+ {
+ note: 'note',
+ occurred_at: Time.current,
+ action: 'new comment',
+ promoted_from_note: comment,
+ editable: editable
+ }
+ end
+
+ context 'when editable is true' do
+ let(:editable) { true }
+
+ it_behaves_like 'success response'
+ end
+
+ context 'when editable is false' do
+ let(:editable) { false }
+
+ it_behaves_like 'success response'
+ end
+ end
+
it 'successfully creates a database record', :aggregate_failures do
expect { execute }.to change { ::IncidentManagement::TimelineEvent.count }.by(1)
end
diff --git a/spec/services/incident_management/timeline_events/destroy_service_spec.rb b/spec/services/incident_management/timeline_events/destroy_service_spec.rb
index 01daee2b749..09026f87116 100644
--- a/spec/services/incident_management/timeline_events/destroy_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/destroy_service_spec.rb
@@ -24,6 +24,8 @@ RSpec.describe IncidentManagement::TimelineEvents::DestroyService do
expect(execute).to be_error
expect(execute.message).to eq(message)
end
+
+ it_behaves_like 'does not track incident management event', :incident_management_timeline_event_deleted
end
subject(:execute) { service.execute }
@@ -49,12 +51,16 @@ RSpec.describe IncidentManagement::TimelineEvents::DestroyService do
it_behaves_like 'error response', 'Note cannot be removed'
end
- it 'successfully returns the timeline event', :aggregate_failures do
- expect(execute).to be_success
+ context 'success response' do
+ it 'successfully returns the timeline event', :aggregate_failures do
+ expect(execute).to be_success
+
+ result = execute.payload[:timeline_event]
+ expect(result).to be_a(::IncidentManagement::TimelineEvent)
+ expect(result.id).to eq(timeline_event.id)
+ end
- result = execute.payload[:timeline_event]
- expect(result).to be_a(::IncidentManagement::TimelineEvent)
- expect(result.id).to eq(timeline_event.id)
+ it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_deleted
end
context 'when incident_timeline feature flag is enabled' do
diff --git a/spec/services/incident_management/timeline_events/update_service_spec.rb b/spec/services/incident_management/timeline_events/update_service_spec.rb
index 8bc0e5ce0ed..3da533fb2a6 100644
--- a/spec/services/incident_management/timeline_events/update_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/update_service_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
let!(:timeline_event) { create(:incident_management_timeline_event, project: project, incident: incident) }
let(:occurred_at) { 1.minute.ago }
let(:params) { { note: 'Updated note', occurred_at: occurred_at } }
+ let(:current_user) { user }
before do
stub_feature_flags(incident_timeline: project)
@@ -21,6 +22,8 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
expect(execute).to be_success
expect(execute.payload).to eq(timeline_event: timeline_event.reload)
end
+
+ it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_edited
end
shared_examples 'error response' do |message|
@@ -28,6 +31,8 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
expect(execute).to be_error
expect(execute.message).to eq(message)
end
+
+ it_behaves_like 'does not track incident management event', :incident_management_timeline_event_edited
end
shared_examples 'passing the correct was_changed value' do |was_changed|
@@ -135,6 +140,14 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
execute
end
end
+
+ context 'when timeline event is non-editable' do
+ let!(:timeline_event) do
+ create(:incident_management_timeline_event, :non_editable, project: project, incident: incident)
+ end
+
+ it_behaves_like 'error response', 'You cannot edit this timeline event.'
+ end
end
context 'when user does not have permissions' do
diff --git a/spec/services/integrations/propagate_service_spec.rb b/spec/services/integrations/propagate_service_spec.rb
index 7ae843f6aeb..c971c4a0ad0 100644
--- a/spec/services/integrations/propagate_service_spec.rb
+++ b/spec/services/integrations/propagate_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Integrations::PropagateService do
describe '.propagate' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
before do
stub_jira_integration_test
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 3934ca04a00..5c1544d8ebc 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -66,6 +66,7 @@ RSpec.describe Issues::CreateService do
expect(issue.milestone).to eq(milestone)
expect(issue.due_date).to eq(Date.tomorrow)
expect(issue.work_item_type.base_type).to eq('issue')
+ expect(issue.issue_customer_relations_contacts).to be_empty
end
context 'when a build_service is provided' do
@@ -444,6 +445,50 @@ RSpec.describe Issues::CreateService do
expect(issue.issue_customer_relations_contacts.last.contact).to eq(contact)
end
end
+
+ context 'with external_author' do
+ let_it_be(:contact) { create(:contact, group: group) }
+
+ context 'when CRM contact exists with matching e-mail' do
+ let(:opts) do
+ {
+ title: 'Title',
+ external_author: contact.email
+ }
+ end
+
+ context 'with permission' do
+ it 'assigns contact to issue' do
+ group.add_reporter(user)
+ expect(issue).to be_persisted
+ expect(issue.issue_customer_relations_contacts.last.contact).to eq(contact)
+ end
+ end
+
+ context 'without permission' do
+ it 'does not assign contact to issue' do
+ group.add_guest(user)
+ expect(issue).to be_persisted
+ expect(issue.issue_customer_relations_contacts).to be_empty
+ end
+ end
+ end
+
+ context 'when no CRM contact exists with matching e-mail' do
+ let(:opts) do
+ {
+ title: 'Title',
+ external_author: 'example@gitlab.com'
+ }
+ end
+
+ it 'does not assign contact to issue' do
+ group.add_reporter(user)
+ expect(issue).to be_persisted
+ expect(issue.issue_customer_relations_contacts).to be_empty
+ end
+ end
+ end
end
context 'resolving discussions' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 35a380e01d0..56a3c22cd7f 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -194,20 +194,6 @@ RSpec.describe Issues::MoveService do
expect(new_issue.customer_relations_contacts).to be_empty
end
end
-
- context 'when customer_relations feature is disabled' do
- let(:another_project) { create(:project, namespace: create(:group)) }
-
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'does not preserve contacts' do
- new_issue = move_service.execute(old_issue, new_project)
-
- expect(new_issue.customer_relations_contacts).to be_empty
- end
- end
end
context 'moving to same project' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index d496857bb25..d11fe772023 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -1230,14 +1230,6 @@ RSpec.describe Issues::UpdateService, :mailer do
it_behaves_like 'updates the escalation status record', :acknowledged
end
-
- context 'with :incident_escalations feature flag disabled' do
- before do
- stub_feature_flags(incident_escalations: false)
- end
-
- it_behaves_like 'does not change the status record'
- end
end
context 'when issue type is not incident' do
diff --git a/spec/services/jira_connect_subscriptions/create_service_spec.rb b/spec/services/jira_connect_subscriptions/create_service_spec.rb
index cde4753cde7..85208a30c30 100644
--- a/spec/services/jira_connect_subscriptions/create_service_spec.rb
+++ b/spec/services/jira_connect_subscriptions/create_service_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe JiraConnectSubscriptions::CreateService do
- let(:installation) { create(:jira_connect_installation) }
- let(:current_user) { create(:user) }
- let(:group) { create(:group) }
+ let_it_be(:installation) { create(:jira_connect_installation) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
let(:path) { group.full_path }
let(:params) { { namespace_path: path, jira_user: jira_user } }
let(:jira_user) { double(:JiraUser, site_admin?: true) }
@@ -16,38 +17,31 @@ RSpec.describe JiraConnectSubscriptions::CreateService do
group.add_maintainer(current_user)
end
- shared_examples 'a failed execution' do
+ shared_examples 'a failed execution' do |**status_attributes|
it 'does not create a subscription' do
expect { subject }.not_to change { installation.subscriptions.count }
end
it 'returns an error status' do
expect(subject[:status]).to eq(:error)
+ expect(subject).to include(status_attributes)
end
end
context 'remote user does not have access' do
let(:jira_user) { double(site_admin?: false) }
- it 'does not create a subscription' do
- expect { subject }.not_to change { installation.subscriptions.count }
- end
-
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- end
+ it_behaves_like 'a failed execution',
+ http_status: 403,
+ message: 'The Jira user is not a site administrator. Check the permissions in Jira and try again.'
end
context 'remote user cannot be retrieved' do
let(:jira_user) { nil }
- it 'does not create a subscription' do
- expect { subject }.not_to change { installation.subscriptions.count }
- end
-
- it 'returns error' do
- expect(subject[:status]).to eq(:error)
- end
+ it_behaves_like 'a failed execution',
+ http_status: 403,
+ message: 'Could not fetch user information from Jira. Check the permissions in Jira and try again.'
end
context 'when user does have access' do
@@ -60,8 +54,8 @@ RSpec.describe JiraConnectSubscriptions::CreateService do
end
context 'namespace has projects' do
- let!(:project_1) { create(:project, group: group) }
- let!(:project_2) { create(:project, group: group) }
+ let_it_be(:project_1) { create(:project, group: group) }
+ let_it_be(:project_2) { create(:project, group: group) }
before do
stub_const("#{described_class}::MERGE_REQUEST_SYNC_BATCH_SIZE", 1)
@@ -81,12 +75,18 @@ RSpec.describe JiraConnectSubscriptions::CreateService do
context 'when path is invalid' do
let(:path) { 'some_invalid_namespace_path' }
- it_behaves_like 'a failed execution'
+ it_behaves_like 'a failed execution',
+ http_status: 401,
+ message: 'Cannot find namespace. Make sure you have sufficient permissions.'
end
context 'when user does not have access' do
- subject { described_class.new(installation, create(:user), namespace_path: path).execute }
+ let_it_be(:other_group) { create(:group) }
+
+ let(:path) { other_group.full_path }
- it_behaves_like 'a failed execution'
+ it_behaves_like 'a failed execution',
+ http_status: 401,
+ message: 'Cannot find namespace. Make sure you have sufficient permissions.'
end
end
diff --git a/spec/services/jira_import/start_import_service_spec.rb b/spec/services/jira_import/start_import_service_spec.rb
index e04e3314158..510f58f0e75 100644
--- a/spec/services/jira_import/start_import_service_spec.rb
+++ b/spec/services/jira_import/start_import_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe JiraImport::StartImportService do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
diff --git a/spec/services/jira_import/users_importer_spec.rb b/spec/services/jira_import/users_importer_spec.rb
index af408847260..ace9e0d5779 100644
--- a/spec/services/jira_import/users_importer_spec.rb
+++ b/spec/services/jira_import/users_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe JiraImport::UsersImporter do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
diff --git a/spec/services/markdown_content_rewriter_service_spec.rb b/spec/services/markdown_content_rewriter_service_spec.rb
index 37c8a210ba5..91a117536ca 100644
--- a/spec/services/markdown_content_rewriter_service_spec.rb
+++ b/spec/services/markdown_content_rewriter_service_spec.rb
@@ -8,38 +8,63 @@ RSpec.describe MarkdownContentRewriterService do
let_it_be(:target_parent) { create(:project, :public) }
let(:content) { 'My content' }
+ let(:issue) { create(:issue, project: source_parent, description: content)}
describe '#initialize' do
it 'raises an error if source_parent is not a Project' do
expect do
- described_class.new(user, content, create(:group), target_parent)
+ described_class.new(user, issue, :description, create(:group), target_parent)
end.to raise_error(ArgumentError, 'The rewriter classes require that `source_parent` is a `Project`')
end
+
+ it 'raises an error if field does not have cached markdown' do
+ expect do
+ described_class.new(user, issue, :author, source_parent, target_parent)
+ end.to raise_error(ArgumentError, 'The `field` attribute does not contain cached markdown')
+ end
end
describe '#execute' do
- subject { described_class.new(user, content, source_parent, target_parent).execute }
+ subject { described_class.new(user, issue, :description, source_parent, target_parent).execute }
- it 'calls the rewriter classes successfully', :aggregate_failures do
- [Gitlab::Gfm::ReferenceRewriter, Gitlab::Gfm::UploadsRewriter].each do |rewriter_class|
- service = double
-
- expect(service).to receive(:rewrite).with(target_parent)
- expect(rewriter_class).to receive(:new).and_return(service)
+ context 'when content does not need a rewrite' do
+ it 'returns original content and cached html' do
+ expect(subject).to eq({
+ 'description' => issue.description,
+ 'description_html' => issue.description_html,
+ 'skip_markdown_cache_validation' => true
+ })
end
+ end
+
+ context 'when content needs a rewrite' do
+ it 'calls the rewriter classes successfully', :aggregate_failures do
+ described_class::REWRITERS.each do |rewriter_class|
+ service = double
- subject
+ allow(service).to receive(:needs_rewrite?).and_return(true)
+
+ expect(service).to receive(:rewrite).with(target_parent)
+ expect(rewriter_class).to receive(:new).and_return(service)
+ end
+
+ subject
+ end
end
# Perform simple integration-style tests for each rewriter class.
# to prove they run correctly.
- context 'when content contains a reference' do
- let_it_be(:issue) { create(:issue, project: source_parent) }
+ context 'when content has references' do
+ let_it_be(:issue_to_reference) { create(:issue, project: source_parent) }
- let(:content) { "See ##{issue.iid}" }
+ let(:content) { "See ##{issue_to_reference.iid}" }
it 'rewrites content' do
- expect(subject).to eq("See #{source_parent.full_path}##{issue.iid}")
+ expect(subject).to eq({
+ 'description' => "See #{source_parent.full_path}##{issue_to_reference.iid}",
+ 'description_html' => nil,
+ 'skip_markdown_cache_validation' => false
+ })
end
end
@@ -50,9 +75,37 @@ RSpec.describe MarkdownContentRewriterService do
it 'rewrites content' do
new_content = subject
- expect(new_content).not_to eq(content)
- expect(new_content.length).to eq(content.length)
+ expect(new_content[:description]).not_to eq(content)
+ expect(new_content[:description].length).to eq(content.length)
+ expect(new_content[1]).to eq(nil)
end
end
end
+
+ describe '#safe_to_copy_markdown?' do
+ subject do
+ rewriter = described_class.new(user, issue, :description, source_parent, target_parent)
+ rewriter.safe_to_copy_markdown?
+ end
+
+ context 'when content has references' do
+ let(:milestone) { create(:milestone, project: source_parent) }
+ let(:content) { "Description that references #{milestone.to_reference}" }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when content has uploaded file references' do
+ let(:image_uploader) { build(:file_uploader, project: source_parent) }
+ let(:content) { "Text and #{image_uploader.markdown_link}" }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when content does not have references or uploads' do
+ let(:content) { "simples text with ```code```" }
+
+ it { is_expected.to eq(true) }
+ end
+ end
end
diff --git a/spec/services/members/approve_access_request_service_spec.rb b/spec/services/members/approve_access_request_service_spec.rb
index f7fbac612ee..d26bab7bb0a 100644
--- a/spec/services/members/approve_access_request_service_spec.rb
+++ b/spec/services/members/approve_access_request_service_spec.rb
@@ -9,36 +9,34 @@ RSpec.describe Members::ApproveAccessRequestService do
let(:access_requester_user) { create(:user) }
let(:access_requester) { source.requesters.find_by!(user_id: access_requester_user.id) }
let(:opts) { {} }
-
- shared_examples 'a service raising ActiveRecord::RecordNotFound' do
- it 'raises ActiveRecord::RecordNotFound' do
- expect { described_class.new(current_user).execute(access_requester, **opts) }.to raise_error(ActiveRecord::RecordNotFound)
- end
- end
+ let(:params) { {} }
+ let(:custom_access_level) { Gitlab::Access::MAINTAINER }
shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do
it 'raises Gitlab::Access::AccessDeniedError' do
- expect { described_class.new(current_user).execute(access_requester, **opts) }.to raise_error(Gitlab::Access::AccessDeniedError)
+ expect { described_class.new(current_user, params).execute(access_requester, **opts) }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
shared_examples 'a service approving an access request' do
it 'succeeds' do
- expect { described_class.new(current_user).execute(access_requester, **opts) }.to change { source.requesters.count }.by(-1)
+ expect { described_class.new(current_user, params).execute(access_requester, **opts) }.to change { source.requesters.count }.by(-1)
end
it 'returns a <Source>Member' do
- member = described_class.new(current_user).execute(access_requester, **opts)
+ member = described_class.new(current_user, params).execute(access_requester, **opts)
expect(member).to be_a "#{source.class}Member".constantize
expect(member.requested_at).to be_nil
end
context 'with a custom access level' do
+ let(:params) { { access_level: custom_access_level } }
+
it 'returns a ProjectMember with the custom access level' do
- member = described_class.new(current_user, access_level: Gitlab::Access::MAINTAINER).execute(access_requester, **opts)
+ member = described_class.new(current_user, params).execute(access_requester, **opts)
- expect(member.access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(member.access_level).to eq(custom_access_level)
end
end
end
@@ -111,5 +109,38 @@ RSpec.describe Members::ApproveAccessRequestService do
let(:source) { group }
end
end
+
+ context 'in a project' do
+ let_it_be(:group_project) { create(:project, :public, group: create(:group, :public)) }
+
+ let(:source) { group_project }
+ let(:custom_access_level) { Gitlab::Access::OWNER }
+ let(:params) { { access_level: custom_access_level } }
+
+ before do
+ group_project.request_access(access_requester_user)
+ end
+
+ context 'maintainers' do
+ before do
+ group_project.add_maintainer(current_user)
+ end
+
+ context 'cannot approve the access request of a requester to give them OWNER permissions' do
+ it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError'
+ end
+ end
+
+ context 'owners' do
+ before do
+ # so that `current_user` is considered an `OWNER` in the project via inheritance.
+ group_project.group.add_owner(current_user)
+ end
+
+ context 'can approve the access request of a requester to give them OWNER permissions' do
+ it_behaves_like 'a service approving an access request'
+ end
+ end
+ end
end
end
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 730175af0bb..e79e13af769 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -33,6 +33,18 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
it 'raises a Gitlab::Access::AccessDeniedError' do
expect { execute_service }.to raise_error(Gitlab::Access::AccessDeniedError)
end
+
+ context 'when a project maintainer attempts to add owners' do
+ let(:access_level) { Gitlab::Access::OWNER }
+
+ before do
+ source.add_maintainer(current_user)
+ end
+
+ it 'raises a Gitlab::Access::AccessDeniedError' do
+ expect { execute_service }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
end
context 'when passing an invalid source' do
diff --git a/spec/services/members/creator_service_spec.rb b/spec/services/members/creator_service_spec.rb
index ff5bf705b6c..8b1df2ab86d 100644
--- a/spec/services/members/creator_service_spec.rb
+++ b/spec/services/members/creator_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Members::CreatorService do
describe '#execute' do
it 'raises error for new member on authorization check implementation' do
expect do
- described_class.new(source, user, :maintainer, current_user: current_user).execute
+ described_class.add_user(source, user, :maintainer, current_user: current_user)
end.to raise_error(NotImplementedError)
end
@@ -19,7 +19,7 @@ RSpec.describe Members::CreatorService do
source.add_developer(user)
expect do
- described_class.new(source, user, :maintainer, current_user: current_user).execute
+ described_class.add_user(source, user, :maintainer, current_user: current_user)
end.to raise_error(NotImplementedError)
end
end
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index 1a1283b1078..9f0daba3327 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -105,26 +105,46 @@ RSpec.describe Members::DestroyService do
context 'with a project member' do
let(:member) { group_project.members.find_by(user_id: member_user.id) }
- before do
- group_project.add_developer(member_user)
+ context 'when current user does not have any membership management permissions' do
+ before do
+ group_project.add_developer(member_user)
+ end
+
+ it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError'
+
+ context 'when skipping authorisation' do
+ it_behaves_like 'a service destroying a member with access' do
+ let(:opts) { { skip_authorization: true, unassign_issuables: true } }
+ end
+ end
end
- it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError'
+ context 'when a project maintainer tries to destroy a project owner' do
+ before do
+ group_project.add_owner(member_user)
+ end
- it_behaves_like 'a service destroying a member with access' do
- let(:opts) { { skip_authorization: true, unassign_issuables: true } }
+ it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError'
+
+ context 'when skipping authorisation' do
+ it_behaves_like 'a service destroying a member with access' do
+ let(:opts) { { skip_authorization: true, unassign_issuables: true } }
+ end
+ end
end
end
+ end
- context 'with a group member' do
- let(:member) { group.members.find_by(user_id: member_user.id) }
+ context 'with a group member' do
+ let(:member) { group.members.find_by(user_id: member_user.id) }
- before do
- group.add_developer(member_user)
- end
+ before do
+ group.add_developer(member_user)
+ end
- it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError'
+ it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError'
+ context 'when skipping authorisation' do
it_behaves_like 'a service destroying a member with access' do
let(:opts) { { skip_authorization: true, unassign_issuables: true } }
end
diff --git a/spec/services/members/groups/bulk_creator_service_spec.rb b/spec/services/members/groups/bulk_creator_service_spec.rb
deleted file mode 100644
index 0623ae00080..00000000000
--- a/spec/services/members/groups/bulk_creator_service_spec.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Members::Groups::BulkCreatorService do
- it_behaves_like 'bulk member creation' do
- let_it_be(:source, reload: true) { create(:group, :public) }
- let_it_be(:member_type) { GroupMember }
- end
-end
diff --git a/spec/services/members/groups/creator_service_spec.rb b/spec/services/members/groups/creator_service_spec.rb
index c3ba7c0374d..b80b7998eac 100644
--- a/spec/services/members/groups/creator_service_spec.rb
+++ b/spec/services/members/groups/creator_service_spec.rb
@@ -3,16 +3,24 @@
require 'spec_helper'
RSpec.describe Members::Groups::CreatorService do
+ let_it_be(:source, reload: true) { create(:group, :public) }
+ let_it_be(:user) { create(:user) }
+
describe '.access_levels' do
it 'returns Gitlab::Access.options_with_owner' do
expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner)
end
end
- describe '#execute' do
- let_it_be(:source, reload: true) { create(:group, :public) }
- let_it_be(:user) { create(:user) }
+ it_behaves_like 'owner management'
+
+ describe '.add_users' do
+ it_behaves_like 'bulk member creation' do
+ let_it_be(:member_type) { GroupMember }
+ end
+ end
+ describe '.add_user' do
it_behaves_like 'member creation' do
let_it_be(:member_type) { GroupMember }
end
@@ -22,7 +30,7 @@ RSpec.describe Members::Groups::CreatorService do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_and_wait).once
1.upto(3) do
- described_class.new(source, user, :maintainer).execute
+ described_class.add_user(source, user, :maintainer)
end
end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index 8213e8baae0..a948041479b 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -367,20 +367,21 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
context 'when email is already a member with a user on the project' do
let!(:existing_member) { create(:project_member, :guest, project: project) }
- let(:params) { { email: "#{existing_member.user.email}" } }
+ let(:params) { { email: "#{existing_member.user.email}", access_level: ProjectMember::MAINTAINER } }
- it 'returns an error for the already invited email' do
- expect_not_to_create_members
- expect(result[:message][existing_member.user.email]).to eq("User already exists in source")
+ it 'allows re-invite of an already invited email and updates the access_level' do
+ expect { result }.not_to change(ProjectMember, :count)
+ expect(result[:status]).to eq(:success)
+ expect(existing_member.reset.access_level).to eq ProjectMember::MAINTAINER
end
context 'when email belongs to an existing user as a secondary email' do
let(:secondary_email) { create(:email, email: 'secondary@example.com', user: existing_member.user) }
let(:params) { { email: "#{secondary_email.email}" } }
- it 'returns an error for the already invited email' do
- expect_not_to_create_members
- expect(result[:message][secondary_email.email]).to eq("User already exists in source")
+ it 'allows re-invite to an already invited email' do
+ expect_to_create_members(count: 0)
+ expect(result[:status]).to eq(:success)
end
end
end
diff --git a/spec/services/members/mailgun/process_webhook_service_spec.rb b/spec/services/members/mailgun/process_webhook_service_spec.rb
deleted file mode 100644
index d6a21183395..00000000000
--- a/spec/services/members/mailgun/process_webhook_service_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Members::Mailgun::ProcessWebhookService do
- describe '#execute', :aggregate_failures do
- let_it_be(:member) { create(:project_member, :invited) }
-
- let(:raw_invite_token) { member.raw_invite_token }
- let(:payload) { { 'user-variables' => { ::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY => raw_invite_token } } }
-
- subject(:service) { described_class.new(payload).execute }
-
- it 'marks the member invite email success as false' do
- expect(Gitlab::AppLogger).to receive(:info).with(/^UPDATED MEMBER INVITE_EMAIL_SUCCESS/).and_call_original
-
- expect { service }.to change { member.reload.invite_email_success }.from(true).to(false)
- end
-
- context 'when member can not be found' do
- let(:raw_invite_token) { '_foobar_' }
-
- it 'does not change member status' do
- expect(Gitlab::AppLogger).not_to receive(:info).with(/^UPDATED MEMBER INVITE_EMAIL_SUCCESS/)
-
- expect { service }.not_to change { member.reload.invite_email_success }
- end
- end
-
- context 'when invite token is not found in payload' do
- let(:payload) { {} }
-
- it 'does not change member status and logs an error' do
- expect(Gitlab::AppLogger).not_to receive(:info).with(/^UPDATED MEMBER INVITE_EMAIL_SUCCESS/)
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- an_instance_of(described_class::ProcessWebhookServiceError))
-
- expect { service }.not_to change { member.reload.invite_email_success }
- end
- end
- end
-end
diff --git a/spec/services/members/projects/bulk_creator_service_spec.rb b/spec/services/members/projects/bulk_creator_service_spec.rb
deleted file mode 100644
index 7acb7d79fe7..00000000000
--- a/spec/services/members/projects/bulk_creator_service_spec.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Members::Projects::BulkCreatorService do
- it_behaves_like 'bulk member creation' do
- let_it_be(:source, reload: true) { create(:project, :public) }
- let_it_be(:member_type) { ProjectMember }
- end
-end
diff --git a/spec/services/members/projects/creator_service_spec.rb b/spec/services/members/projects/creator_service_spec.rb
index 7605238c3c5..38955122ab0 100644
--- a/spec/services/members/projects/creator_service_spec.rb
+++ b/spec/services/members/projects/creator_service_spec.rb
@@ -3,16 +3,24 @@
require 'spec_helper'
RSpec.describe Members::Projects::CreatorService do
+ let_it_be(:source, reload: true) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
describe '.access_levels' do
it 'returns Gitlab::Access.sym_options_with_owner' do
expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner)
end
end
- describe '#execute' do
- let_it_be(:source, reload: true) { create(:project, :public) }
- let_it_be(:user) { create(:user) }
+ it_behaves_like 'owner management'
+
+ describe '.add_users' do
+ it_behaves_like 'bulk member creation' do
+ let_it_be(:member_type) { ProjectMember }
+ end
+ end
+ describe '.add_user' do
it_behaves_like 'member creation' do
let_it_be(:member_type) { ProjectMember }
end
@@ -22,7 +30,7 @@ RSpec.describe Members::Projects::CreatorService do
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to receive(:bulk_perform_in).once
1.upto(3) do
- described_class.new(source, user, :maintainer).execute
+ described_class.add_user(source, user, :maintainer)
end
end
end
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index a1b1397d444..f919d6d1516 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -9,8 +9,9 @@ RSpec.describe Members::UpdateService do
let(:member_user) { create(:user) }
let(:permission) { :update }
let(:member) { source.members_and_requesters.find_by!(user_id: member_user.id) }
+ let(:access_level) { Gitlab::Access::MAINTAINER }
let(:params) do
- { access_level: Gitlab::Access::MAINTAINER }
+ { access_level: access_level }
end
subject { described_class.new(current_user, params).execute(member, permission: permission) }
@@ -29,7 +30,7 @@ RSpec.describe Members::UpdateService do
updated_member = subject.fetch(:member)
expect(updated_member).to be_valid
- expect(updated_member.access_level).to eq(Gitlab::Access::MAINTAINER)
+ expect(updated_member.access_level).to eq(access_level)
end
it 'returns success status' do
@@ -111,4 +112,75 @@ RSpec.describe Members::UpdateService do
let(:source) { group }
end
end
+
+ context 'in a project' do
+ let_it_be(:group_project) { create(:project, group: create(:group)) }
+
+ let(:source) { group_project }
+
+ context 'a project maintainer' do
+ before do
+ group_project.add_maintainer(current_user)
+ end
+
+ context 'cannot update a member to OWNER' do
+ before do
+ group_project.add_developer(member_user)
+ end
+
+ it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do
+ let(:access_level) { Gitlab::Access::OWNER }
+ end
+ end
+
+ context 'cannot update themselves to OWNER' do
+ let(:member) { source.members_and_requesters.find_by!(user_id: current_user.id) }
+
+ before do
+ group_project.add_developer(member_user)
+ end
+
+ it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do
+ let(:access_level) { Gitlab::Access::OWNER }
+ end
+ end
+
+ context 'cannot downgrade a member from OWNER' do
+ before do
+ group_project.add_owner(member_user)
+ end
+
+ it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do
+ let(:access_level) { Gitlab::Access::MAINTAINER }
+ end
+ end
+ end
+
+ context 'owners' do
+ before do
+ # so that `current_user` is considered an `OWNER` in the project via inheritance.
+ group_project.group.add_owner(current_user)
+ end
+
+ context 'can update a member to OWNER' do
+ before do
+ group_project.add_developer(member_user)
+ end
+
+ it_behaves_like 'a service updating a member' do
+ let(:access_level) { Gitlab::Access::OWNER }
+ end
+ end
+
+ context 'can downgrade a member from OWNER' do
+ before do
+ group_project.add_owner(member_user)
+ end
+
+ it_behaves_like 'a service updating a member' do
+ let(:access_level) { Gitlab::Access::MAINTAINER }
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index ab3d9880d29..3c9d2271ddc 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -79,6 +79,15 @@ RSpec.describe MergeRequests::BuildService do
end
end
+ shared_examples 'with a Default.md template' do
+ let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
+ let(:project) { create(:project, :custom_repo, files: files ) }
+
+ it 'the template description is preferred' do
+ expect(merge_request.description).to eq('Default template contents')
+ end
+ end
+
describe '#execute' do
it 'calls the compare service with the correct arguments' do
allow_any_instance_of(described_class).to receive(:projects_and_branches_valid?).and_return(true)
@@ -221,6 +230,7 @@ RSpec.describe MergeRequests::BuildService do
end
it_behaves_like 'allows the merge request to be created'
+ it_behaves_like 'with a Default.md template'
it 'uses the title of the commit as the title of the merge request' do
expect(merge_request.title).to eq(commit_2.safe_message.split("\n").first)
@@ -241,6 +251,8 @@ RSpec.describe MergeRequests::BuildService do
context 'commit has no description' do
let(:commits) { Commit.decorate([commit_3], project) }
+ it_behaves_like 'with a Default.md template'
+
it 'uses the title of the commit as the title of the merge request' do
expect(merge_request.title).to eq(commit_3.safe_message)
end
@@ -279,6 +291,17 @@ RSpec.describe MergeRequests::BuildService do
expect(merge_request.description).to eq(expected_description)
end
+
+ context 'a Default.md template is defined' do
+ let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
+ let(:project) { create(:project, :custom_repo, files: files ) }
+
+ it 'appends the closing description to a Default.md template' do
+ expected_description = ['Default template contents', closing_message].compact.join("\n\n")
+
+ expect(merge_request.description).to eq(expected_description)
+ end
+ end
end
context 'when the source branch matches an internal issue' do
@@ -332,6 +355,7 @@ RSpec.describe MergeRequests::BuildService do
end
it_behaves_like 'allows the merge request to be created'
+ it_behaves_like 'with a Default.md template'
it 'uses the title of the branch as the merge request title' do
expect(merge_request.title).to eq('Feature branch')
@@ -347,6 +371,15 @@ RSpec.describe MergeRequests::BuildService do
it 'keeps the description from the initial params' do
expect(merge_request.description).to eq(description)
end
+
+ context 'a Default.md template is defined' do
+ let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
+ let(:project) { create(:project, :custom_repo, files: files ) }
+
+ it 'keeps the description from the initial params' do
+ expect(merge_request.description).to eq(description)
+ end
+ end
end
context 'when the source branch matches an issue' do
@@ -377,6 +410,17 @@ RSpec.describe MergeRequests::BuildService do
it 'sets the closing description' do
expect(merge_request.description).to eq(closing_message)
end
+
+ context 'a Default.md template is defined' do
+ let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
+ let(:project) { create(:project, :custom_repo, files: files ) }
+
+ it 'appends the closing description to a Default.md template' do
+ expected_description = ['Default template contents', closing_message].compact.join("\n\n")
+
+ expect(merge_request.description).to eq(expected_description)
+ end
+ end
end
end
end
@@ -389,6 +433,7 @@ RSpec.describe MergeRequests::BuildService do
end
it_behaves_like 'allows the merge request to be created'
+ it_behaves_like 'with a Default.md template'
it 'uses the first line of the first multi-line commit message as the title' do
expect(merge_request.title).to eq('Closes #1234 Second commit')
@@ -426,6 +471,17 @@ RSpec.describe MergeRequests::BuildService do
it 'sets the closing description' do
expect(merge_request.description).to eq("Create the app#{closing_message ? "\n\n" + closing_message : ''}")
end
+
+ context 'a Default.md template is defined' do
+ let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
+ let(:project) { create(:project, :custom_repo, files: files ) }
+
+ it 'appends the closing description to a Default.md template' do
+ expected_description = ['Default template contents', closing_message].compact.join("\n\n")
+
+ expect(merge_request.description).to eq(expected_description)
+ end
+ end
end
end
@@ -626,4 +682,52 @@ RSpec.describe MergeRequests::BuildService do
end
end
end
+
+ describe '#assign_description_from_repository_template' do
+ subject { service.send(:assign_description_from_repository_template) }
+
+ it 'performs no action if the merge request description is not blank' do
+ merge_request.description = 'foo'
+ subject
+ expect(merge_request.description).to eq 'foo'
+ end
+
+ context 'when a Default template is not found' do
+ it 'does not modify the merge request description' do
+ merge_request.description = nil
+ subject
+ expect(merge_request.description).to be_nil
+ end
+ end
+
+ context 'when a Default template is found' do
+ context 'when its contents cannot be retrieved' do
+ let(:files) { { '.gitlab/merge_request_templates/OtherTemplate.md' => 'Other template contents' } }
+ let(:project) { create(:project, :custom_repo, files: files ) }
+
+ it 'does not modify the merge request description' do
+ allow(TemplateFinder).to receive(:all_template_names).and_return({
+ merge_requests: [
+ { name: 'Default', id: 'default', key: 'default', project_id: project.id }
+ ]
+ })
+
+ merge_request.description = nil
+ subject
+ expect(merge_request.description).to be_nil
+ end
+ end
+
+ context 'when its contents can be retrieved' do
+ let(:files) { { '.gitlab/merge_request_templates/Default.md' => 'Default template contents' } }
+ let(:project) { create(:project, :custom_repo, files: files ) }
+
+ it 'modifies the merge request description' do
+ merge_request.description = nil
+ subject
+ expect(merge_request.description).to eq 'Default template contents'
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index d84ce8d15b4..08ad05b54da 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -50,6 +50,19 @@ RSpec.describe MergeRequests::CreatePipelineService do
expect(response.payload.source).to eq('merge_request_event')
end
+ context 'when push options contain ci.skip' do
+ let(:params) { { push_options: { ci: { skip: true } } } }
+
+ it 'creates a skipped pipeline' do
+ expect { response }.to change { Ci::Pipeline.count }.by(1)
+
+ expect(response).to be_success
+ expect(response.payload).to be_persisted
+ expect(response.payload.builds).to be_empty
+ expect(response.payload).to be_skipped
+ end
+ end
+
context 'with fork merge request' do
let_it_be(:forked_project) { fork_project(project, nil, repository: true, target_project: create(:project, :private, :repository)) }
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 49f691e97e2..c0c56a72192 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
it 'creates an MR' do
expect(merge_request).to be_valid
- expect(merge_request.work_in_progress?).to be(false)
+ expect(merge_request.draft?).to be(false)
expect(merge_request.title).to eq('Awesome merge_request')
expect(merge_request.assignees).to be_empty
expect(merge_request.merge_params['force_remove_source_branch']).to eq('1')
@@ -74,7 +74,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'sets MR to draft' do
- expect(merge_request.work_in_progress?).to be(true)
+ expect(merge_request.draft?).to be(true)
end
end
@@ -90,7 +90,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'sets MR to draft' do
- expect(merge_request.work_in_progress?).to be(true)
+ expect(merge_request.draft?).to be(true)
end
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 78deab64b1c..a2d73d8c9b1 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -161,7 +161,7 @@ RSpec.describe MergeRequests::MergeService do
end
context 'with Jira integration' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let(:jira_tracker) { project.create_jira_integration }
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
@@ -263,10 +263,13 @@ RSpec.describe MergeRequests::MergeService do
merge_request.update_attribute(:merge_params, { 'force_remove_source_branch' => '1' })
end
+ # Not a real use case. When a merger merges an MR, the merge param 'should_remove_source_branch' is defined.
it 'removes the source branch using the author user' do
expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, merge_request.author.id)
service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be nil
end
context 'when the merger set the source branch not to be removed' do
@@ -276,6 +279,8 @@ RSpec.describe MergeRequests::MergeService do
expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be false
end
end
end
@@ -289,6 +294,8 @@ RSpec.describe MergeRequests::MergeService do
expect(::MergeRequests::DeleteSourceBranchWorker).to receive(:perform_async).with(merge_request.id, merge_request.source_branch_sha, user.id)
service.execute(merge_request)
+
+ expect(merge_request.reload.should_remove_source_branch?).to be true
end
end
end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index d4ee4afd71d..2bb7dc3eef7 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -7,12 +7,6 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
let_it_be(:merge_request) { create(:merge_request) }
- describe '#CHECKS' do
- it 'contains every subclass of the base checks service', :eager_load do
- expect(described_class::CHECKS).to contain_exactly(*MergeRequests::Mergeability::CheckBaseService.subclasses)
- end
- end
-
describe '#execute' do
subject(:execute) { run_checks.execute }
@@ -22,8 +16,8 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
context 'when every check is skipped', :eager_load do
before do
MergeRequests::Mergeability::CheckBaseService.subclasses.each do |subclass|
- expect_next_instance_of(subclass) do |service|
- expect(service).to receive(:skip?).and_return(true)
+ allow_next_instance_of(subclass) do |service|
+ allow(service).to receive(:skip?).and_return(true)
end
end
end
@@ -35,7 +29,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
context 'when a check is skipped' do
it 'does not execute the check' do
- described_class::CHECKS.each do |check|
+ merge_request.mergeability_checks.each do |check|
allow_next_instance_of(check) do |service|
allow(service).to receive(:skip?).and_return(false)
allow(service).to receive(:execute).and_return(success_result)
@@ -47,7 +41,13 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
expect(service).not_to receive(:execute)
end
- expect(execute).to match_array([success_result, success_result, success_result, success_result])
+ # Since we're only marking one check to be skipped, we expect to receive
+ # `# of checks - 1` success result objects in return
+ #
+ check_count = merge_request.mergeability_checks.count - 1
+ success_array = (1..check_count).each_with_object([]) { |_, array| array << success_result }
+
+ expect(execute).to match_array(success_array)
end
end
@@ -56,7 +56,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
let(:merge_check) { instance_double(MergeRequests::Mergeability::CheckCiStatusService) }
before do
- described_class::CHECKS.each do |check|
+ merge_request.mergeability_checks.each do |check|
allow_next_instance_of(check) do |service|
allow(service).to receive(:skip?).and_return(true)
end
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index f0885365f96..e486daae15e 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -106,32 +106,6 @@ RSpec.describe MergeRequests::PostMergeService do
expect(merge_request.reload).to be_merged
end
end
-
- context 'when async_mr_close_issue feature flag is disabled' do
- before do
- stub_feature_flags(async_mr_close_issue: false)
- end
-
- it 'executes Issues::CloseService' do
- expect_next_instance_of(Issues::CloseService) do |close_service|
- expect(close_service).to receive(:execute).with(issue, commit: merge_request)
- end
-
- subject
-
- expect(merge_request.reload).to be_merged
- end
-
- it 'marks MR as merged regardless of errors when closing issues' do
- expect_next_instance_of(Issues::CloseService) do |close_service|
- allow(close_service).to receive(:execute).with(issue, commit: merge_request).and_raise(RuntimeError)
- end
-
- expect { subject }.to raise_error(RuntimeError)
-
- expect(merge_request.reload).to be_merged
- end
- end
end
context 'when the merge request has review apps' do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 6e6b4a91e0d..eecf7c21cba 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -228,6 +228,21 @@ RSpec.describe MergeRequests::RefreshService do
expect(@another_merge_request.has_commits?).to be_falsy
end
+ context 'when "push_options: nil" is passed' do
+ let(:service_instance) { service.new(project: project, current_user: @user, params: { push_options: nil }) }
+
+ subject { service_instance.execute(@oldrev, @newrev, ref) }
+
+ it 'creates a detached merge request pipeline with commits' do
+ expect { subject }
+ .to change { @merge_request.pipelines_for_merge_request.count }.by(1)
+ .and change { @another_merge_request.pipelines_for_merge_request.count }.by(0)
+
+ expect(@merge_request.has_commits?).to be_truthy
+ expect(@another_merge_request.has_commits?).to be_falsy
+ end
+ end
+
it 'does not create detached merge request pipeline for forked project' do
expect { subject }
.not_to change { @fork_merge_request.pipelines_for_merge_request.count }
@@ -741,47 +756,48 @@ RSpec.describe MergeRequests::RefreshService do
refresh_service.execute(oldrev, newrev, 'refs/heads/wip')
fixup_merge_request.reload
- expect(fixup_merge_request.work_in_progress?).to eq(true)
+ expect(fixup_merge_request.draft?).to eq(true)
expect(fixup_merge_request.notes.last.note).to match(
/marked this merge request as \*\*draft\*\* from #{Commit.reference_pattern}/
)
end
it 'references the commit that caused the draft status' do
- wip_merge_request = create(:merge_request,
+ draft_merge_request = create(:merge_request,
source_project: @project,
source_branch: 'wip',
target_branch: 'master',
target_project: @project)
- commits = wip_merge_request.commits
+ commits = draft_merge_request.commits
oldrev = commits.last.id
newrev = commits.first.id
- wip_commit = wip_merge_request.commits.find(&:work_in_progress?)
+ draft_commit = draft_merge_request.commits.find(&:draft?)
refresh_service.execute(oldrev, newrev, 'refs/heads/wip')
- expect(wip_merge_request.reload.notes.last.note).to eq(
- "marked this merge request as **draft** from #{wip_commit.id}"
+ expect(draft_merge_request.reload.notes.last.note).to eq(
+ "marked this merge request as **draft** from #{draft_commit.id}"
)
end
it 'does not mark as draft based on commits that do not belong to an MR' do
allow(refresh_service).to receive(:find_new_commits)
+
refresh_service.instance_variable_set("@commits", [
double(
id: 'aaaaaaa',
sha: 'aaaaaaa',
short_id: 'aaaaaaa',
title: 'Fix issue',
- work_in_progress?: false
+ draft?: false
),
double(
id: 'bbbbbbb',
sha: 'bbbbbbbb',
short_id: 'bbbbbbb',
title: 'fixup! Fix issue',
- work_in_progress?: true,
+ draft?: true,
to_reference: 'bbbbbbb'
)
])
@@ -789,7 +805,7 @@ RSpec.describe MergeRequests::RefreshService do
refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
reload_mrs
- expect(@merge_request.work_in_progress?).to be_falsey
+ expect(@merge_request.draft?).to be_falsey
end
end
diff --git a/spec/services/metrics/dashboard/panel_preview_service_spec.rb b/spec/services/metrics/dashboard/panel_preview_service_spec.rb
index 2877d22d1f3..787c61cc918 100644
--- a/spec/services/metrics/dashboard/panel_preview_service_spec.rb
+++ b/spec/services/metrics/dashboard/panel_preview_service_spec.rb
@@ -45,7 +45,6 @@ RSpec.describe Metrics::Dashboard::PanelPreviewService do
::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
::Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
::Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
::Gitlab::Metrics::Dashboard::Stages::UrlValidator
]
processor_params = [project, dashboard, sequence, environment: environment]
diff --git a/spec/services/notes/copy_service_spec.rb b/spec/services/notes/copy_service_spec.rb
index dd11fa15ea8..fd8802e6640 100644
--- a/spec/services/notes/copy_service_spec.rb
+++ b/spec/services/notes/copy_service_spec.rb
@@ -16,9 +16,8 @@ RSpec.describe Notes::CopyService do
let_it_be(:group) { create(:group) }
let_it_be(:from_project) { create(:project, :public, group: group) }
let_it_be(:to_project) { create(:project, :public, group: group) }
-
- let(:from_noteable) { create(:issue, project: from_project) }
- let(:to_noteable) { create(:issue, project: to_project) }
+ let_it_be(:from_noteable) { create(:issue, project: from_project) }
+ let_it_be(:to_noteable) { create(:issue, project: to_project) }
subject(:execute_service) { described_class.new(user, from_noteable, to_noteable).execute }
@@ -85,6 +84,15 @@ RSpec.describe Notes::CopyService do
expect(execute_service).to be_success
end
end
+
+ it 'copies rendered markdown from note_html' do
+ expect(Banzai::Renderer).not_to receive(:cacheless_render_field)
+
+ execute_service
+
+ new_note = to_noteable.notes.first
+ expect(new_note.note_html).to eq(notes.first.note_html)
+ end
end
context 'notes with mentions' do
@@ -119,6 +127,13 @@ RSpec.describe Notes::CopyService do
expect(new_note.author).to eq(note.author)
end
end
+
+ it 'does not copy rendered markdown from note_html' do
+ execute_service
+
+ new_note = to_noteable.notes.first
+ expect(new_note.note_html).not_to eq(note.note_html)
+ end
end
context 'notes with upload' do
@@ -137,6 +152,13 @@ RSpec.describe Notes::CopyService do
expect(note.note_html).not_to eq(new_note.note_html)
end
end
+
+ it 'does not copy rendered markdown from note_html' do
+ execute_service
+
+ new_note = to_noteable.notes.first
+ expect(new_note.note_html).not_to eq(note.note_html)
+ end
end
context 'discussion notes' do
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index c72a9465f20..53b75a3c991 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -359,7 +359,7 @@ RSpec.describe Notes::CreateService do
issuable.reload.update!(title: "title")
},
expectation: ->(issuable, can_use_quick_action) {
- expect(issuable.work_in_progress?).to eq(can_use_quick_action)
+ expect(issuable.draft?).to eq(can_use_quick_action)
}
),
# Remove draft status
@@ -369,7 +369,7 @@ RSpec.describe Notes::CreateService do
issuable.reload.update!(title: "Draft: title")
},
expectation: ->(noteable, can_use_quick_action) {
- expect(noteable.work_in_progress?).not_to eq(can_use_quick_action)
+ expect(noteable.draft?).not_to eq(can_use_quick_action)
}
)
]
diff --git a/spec/services/notification_recipients/build_service_spec.rb b/spec/services/notification_recipients/build_service_spec.rb
index ff54d6ccd2f..899d23ec641 100644
--- a/spec/services/notification_recipients/build_service_spec.rb
+++ b/spec/services/notification_recipients/build_service_spec.rb
@@ -14,6 +14,9 @@ RSpec.describe NotificationRecipients::BuildService do
shared_examples 'no N+1 queries' do
it 'avoids N+1 queries', :request_store do
+ # existing N+1 due to multiple users having to be looked up in the project_authorizations table
+ threshold = project.private? ? 1 : 0
+
create_user
service.build_new_note_recipients(note)
@@ -24,7 +27,7 @@ RSpec.describe NotificationRecipients::BuildService do
create_user
- expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count)
+ expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count).with_threshold(threshold)
end
end
@@ -66,6 +69,9 @@ RSpec.describe NotificationRecipients::BuildService do
shared_examples 'no N+1 queries' do
it 'avoids N+1 queries', :request_store do
+ # existing N+1 due to multiple users having to be looked up in the project_authorizations table
+ threshold = project.private? ? 1 : 0
+
create_user
service.build_new_review_recipients(review)
@@ -76,7 +82,7 @@ RSpec.describe NotificationRecipients::BuildService do
create_user
- expect { service.build_new_review_recipients(review) }.not_to exceed_query_limit(control_count)
+ expect { service.build_new_review_recipients(review) }.not_to exceed_query_limit(control_count).with_threshold(threshold)
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 743a04eabe6..032f35cfc29 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -975,10 +975,17 @@ RSpec.describe NotificationService, :mailer do
end
describe '#send_new_release_notifications', :deliver_mails_inline do
- let(:release) { create(:release, author: current_user) }
+ let(:release) { create(:release, project: project, author: current_user) }
let(:object) { release }
let(:action) { notification.send_new_release_notifications(release) }
+ before_all do
+ build_team(project)
+
+ update_custom_notification(:new_release, @u_guest_custom, resource: project)
+ update_custom_notification(:new_release, @u_custom_global)
+ end
+
context 'when release author is blocked' do
let(:current_user) { create(:user, :blocked) }
@@ -994,19 +1001,15 @@ RSpec.describe NotificationService, :mailer do
context 'when recipients for a new release exist' do
let(:current_user) { create(:user) }
- it 'calls new_release_email for each relevant recipient' do
- user_1 = create(:user)
- user_2 = create(:user)
- user_3 = create(:user)
- recipient_1 = NotificationRecipient.new(user_1, :custom, custom_action: :new_release)
- recipient_2 = NotificationRecipient.new(user_2, :custom, custom_action: :new_release)
- allow(NotificationRecipients::BuildService).to receive(:build_new_release_recipients).and_return([recipient_1, recipient_2])
-
+ it 'notifies the expected users' do
notification.send_new_release_notifications(release)
- should_email(user_1)
- should_email(user_2)
- should_not_email(user_3)
+ should_only_email(
+ @u_watcher,
+ @u_guest_watcher,
+ @u_custom_global,
+ @u_guest_custom
+ )
end
end
end
diff --git a/spec/services/packages/cleanup/update_policy_service_spec.rb b/spec/services/packages/cleanup/update_policy_service_spec.rb
new file mode 100644
index 00000000000..a11fbb766f5
--- /dev/null
+++ b/spec/services/packages/cleanup/update_policy_service_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Cleanup::UpdatePolicyService do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+
+ let(:params) { { keep_n_duplicated_package_files: 50 } }
+
+ describe '#execute' do
+ subject { described_class.new(project: project, current_user: current_user, params: params).execute }
+
+ shared_examples 'creating the policy' do
+ it 'creates a new one' do
+ expect { subject }.to change { ::Packages::Cleanup::Policy.count }.from(0).to(1)
+
+ expect(subject.payload[:packages_cleanup_policy]).to be_present
+ expect(subject.success?).to be_truthy
+ expect(project.packages_cleanup_policy).to be_persisted
+ expect(project.packages_cleanup_policy.keep_n_duplicated_package_files).to eq('50')
+ end
+
+ context 'with invalid parameters' do
+ let(:params) { { keep_n_duplicated_package_files: 100 } }
+
+ it 'does not create one' do
+ expect { subject }.not_to change { ::Packages::Cleanup::Policy.count }
+
+ expect(subject.status).to eq(:error)
+ expect(subject.message).to eq('Keep n duplicated package files is invalid')
+ end
+ end
+ end
+
+ shared_examples 'updating the policy' do
+ it 'updates the existing one' do
+ expect { subject }.not_to change { ::Packages::Cleanup::Policy.count }
+
+ expect(subject.payload[:packages_cleanup_policy]).to be_present
+ expect(subject.success?).to be_truthy
+ expect(project.packages_cleanup_policy.keep_n_duplicated_package_files).to eq('50')
+ end
+
+ context 'with invalid parameters' do
+ let(:params) { { keep_n_duplicated_package_files: 100 } }
+
+ it 'does not update one' do
+ expect { subject }.not_to change { policy.keep_n_duplicated_package_files }
+
+ expect(subject.status).to eq(:error)
+ expect(subject.message).to eq('Keep n duplicated package files is invalid')
+ end
+ end
+ end
+
+ shared_examples 'denying access' do
+ it 'returns an error' do
+ subject
+
+ expect(subject.message).to eq('Access denied')
+ expect(subject.status).to eq(:error)
+ end
+ end
+
+ context 'with existing packages cleanup policy' do
+ let_it_be(:policy) { create(:packages_cleanup_policy, project: project) }
+
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'updating the policy'
+ :developer | 'denying access'
+ :reporter | 'denying access'
+ :guest | 'denying access'
+ :anonymous | 'denying access'
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", current_user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+
+ context 'without existing packages cleanup policy' do
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'creating the policy'
+ :developer | 'denying access'
+ :reporter | 'denying access'
+ :guest | 'denying access'
+ :anonymous | 'denying access'
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", current_user) unless user_role == :anonymous
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/go/create_package_service_spec.rb b/spec/services/packages/go/create_package_service_spec.rb
index 5c5fec0aa3a..4ca1119fbaa 100644
--- a/spec/services/packages/go/create_package_service_spec.rb
+++ b/spec/services/packages/go/create_package_service_spec.rb
@@ -35,6 +35,22 @@ RSpec.describe Packages::Go::CreatePackageService do
expect(file.file_sha1).not_to be_nil
expect(file.file_sha256).not_to be_nil
end
+
+ context 'with FIPS mode', :fips_mode do
+ it 'does not generate file_md5' do
+ file_name = "#{version.name}.#{type}"
+ expect(subject.package_files.map { |f| f.file_name }).to include(file_name)
+
+ file = subject.package_files.with_file_name(file_name).first
+ expect(file).not_to be_nil
+ expect(file.file).not_to be_nil
+ expect(file.size).to eq(file.file.size)
+ expect(file.file_name).to eq(file_name)
+ expect(file.file_md5).to be_nil
+ expect(file.file_sha1).not_to be_nil
+ expect(file.file_sha256).not_to be_nil
+ end
+ end
end
describe '#execute' do
diff --git a/spec/services/packages/maven/metadata/append_package_file_service_spec.rb b/spec/services/packages/maven/metadata/append_package_file_service_spec.rb
index c406ab93630..f3a90d31158 100644
--- a/spec/services/packages/maven/metadata/append_package_file_service_spec.rb
+++ b/spec/services/packages/maven/metadata/append_package_file_service_spec.rb
@@ -22,6 +22,18 @@ RSpec.describe ::Packages::Maven::Metadata::AppendPackageFileService do
expect_file("#{metadata_file_name}.sha256")
expect_file("#{metadata_file_name}.sha512")
end
+
+ context 'with FIPS mode', :fips_mode do
+ it 'does not generate file_md5' do
+ expect { subject }.to change { package.package_files.count }.by(4)
+ expect(subject).to be_success
+
+ expect_file(metadata_file_name, with_content: content, with_content_type: 'application/xml', fips: true)
+ expect_file("#{metadata_file_name}.sha1", fips: true)
+ expect_file("#{metadata_file_name}.sha256", fips: true)
+ expect_file("#{metadata_file_name}.sha512", fips: true)
+ end
+ end
end
context 'with nil content' do
@@ -36,17 +48,22 @@ RSpec.describe ::Packages::Maven::Metadata::AppendPackageFileService do
it_behaves_like 'returning an error service response', message: 'package is not set'
end
- def expect_file(file_name, with_content: nil, with_content_type: '')
+ def expect_file(file_name, fips: false, with_content: nil, with_content_type: '')
package_file = package.package_files.recent.with_file_name(file_name).first
expect(package_file.file).to be_present
expect(package_file.file_name).to eq(file_name)
expect(package_file.size).to be > 0
- expect(package_file.file_md5).to be_present
expect(package_file.file_sha1).to be_present
expect(package_file.file_sha256).to be_present
expect(package_file.file.content_type).to eq(with_content_type)
+ if fips
+ expect(package_file.file_md5).not_to be_present
+ else
+ expect(package_file.file_md5).to be_present
+ end
+
if with_content
expect(package_file.file.read).to eq(with_content)
end
diff --git a/spec/services/packages/rubygems/create_gemspec_service_spec.rb b/spec/services/packages/rubygems/create_gemspec_service_spec.rb
index 198e978a47e..839fb4d955a 100644
--- a/spec/services/packages/rubygems/create_gemspec_service_spec.rb
+++ b/spec/services/packages/rubygems/create_gemspec_service_spec.rb
@@ -24,5 +24,18 @@ RSpec.describe Packages::Rubygems::CreateGemspecService do
expect(gemspec_file.file_sha1).not_to be_nil
expect(gemspec_file.file_sha256).not_to be_nil
end
+
+ context 'with FIPS mode', :fips_mode do
+ it 'does not generate file_md5' do
+ expect { subject }.to change { package.package_files.count }.by(1)
+
+ gemspec_file = package.package_files.find_by(file_name: "#{gemspec.name}.gemspec")
+ expect(gemspec_file.file).not_to be_nil
+ expect(gemspec_file.size).not_to be_nil
+ expect(gemspec_file.file_md5).to be_nil
+ expect(gemspec_file.file_sha1).not_to be_nil
+ expect(gemspec_file.file_sha256).not_to be_nil
+ end
+ end
end
end
diff --git a/spec/services/pages/delete_service_spec.rb b/spec/services/pages/delete_service_spec.rb
index e02e8e72e0b..0c0b2c0431b 100644
--- a/spec/services/pages/delete_service_spec.rb
+++ b/spec/services/pages/delete_service_spec.rb
@@ -43,4 +43,10 @@ RSpec.describe Pages::DeleteService do
service.execute
end.to change { PagesDeployment.count }.by(-1)
end
+
+ it 'publishes a PageDeletedEvent with project id and namespace id' do
+ expected_data = { project_id: project.id, namespace_id: project.namespace_id }
+
+ expect { service.execute }.to publish_event(Pages::PageDeletedEvent).with(expected_data)
+ end
end
diff --git a/spec/services/pages_domains/create_acme_order_service_spec.rb b/spec/services/pages_domains/create_acme_order_service_spec.rb
index 35b2cc56973..b882c253613 100644
--- a/spec/services/pages_domains/create_acme_order_service_spec.rb
+++ b/spec/services/pages_domains/create_acme_order_service_spec.rb
@@ -38,13 +38,21 @@ RSpec.describe PagesDomains::CreateAcmeOrderService do
expect(challenge).to have_received(:request_validation).ordered
end
- it 'generates and saves private key' do
+ it 'generates and saves private key: rsa' do
+ stub_feature_flags(pages_lets_encrypt_ecdsa: false)
service.execute
saved_order = PagesDomainAcmeOrder.last
expect { OpenSSL::PKey::RSA.new(saved_order.private_key) }.not_to raise_error
end
+ it 'generates and saves private key: ec' do
+ service.execute
+
+ saved_order = PagesDomainAcmeOrder.last
+ expect { OpenSSL::PKey::EC.new(saved_order.private_key) }.not_to raise_error
+ end
+
it 'properly saves order attributes' do
service.execute
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index a8db87e48d0..a9329f092fa 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -64,22 +64,11 @@ RSpec.describe Projects::AfterRenameService do
allow(project_storage).to receive(:rename_repo) { true }
end
- context 'when the project has pages deployed' do
- it 'schedules a move of the pages directory' do
- allow(project).to receive(:pages_deployed?).and_return(true)
-
- expect(PagesTransferWorker).to receive(:perform_async).with('rename_project', anything)
-
- service_execute
- end
- end
-
context 'when the project does not have pages deployed' do
it 'does nothing with the pages directory' do
allow(project).to receive(:pages_deployed?).and_return(false)
expect(PagesTransferWorker).not_to receive(:perform_async)
- expect(Gitlab::PagesTransfer).not_to receive(:new)
service_execute
end
@@ -172,29 +161,6 @@ RSpec.describe Projects::AfterRenameService do
end
end
- context 'gitlab pages' do
- context 'when the project has pages deployed' do
- it 'schedules a move of the pages directory' do
- allow(project).to receive(:pages_deployed?).and_return(true)
-
- expect(PagesTransferWorker).to receive(:perform_async).with('rename_project', anything)
-
- service_execute
- end
- end
-
- context 'when the project does not have pages deployed' do
- it 'does nothing with the pages directory' do
- allow(project).to receive(:pages_deployed?).and_return(false)
-
- expect(PagesTransferWorker).not_to receive(:perform_async)
- expect(Gitlab::PagesTransfer).not_to receive(:new)
-
- service_execute
- end
- end
- end
-
context 'attachments' do
let(:uploader) { create(:upload, :issuable_upload, :with_file, model: project) }
let(:file_uploader) { build(:file_uploader, project: project) }
diff --git a/spec/services/projects/autocomplete_service_spec.rb b/spec/services/projects/autocomplete_service_spec.rb
index ed043bacf31..54a21d2f22b 100644
--- a/spec/services/projects/autocomplete_service_spec.rb
+++ b/spec/services/projects/autocomplete_service_spec.rb
@@ -158,7 +158,6 @@ RSpec.describe Projects::AutocompleteService do
subject { described_class.new(project, user).contacts.as_json }
before do
- stub_feature_flags(customer_relations: true)
group.add_developer(user)
end
diff --git a/spec/services/projects/destroy_rollback_service_spec.rb b/spec/services/projects/destroy_rollback_service_spec.rb
deleted file mode 100644
index 3eaacc8c1e7..00000000000
--- a/spec/services/projects/destroy_rollback_service_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::DestroyRollbackService do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
-
- let(:repository) { project.repository }
- let(:repository_storage) { project.repository_storage }
-
- subject { described_class.new(project, user, {}).execute }
-
- describe '#execute' do
- let(:path) { repository.disk_path + '.git' }
- let(:removal_path) { "#{repository.disk_path}+#{project.id}#{Repositories::DestroyService::DELETED_FLAG}.git" }
-
- before do
- aggregate_failures do
- expect(TestEnv.storage_dir_exists?(repository_storage, path)).to be_truthy
- expect(TestEnv.storage_dir_exists?(repository_storage, removal_path)).to be_falsey
- end
-
- # Don't run sidekiq to check if renamed repository exists
- Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
-
- aggregate_failures do
- expect(TestEnv.storage_dir_exists?(repository_storage, path)).to be_falsey
- expect(TestEnv.storage_dir_exists?(repository_storage, removal_path)).to be_truthy
- end
- end
-
- it 'restores the repositories' do
- Sidekiq::Testing.fake! { subject }
-
- aggregate_failures do
- expect(TestEnv.storage_dir_exists?(repository_storage, path)).to be_truthy
- expect(TestEnv.storage_dir_exists?(repository_storage, removal_path)).to be_falsey
- end
- end
- end
-
- def destroy_project(project, user, params = {})
- Projects::DestroyService.new(project, user, params).execute
- end
-end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index cd923720631..c00438199fd 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
let!(:project) { create(:project, :repository, namespace: user.namespace) }
let(:path) { project.repository.disk_path }
- let(:remove_path) { removal_path(path) }
let(:async) { false } # execute or async_execute
before do
@@ -24,7 +23,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect(Project.unscoped.all).not_to include(project)
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
end
it 'publishes a ProjectDeleted event with project id and namespace id' do
@@ -73,6 +71,18 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
it_behaves_like 'deleting the project'
+
+ context 'when project is undergoing refresh' do
+ let!(:build_artifacts_size_refresh) { create(:project_build_artifacts_size_refresh, :pending, project: project) }
+
+ it 'does not log about artifact deletion but continues to delete artifacts' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).not_to receive(:warn_artifact_deletion_during_stats_refresh)
+
+ expect { destroy_project(project, user, {}) }
+ .to change { Ci::JobArtifact.count }.by(-2)
+ .and change { Projects::BuildArtifactsSizeRefresh.count }.by(-1)
+ end
+ end
end
end
@@ -192,10 +202,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
it do
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
end
-
- it do
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
- end
end
context 'when flushing caches fail due to Git errors' do
@@ -392,36 +398,13 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
end
end
- context 'repository +deleted path removal' do
- context 'regular phase' do
- it 'schedules +deleted removal of existing repos' do
- service = described_class.new(project, user, {})
- allow(service).to receive(:schedule_stale_repos_removal)
-
- expect(Repositories::ShellDestroyService).to receive(:new).and_call_original
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(5.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
-
- service.execute
+ context 'repository removal' do
+ it 'removes existing repos' do
+ expect_next_instances_of(Repositories::DestroyService, 2) do |instance|
+ expect(instance).to receive(:execute).and_return(status: :success)
end
- end
-
- context 'stale cleanup' do
- let(:async) { true }
-
- it 'schedules +deleted wiki and repo removal' do
- allow(ProjectDestroyWorker).to receive(:perform_async)
-
- expect(Repositories::ShellDestroyService).to receive(:new).with(project.repository).and_call_original
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.disk_path))
-
- expect(Repositories::ShellDestroyService).to receive(:new).with(project.wiki.repository).and_call_original
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(10.minutes, :remove_repository, project.repository_storage, removal_path(project.wiki.disk_path))
- destroy_project(project, user, {})
- end
+ described_class.new(project, user, {}).execute
end
end
@@ -480,7 +463,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect do
destroy_project(project, user)
end.to change(WebHook, :count).by(-2)
- .and change(WebHookLog, :count).by(-1)
end
context 'when an error is raised deleting webhooks' do
@@ -541,7 +523,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect(Project.unscoped.all).not_to include(project)
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
expect(project.all_pipelines).to be_empty
expect(project.builds).to be_empty
end
@@ -550,8 +531,4 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
def destroy_project(project, user, params = {})
described_class.new(project, user, params).public_send(async ? :async_execute : :execute)
end
-
- def removal_path(path)
- "#{path}+#{project.id}#{Repositories::DestroyService::DELETED_FLAG}"
- end
end
diff --git a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
index 41487e9ea48..6a715312097 100644
--- a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
+++ b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
@@ -52,6 +52,12 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
expect { service.execute }.to change { refresh.reload.last_job_artifact_id.to_i }.to(artifact_3.id)
end
+ it 'updates the last_job_artifact_id_on_refresh_start to the ID of the last artifact from the project' do
+ expect { service.execute }
+ .to change { refresh.reload.last_job_artifact_id_on_refresh_start.to_i }
+ .to(project.job_artifacts.last.id)
+ end
+
it 'requeues the refresh job' do
service.execute
expect(refresh.reload).to be_pending
@@ -63,7 +69,8 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
:project_build_artifacts_size_refresh,
:pending,
project: project,
- last_job_artifact_id: artifact_3.id
+ last_job_artifact_id: artifact_3.id,
+ last_job_artifact_id_on_refresh_start: artifact_4.id
)
end
@@ -77,6 +84,10 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
expect(refresh.reload.last_job_artifact_id).to eq(artifact_3.id)
end
+ it 'keeps the last_job_artifact_id_on_refresh_start unchanged' do
+ expect(refresh.reload.last_job_artifact_id_on_refresh_start).to eq(artifact_4.id)
+ end
+
it 'keeps the state of the refresh record at running' do
expect(refresh.reload).to be_running
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index e547ace1d9f..bebe80b710b 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -53,9 +53,6 @@ RSpec.describe Projects::TransferService do
allow_next_instance_of(Gitlab::UploadsTransfer) do |service|
allow(service).to receive(:move_project).and_return(true)
end
- allow_next_instance_of(Gitlab::PagesTransfer) do |service|
- allow(service).to receive(:move_project).and_return(true)
- end
group.add_owner(user)
end
@@ -725,15 +722,6 @@ RSpec.describe Projects::TransferService do
group.add_owner(user)
end
- it 'schedules a job when pages are deployed' do
- project.mark_pages_as_deployed
-
- expect(PagesTransferWorker).to receive(:perform_async)
- .with("move_project", [project.path, user.namespace.full_path, group.full_path])
-
- execute_transfer
- end
-
it 'does not schedule a job when no pages are deployed' do
expect(PagesTransferWorker).not_to receive(:perform_async)
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 777162b6196..cbbed82aa0b 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -205,6 +205,25 @@ RSpec.describe Projects::UpdatePagesService do
include_examples 'fails with outdated reference message'
end
end
+
+ context 'when uploaded deployment size is wrong' do
+ it 'raises an error' do
+ allow_next_instance_of(PagesDeployment) do |deployment|
+ allow(deployment)
+ .to receive(:size)
+ .and_return(file.size + 1)
+ end
+
+ expect do
+ expect(execute).not_to eq(:success)
+
+ expect(GenericCommitStatus.last.description).to eq("Error: The uploaded artifact size does not match the expected value.")
+ project.pages_metadatum.reload
+ expect(project.pages_metadatum).not_to be_deployed
+ expect(project.pages_metadatum.pages_deployment).to be_nil
+ end.to raise_error(Projects::UpdatePagesService::WrongUploadedDeploymentSizeError)
+ end
+ end
end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index f7a22b1b92f..f7ed6006099 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe QuickActions::InterpretService do
inprogress # populate the label
_, updates, _ = service.execute(content, issuable)
- expect(updates).to eq(add_label_ids: [bug.id, inprogress.id])
+ expect(updates).to match(add_label_ids: contain_exactly(bug.id, inprogress.id))
end
it 'returns the label message' do
@@ -130,7 +130,10 @@ RSpec.describe QuickActions::InterpretService do
inprogress # populate the label
_, _, message = service.execute(content, issuable)
- expect(message).to eq("Added #{bug.to_reference(format: :name)} #{inprogress.to_reference(format: :name)} labels.")
+ # Compare message without making assumptions about ordering.
+ expect(message).to match %r{Added ~".*" ~".*" labels.}
+ expect(message).to include(bug.to_reference(format: :name))
+ expect(message).to include(inprogress.to_reference(format: :name))
end
end
@@ -318,32 +321,40 @@ RSpec.describe QuickActions::InterpretService do
end
shared_examples 'draft command' do
- it 'returns wip_event: "wip" if content contains /draft' do
+ it 'returns wip_event: "draft"' do
_, updates, _ = service.execute(content, issuable)
- expect(updates).to eq(wip_event: 'wip')
+ expect(updates).to eq(wip_event: 'draft')
end
- it 'returns the wip message' do
+ it 'returns the draft message' do
_, _, message = service.execute(content, issuable)
expect(message).to eq("Marked this #{issuable.to_ability_name.humanize(capitalize: false)} as a draft.")
end
end
- shared_examples 'undraft command' do
- it 'returns wip_event: "unwip" if content contains /draft' do
- issuable.update!(title: issuable.wip_title)
+ shared_examples 'draft/ready command no action' do
+ it 'returns the no action message if there is no change to the status' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq("No change to this #{issuable.to_ability_name.humanize(capitalize: false)}'s draft status.")
+ end
+ end
+
+ shared_examples 'ready command' do
+ it 'returns wip_event: "ready"' do
+ issuable.update!(title: issuable.draft_title)
_, updates, _ = service.execute(content, issuable)
- expect(updates).to eq(wip_event: 'unwip')
+ expect(updates).to eq(wip_event: 'ready')
end
- it 'returns the unwip message' do
- issuable.update!(title: issuable.wip_title)
+ it 'returns the ready message' do
+ issuable.update!(title: issuable.draft_title)
_, _, message = service.execute(content, issuable)
- expect(message).to eq("Unmarked this #{issuable.to_ability_name.humanize(capitalize: false)} as a draft.")
+ expect(message).to eq("Marked this #{issuable.to_ability_name.humanize(capitalize: false)} as ready.")
end
end
@@ -1196,6 +1207,64 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { merge_request }
end
+ context 'with a colon label' do
+ let(:bug) { create(:label, project: project, title: 'Category:Bug') }
+ let(:inprogress) { create(:label, project: project, title: 'status:in:progress') }
+
+ context 'when quoted' do
+ let(:content) { %(/label ~"#{inprogress.title}" ~"#{bug.title}" ~unknown) }
+
+ it_behaves_like 'label command' do
+ let(:issuable) { merge_request }
+ end
+
+ it_behaves_like 'label command' do
+ let(:issuable) { issue }
+ end
+ end
+
+ context 'when unquoted' do
+ let(:content) { %(/label ~#{inprogress.title} ~#{bug.title} ~unknown) }
+
+ it_behaves_like 'label command' do
+ let(:issuable) { merge_request }
+ end
+
+ it_behaves_like 'label command' do
+ let(:issuable) { issue }
+ end
+ end
+ end
+
+ context 'with a scoped label' do
+ let(:bug) { create(:label, :scoped, project: project) }
+ let(:inprogress) { create(:label, project: project, title: 'three::part::label') }
+
+ context 'when quoted' do
+ let(:content) { %(/label ~"#{inprogress.title}" ~"#{bug.title}" ~unknown) }
+
+ it_behaves_like 'label command' do
+ let(:issuable) { merge_request }
+ end
+
+ it_behaves_like 'label command' do
+ let(:issuable) { issue }
+ end
+ end
+
+ context 'when unquoted' do
+ let(:content) { %(/label ~#{inprogress.title} ~#{bug.title} ~unknown) }
+
+ it_behaves_like 'label command' do
+ let(:issuable) { merge_request }
+ end
+
+ it_behaves_like 'label command' do
+ let(:issuable) { issue }
+ end
+ end
+ end
+
it_behaves_like 'multiple label command' do
let(:content) { %(/label ~"#{inprogress.title}" \n/label ~#{bug.title}) }
let(:issuable) { issue }
@@ -1306,11 +1375,21 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { merge_request }
end
- it_behaves_like 'undraft command' do
+ it_behaves_like 'ready command' do
let(:content) { '/draft' }
let(:issuable) { merge_request }
end
+ it_behaves_like 'draft/ready command no action' do
+ let(:content) { '/ready' }
+ let(:issuable) { merge_request }
+ end
+
+ it_behaves_like 'ready command' do
+ let(:content) { '/ready' }
+ let(:issuable) { merge_request }
+ end
+
it_behaves_like 'failed command', 'Could not apply remove_due_date command.' do
let(:content) { '/remove_due_date' }
let(:issuable) { merge_request }
@@ -2333,24 +2412,6 @@ RSpec.describe QuickActions::InterpretService do
create(:issue_customer_relations_contact, issue: issue, contact: existing_contact)
end
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(customer_relations: false)
- end
-
- it 'add_contacts command does not add the contact' do
- _, updates, _ = add_command
-
- expect(updates).to be_empty
- end
-
- it 'remove_contacts command does not remove the contact' do
- _, updates, _ = remove_command
-
- expect(updates).to be_empty
- end
- end
-
it 'add_contacts command adds the contact' do
_, updates, message = add_command
@@ -2644,7 +2705,24 @@ RSpec.describe QuickActions::InterpretService do
it 'includes the new status' do
_, explanations = service.explain(content, merge_request)
- expect(explanations).to eq(['Marks this merge request as a draft.'])
+ expect(explanations).to match_array(['Marks this merge request as a draft.'])
+ end
+ end
+
+ describe 'ready command' do
+ let(:content) { '/ready' }
+
+ it 'includes the new status' do
+ merge_request.update!(title: merge_request.draft_title)
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to match_array(['Marks this merge request as ready.'])
+ end
+
+ it 'includes the no change message when status unchanged' do
+ _, explanations = service.explain(content, merge_request)
+
+ expect(explanations).to match_array(["No change to this merge request's draft status."])
end
end
@@ -2805,12 +2883,6 @@ RSpec.describe QuickActions::InterpretService do
expect(explanations).to be_empty
end
-
- it '/remove_contacts is not available' do
- _, explanations = service.explain(remove_contacts, issue)
-
- expect(explanations).to be_empty
- end
end
context 'when group has contacts' do
@@ -2822,10 +2894,22 @@ RSpec.describe QuickActions::InterpretService do
expect(explanations).to contain_exactly("Add customer relation contact(s).")
end
- it '/remove_contacts is available' do
- _, explanations = service.explain(remove_contacts, issue)
+ context 'when issue has no contacts' do
+ it '/remove_contacts is not available' do
+ _, explanations = service.explain(remove_contacts, issue)
- expect(explanations).to contain_exactly("Remove customer relation contact(s).")
+ expect(explanations).to be_empty
+ end
+ end
+
+ context 'when issue has contacts' do
+ let!(:issue_contact) { create(:issue_customer_relations_contact, issue: issue, contact: contact) }
+
+ it '/remove_contacts is available' do
+ _, explanations = service.explain(remove_contacts, issue)
+
+ expect(explanations).to contain_exactly("Remove customer relation contact(s).")
+ end
end
end
end
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index d53fc968e2a..566d73a3b75 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -6,10 +6,11 @@ RSpec.describe Releases::CreateService do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
let(:tag_name) { project.repository.tag_names.first }
+ let(:tag_message) { nil }
let(:tag_sha) { project.repository.find_tag(tag_name).dereferenced_target.sha }
let(:name) { 'Bionic Beaver' }
let(:description) { 'Awesome release!' }
- let(:params) { { tag: tag_name, name: name, description: description, ref: ref } }
+ let(:params) { { tag: tag_name, name: name, description: description, ref: ref, tag_message: tag_message } }
let(:ref) { nil }
let(:service) { described_class.new(project, user, params) }
@@ -68,6 +69,24 @@ RSpec.describe Releases::CreateService do
expect(result[:tag]).not_to be_nil
expect(result[:release]).not_to be_nil
end
+
+ context 'and tag_message is provided' do
+ let(:ref) { 'master' }
+ let(:tag_name) { 'foobar' }
+ let(:tag_message) { 'Annotated tag message' }
+
+ it_behaves_like 'a successful release creation'
+
+ it 'creates a tag if the tag does not exist' do
+ expect(project.repository.ref_exists?("refs/tags/#{tag_name}")).to be_falsey
+
+ result = service.execute
+ expect(result[:status]).to eq(:success)
+ expect(result[:tag]).not_to be_nil
+ expect(result[:release]).not_to be_nil
+ expect(project.repository.find_tag(tag_name).message).to eq(tag_message)
+ end
+ end
end
context 'there already exists a release on a tag' do
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index ddb8e7e1182..82546ae810b 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe Repositories::ChangelogService do
recorder = ActiveRecord::QueryRecorder.new { service.execute(commit_to_changelog: commit_to_changelog) }
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
- expect(recorder.count).to eq(9)
+ expect(recorder.count).to eq(10)
expect(changelog).to include('Title 1', 'Title 2')
end
@@ -148,6 +148,52 @@ RSpec.describe Repositories::ChangelogService do
expect(changelog).to include('Title 1', 'Title 2')
end
end
+
+ it 'avoids N+1 queries', :request_store do
+ RequestStore.clear!
+
+ request = ->(to) do
+ described_class
+ .new(project, creator, version: '1.0.0', from: sha1, to: to)
+ .execute(commit_to_changelog: false)
+ end
+
+ control = ActiveRecord::QueryRecorder.new { request.call(sha2) }
+
+ RequestStore.clear!
+
+ expect { request.call(sha3) }.not_to exceed_query_limit(control.count)
+ end
+
+ context 'when one of the commits does not exist' do
+ let(:service) { described_class.new(project, creator, version: '1.0.0', from: 'master', to: '54321') }
+
+ it 'raises an exception' do
+ expect { service.execute(commit_to_changelog: false) }.to raise_error(Gitlab::Changelog::Error)
+ end
+ end
+
+ context 'when commit range exceeds the limit' do
+ let(:service) { described_class.new(project, creator, version: '1.0.0', from: sha1) }
+
+ before do
+ stub_const("#{described_class.name}::COMMITS_LIMIT", 2)
+ end
+
+ it 'raises an exception' do
+ expect { service.execute(commit_to_changelog: false) }.to raise_error(Gitlab::Changelog::Error)
+ end
+
+ context 'when feature flag is off' do
+ before do
+ stub_feature_flags(changelog_commits_limitation: false)
+ end
+
+ it 'returns the changelog' do
+ expect(service.execute(commit_to_changelog: false)).to include('Title 1', 'Title 2', 'Title 3')
+ end
+ end
+ end
end
describe '#start_of_commit_range' do
diff --git a/spec/services/repositories/destroy_rollback_service_spec.rb b/spec/services/repositories/destroy_rollback_service_spec.rb
deleted file mode 100644
index a52dff62760..00000000000
--- a/spec/services/repositories/destroy_rollback_service_spec.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Repositories::DestroyRollbackService do
- let_it_be(:user) { create(:user) }
-
- let!(:project) { create(:project, :repository, namespace: user.namespace) }
- let(:repository) { project.repository }
- let(:path) { repository.disk_path }
- let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
-
- subject { described_class.new(repository).execute }
-
- before do
- # Dont run sidekiq to check if renamed repository exists
- Sidekiq::Testing.fake! { destroy_project(project, user) }
- end
-
- it 'moves the repository from the +deleted folder' do
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
-
- subject
-
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
- end
-
- it 'logs the successful action' do
- expect(Gitlab::AppLogger).to receive(:info)
-
- subject
- end
-
- it 'flushes the repository cache' do
- expect(repository).to receive(:before_delete)
-
- subject
- end
-
- it 'returns success and does not perform any action if repository path does not exist' do
- expect(repository).to receive(:disk_path).and_return('foo')
- expect(repository).not_to receive(:before_delete)
-
- expect(subject[:status]).to eq :success
- end
-
- it 'gracefully handles exception if the repository does not exist on disk' do
- expect(repository).to receive(:before_delete).and_raise(Gitlab::Git::Repository::NoRepository)
- expect(subject[:status]).to eq :success
- end
-
- context 'when move operation cannot be performed' do
- let(:service) { described_class.new(repository) }
-
- before do
- expect(service).to receive(:mv_repository).and_return(false)
- end
-
- it 'returns error' do
- result = service.execute
-
- expect(result[:status]).to eq :error
- end
-
- it 'logs the error' do
- expect(Gitlab::AppLogger).to receive(:error)
-
- service.execute
- end
-
- context 'when repository does not exist' do
- it 'returns success' do
- allow(service).to receive(:repo_exists?).and_return(true, false)
-
- expect(service.execute[:status]).to eq :success
- end
- end
- end
-
- def destroy_project(project, user)
- Projects::DestroyService.new(project, user, {}).execute
- end
-end
diff --git a/spec/services/repositories/destroy_service_spec.rb b/spec/services/repositories/destroy_service_spec.rb
index 3766467d708..565a18d501a 100644
--- a/spec/services/repositories/destroy_service_spec.rb
+++ b/spec/services/repositories/destroy_service_spec.rb
@@ -8,31 +8,19 @@ RSpec.describe Repositories::DestroyService do
let!(:project) { create(:project, :repository, namespace: user.namespace) }
let(:repository) { project.repository }
let(:path) { repository.disk_path }
- let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
subject { described_class.new(repository).execute }
- it 'moves the repository to a +deleted folder' do
+ it 'removes the repository' do
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
subject
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_truthy
- end
-
- it 'schedules the repository deletion' do
- subject
-
- expect(Repositories::ShellDestroyService).to receive(:new).with(repository).and_call_original
-
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(Repositories::ShellDestroyService::REPO_REMOVAL_DELAY, :remove_repository, project.repository_storage, remove_path)
-
- # Because GitlabShellWorker is inside a run_after_commit callback we need to
+ # Because the removal happens inside a run_after_commit callback we need to
# trigger the callback
project.touch
+
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
end
context 'on a read-only instance' do
@@ -41,22 +29,12 @@ RSpec.describe Repositories::DestroyService do
end
it 'schedules the repository deletion' do
- expect(Repositories::ShellDestroyService).to receive(:new).with(repository).and_call_original
-
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(Repositories::ShellDestroyService::REPO_REMOVAL_DELAY, :remove_repository, project.repository_storage, remove_path)
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
subject
- end
- end
-
- it 'removes the repository', :sidekiq_inline do
- subject
- project.touch
-
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
- expect(project.gitlab_shell.repository_exists?(project.repository_storage, remove_path + '.git')).to be_falsey
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
+ end
end
it 'flushes the repository cache' do
@@ -77,48 +55,20 @@ RSpec.describe Repositories::DestroyService do
expect(subject[:status]).to eq :success
end
- context 'when move operation cannot be performed' do
- let(:service) { described_class.new(repository) }
-
- before do
- expect(service).to receive(:mv_repository).and_return(false)
- end
-
- it 'returns error' do
- expect(service.execute[:status]).to eq :error
- end
-
- it 'logs the error' do
- expect(Gitlab::AppLogger).to receive(:error)
-
- service.execute
- end
-
- context 'when repository does not exist' do
- it 'returns success' do
- allow(service).to receive(:repo_exists?).and_return(true, false)
-
- expect(Repositories::ShellDestroyService).not_to receive(:new)
- expect(service.execute[:status]).to eq :success
- end
- end
- end
-
context 'with a project wiki repository' do
let(:project) { create(:project, :wiki_repo) }
let(:repository) { project.wiki.repository }
it 'schedules the repository deletion' do
- subject
-
- expect(Repositories::ShellDestroyService).to receive(:new).with(repository).and_call_original
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(Repositories::ShellDestroyService::REPO_REMOVAL_DELAY, :remove_repository, project.repository_storage, remove_path)
+ subject
- # Because GitlabShellWorker is inside a run_after_commit callback we need to
+ # Because the removal happens inside a run_after_commit callback we need to
# trigger the callback
project.touch
+
+ expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
end
end
end
diff --git a/spec/services/repositories/shell_destroy_service_spec.rb b/spec/services/repositories/shell_destroy_service_spec.rb
deleted file mode 100644
index 65168a1784a..00000000000
--- a/spec/services/repositories/shell_destroy_service_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Repositories::ShellDestroyService do
- let_it_be(:user) { create(:user) }
-
- let!(:project) { create(:project, :repository, namespace: user.namespace) }
- let(:path) { project.repository.disk_path }
- let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
-
- it 'returns success if the repository is nil' do
- expect(GitlabShellWorker).not_to receive(:perform_in)
-
- result = described_class.new(nil).execute
-
- expect(result[:status]).to eq :success
- end
-
- it 'schedules the repository deletion' do
- expect(GitlabShellWorker).to receive(:perform_in)
- .with(described_class::REPO_REMOVAL_DELAY, :remove_repository, project.repository_storage, remove_path)
-
- described_class.new(project.repository).execute
- end
-end
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 5a88929334b..127948549b0 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -268,10 +268,36 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
it_behaves_like 'allows creation of bot with valid params'
+
+ context 'when user specifies an access level of OWNER for the bot' do
+ let_it_be(:params) { { access_level: Gitlab::Access::OWNER } }
+
+ context 'when the executor is a MAINTAINER' do
+ it 'does not add the bot user with the specified access level in the resource' do
+ response = subject
+
+ expect(response.error?).to be true
+ expect(response.errors).to include('Could not provision owner access to project access token')
+ end
+ end
+
+ context 'when the executor is an OWNER' do
+ let_it_be(:user) { project.first_owner }
+
+ it 'adds the bot user with the specified access level in the resource' do
+ response = subject
+
+ access_token = response.payload[:access_token]
+ bot_user = access_token.user
+
+ expect(resource.members.owners.map(&:user_id)).to include(bot_user.id)
+ end
+ end
+ end
end
end
- context 'when resource is a project' do
+ context 'when resource is a group' do
let_it_be(:resource_type) { 'group' }
let_it_be(:resource) { group }
@@ -283,6 +309,18 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
it_behaves_like 'allows creation of bot with valid params'
+
+ context 'when user specifies an access level of OWNER for the bot' do
+ let_it_be(:params) { { access_level: Gitlab::Access::OWNER } }
+
+ it 'adds the bot user with the specified access level in the resource' do
+ response = subject
+ access_token = response.payload[:access_token]
+ bot_user = access_token.user
+
+ expect(resource.members.owners.map(&:user_id)).to include(bot_user.id)
+ end
+ end
end
end
end
diff --git a/spec/services/service_response_spec.rb b/spec/services/service_response_spec.rb
index 082ee4ddc67..3ede90fbc44 100644
--- a/spec/services/service_response_spec.rb
+++ b/spec/services/service_response_spec.rb
@@ -2,7 +2,10 @@
require 'fast_spec_helper'
+require 're2'
+
require_relative '../../app/services/service_response'
+require_relative '../../lib/gitlab/error_tracking'
RSpec.describe ServiceResponse do
describe '.success' do
@@ -94,4 +97,76 @@ RSpec.describe ServiceResponse do
expect(described_class.error(message: 'error message').errors).to eq(['error message'])
end
end
+
+ describe '#track_and_raise_exception' do
+ context 'when successful' do
+ let(:response) { described_class.success }
+
+ it 'returns self' do
+ expect(response.track_and_raise_exception).to be response
+ end
+ end
+
+ context 'when an error' do
+ let(:response) { described_class.error(message: 'bang') }
+
+ it 'tracks and raises' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(StandardError.new('bang'), {})
+
+ response.track_and_raise_exception
+ end
+
+ it 'allows specification of error class' do
+ error = Class.new(StandardError)
+ expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(error.new('bang'), {})
+
+ response.track_and_raise_exception(as: error)
+ end
+
+ it 'allows extra data for tracking' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(StandardError.new('bang'), { foo: 1, bar: 2 })
+
+ response.track_and_raise_exception(foo: 1, bar: 2)
+ end
+ end
+ end
+
+ describe '#track_exception' do
+ context 'when successful' do
+ let(:response) { described_class.success }
+
+ it 'returns self' do
+ expect(response.track_exception).to be response
+ end
+ end
+
+ context 'when an error' do
+ let(:response) { described_class.error(message: 'bang') }
+
+ it 'tracks' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(StandardError.new('bang'), {})
+
+ expect(response.track_exception).to be response
+ end
+
+ it 'allows specification of error class' do
+ error = Class.new(StandardError)
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(error.new('bang'), {})
+
+ expect(response.track_exception(as: error)).to be response
+ end
+
+ it 'allows extra data for tracking' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(StandardError.new('bang'), { foo: 1, bar: 2 })
+
+ expect(response.track_exception(foo: 1, bar: 2)).to be response
+ end
+ end
+ end
end
diff --git a/spec/services/snippets/bulk_destroy_service_spec.rb b/spec/services/snippets/bulk_destroy_service_spec.rb
index 2f399d10188..2d2bdd116d1 100644
--- a/spec/services/snippets/bulk_destroy_service_spec.rb
+++ b/spec/services/snippets/bulk_destroy_service_spec.rb
@@ -22,8 +22,8 @@ RSpec.describe Snippets::BulkDestroyService do
it 'deletes the snippets in bulk' do
response = nil
- expect(Repositories::ShellDestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
- expect(Repositories::ShellDestroyService).to receive(:new).with(project_snippet.repository).and_call_original
+ expect(Repositories::DestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
+ expect(Repositories::DestroyService).to receive(:new).with(project_snippet.repository).and_call_original
aggregate_failures do
expect do
@@ -94,12 +94,6 @@ RSpec.describe Snippets::BulkDestroyService do
it_behaves_like 'error is raised' do
let(:error_message) { 'Failed to delete snippet repositories.' }
end
-
- it 'tries to rollback the repository' do
- expect(subject).to receive(:attempt_rollback_repositories)
-
- subject.execute
- end
end
context 'when an error is raised deleting the records' do
@@ -110,22 +104,17 @@ RSpec.describe Snippets::BulkDestroyService do
it_behaves_like 'error is raised' do
let(:error_message) { 'Failed to remove snippets.' }
end
-
- it 'tries to rollback the repository' do
- expect(subject).to receive(:attempt_rollback_repositories)
-
- subject.execute
- end
end
context 'when snippet does not have a repository attached' do
let!(:snippet_without_repo) { create(:personal_snippet, author: user) }
- it 'does not schedule anything for the snippet without repository and return success' do
+ it 'returns success' do
response = nil
- expect(Repositories::ShellDestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
- expect(Repositories::ShellDestroyService).to receive(:new).with(project_snippet.repository).and_call_original
+ expect(Repositories::DestroyService).to receive(:new).with(personal_snippet.repository).and_call_original
+ expect(Repositories::DestroyService).to receive(:new).with(project_snippet.repository).and_call_original
+ expect(Repositories::DestroyService).to receive(:new).with(snippet_without_repo.repository).and_call_original
expect do
response = subject.execute
@@ -136,38 +125,6 @@ RSpec.describe Snippets::BulkDestroyService do
end
end
- describe '#attempt_rollback_repositories' do
- before do
- Repositories::DestroyService.new(personal_snippet.repository).execute
- end
-
- it 'rollbacks the repository' do
- error_msg = personal_snippet.disk_path + "+#{personal_snippet.id}+deleted.git"
- expect(repository_exists?(personal_snippet, error_msg)).to be_truthy
-
- subject.__send__(:attempt_rollback_repositories)
-
- aggregate_failures do
- expect(repository_exists?(personal_snippet, error_msg)).to be_falsey
- expect(repository_exists?(personal_snippet)).to be_truthy
- end
- end
-
- context 'when an error is raised' do
- before do
- allow_next_instance_of(Repositories::DestroyRollbackService) do |instance|
- allow(instance).to receive(:execute).and_return({ status: :error })
- end
- end
-
- it 'logs the error' do
- expect(Gitlab::AppLogger).to receive(:error).with(/\ARepository .* in path .* could not be rolled back\z/).twice
-
- subject.__send__(:attempt_rollback_repositories)
- end
- end
- end
-
def repository_exists?(snippet, path = snippet.disk_path + ".git")
gitlab_shell.repository_exists?(snippet.snippet_repository.shard_name, path)
end
diff --git a/spec/services/snippets/destroy_service_spec.rb b/spec/services/snippets/destroy_service_spec.rb
index e53d00b9ca1..23765243dd6 100644
--- a/spec/services/snippets/destroy_service_spec.rb
+++ b/spec/services/snippets/destroy_service_spec.rb
@@ -41,7 +41,6 @@ RSpec.describe Snippets::DestroyService do
shared_examples 'deletes the snippet repository' do
it 'removes the snippet repository' do
expect(snippet.repository.exists?).to be_truthy
- expect(GitlabShellWorker).to receive(:perform_in)
expect_next_instance_of(Repositories::DestroyService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
@@ -57,12 +56,6 @@ RSpec.describe Snippets::DestroyService do
end
it_behaves_like 'an unsuccessful destroy'
-
- it 'does not try to rollback repository' do
- expect(Repositories::DestroyRollbackService).not_to receive(:new)
-
- subject
- end
end
context 'when a destroy error is raised' do
@@ -71,12 +64,6 @@ RSpec.describe Snippets::DestroyService do
end
it_behaves_like 'an unsuccessful destroy'
-
- it 'attempts to rollback the repository' do
- expect(Repositories::DestroyRollbackService).to receive(:new).and_call_original
-
- subject
- end
end
context 'when repository is nil' do
diff --git a/spec/services/static_site_editor/config_service_spec.rb b/spec/services/static_site_editor/config_service_spec.rb
deleted file mode 100644
index fed373828a1..00000000000
--- a/spec/services/static_site_editor/config_service_spec.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe StaticSiteEditor::ConfigService do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
-
- # params
- let(:ref) { 'master' }
- let(:path) { 'README.md' }
- let(:return_url) { double(:return_url) }
-
- # stub data
- let(:generated_data) { { generated: true } }
- let(:file_data) { { file: true } }
-
- describe '#execute' do
- subject(:execute) do
- described_class.new(
- container: project,
- current_user: user,
- params: {
- ref: ref,
- path: path,
- return_url: return_url
- }
- ).execute
- end
-
- context 'when insufficient permission' do
- it 'returns an error' do
- expect(execute).to be_error
- expect(execute.message).to eq('Insufficient permissions to read configuration')
- end
- end
-
- context 'for developer' do
- before do
- project.add_developer(user)
-
- allow_next_instance_of(Gitlab::StaticSiteEditor::Config::GeneratedConfig) do |config|
- allow(config).to receive(:data) { generated_data }
- end
- end
-
- context 'when reading file from repo fails with an unexpected error' do
- let(:unexpected_error) { RuntimeError.new('some unexpected error') }
-
- before do
- allow(project.repository).to receive(:blob_data_at).and_raise(unexpected_error)
- end
-
- it 'returns an error response' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_exception).with(unexpected_error).and_call_original
- expect { execute }.to raise_error(unexpected_error)
- end
- end
-
- context 'when file is missing' do
- before do
- allow(project.repository).to receive(:blob_data_at).and_raise(GRPC::NotFound)
- expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, '{}') do |config|
- allow(config).to receive(:valid?) { true }
- allow(config).to receive(:to_hash_with_defaults) { file_data }
- end
- end
-
- it 'returns default config' do
- expect(execute).to be_success
- expect(execute.payload).to eq(generated: true, file: true)
- end
- end
-
- context 'when file is present' do
- before do
- allow(project.repository).to receive(:blob_data_at).with(ref, anything) do
- config_content
- end
- end
-
- context 'and configuration is not valid' do
- let(:config_content) { 'invalid content' }
-
- before do
- expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, config_content) do |config|
- error = 'error'
- allow(config).to receive_message_chain('errors.first') { error }
- allow(config).to receive(:valid?) { false }
- end
- end
-
- it 'returns an error' do
- expect(execute).to be_error
- expect(execute.message).to eq('Invalid configuration format')
- end
- end
-
- context 'and configuration is valid' do
- # NOTE: This has to be a valid config, even though it is mocked, because
- # `expect_next_instance_of` executes the constructor logic.
- let(:config_content) { 'static_site_generator: middleman' }
-
- before do
- expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, config_content) do |config|
- allow(config).to receive(:valid?) { true }
- allow(config).to receive(:to_hash_with_defaults) { file_data }
- end
- end
-
- it 'returns merged generated data and config file data' do
- expect(execute).to be_success
- expect(execute.payload).to eq(generated: true, file: true)
- end
-
- it 'returns an error if any keys would be overwritten by the merge' do
- generated_data[:duplicate_key] = true
- file_data[:duplicate_key] = true
- expect(execute).to be_error
- expect(execute.message).to match(/duplicate key.*duplicate_key.*found/i)
- end
- end
- end
- end
- end
-end
diff --git a/spec/services/terraform/remote_state_handler_spec.rb b/spec/services/terraform/remote_state_handler_spec.rb
index ca392849d49..19c1d4109e9 100644
--- a/spec/services/terraform/remote_state_handler_spec.rb
+++ b/spec/services/terraform/remote_state_handler_spec.rb
@@ -33,6 +33,14 @@ RSpec.describe Terraform::RemoteStateHandler do
it 'returns the state' do
expect(subject.find_with_lock).to eq(state)
end
+
+ context 'with a state scheduled for deletion' do
+ let!(:state) { create(:terraform_state, :deletion_in_progress, project: project, name: 'state') }
+
+ it 'raises an exception' do
+ expect { subject.find_with_lock }.to raise_error(described_class::StateDeletedError)
+ end
+ end
end
end
end
@@ -84,6 +92,13 @@ RSpec.describe Terraform::RemoteStateHandler do
.to raise_error(described_class::StateLockedError)
end
+ it 'raises an exception if the state is scheduled for deletion' do
+ create(:terraform_state, :deletion_in_progress, project: project, name: 'new-state')
+
+ expect { handler.handle_with_lock }
+ .to raise_error(described_class::StateDeletedError)
+ end
+
context 'user does not have permission to modify state' do
let(:user) { developer }
@@ -127,24 +142,28 @@ RSpec.describe Terraform::RemoteStateHandler do
expect { handler.lock! }.to raise_error(described_class::StateLockedError)
end
+
+ it 'raises an exception when the state exists and is scheduled for deletion' do
+ create(:terraform_state, :deletion_in_progress, project: project, name: 'new-state')
+
+ expect { handler.lock! }.to raise_error(described_class::StateDeletedError)
+ end
end
describe '#unlock!' do
- let(:lock_id) { 'abc-abc' }
+ let_it_be(:state) { create(:terraform_state, :locked, project: project, name: 'new-state', lock_xid: 'abc-abc') }
+
+ let(:lock_id) { state.lock_xid }
subject(:handler) do
described_class.new(
project,
user,
- name: 'new-state',
+ name: state.name,
lock_id: lock_id
)
end
- before do
- create(:terraform_state, :locked, project: project, name: 'new-state', lock_xid: 'abc-abc')
- end
-
it 'unlocks the state' do
state = handler.unlock!
@@ -169,6 +188,15 @@ RSpec.describe Terraform::RemoteStateHandler do
.to raise_error(described_class::StateLockedError)
end
end
+
+ context 'with a state scheduled for deletion' do
+ it 'raises an exception' do
+ state.update!(deleted_at: Time.current)
+
+ expect { handler.unlock! }
+ .to raise_error(described_class::StateDeletedError)
+ end
+ end
end
end
end
diff --git a/spec/services/terraform/states/destroy_service_spec.rb b/spec/services/terraform/states/destroy_service_spec.rb
new file mode 100644
index 00000000000..5acf32cd73c
--- /dev/null
+++ b/spec/services/terraform/states/destroy_service_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Terraform::States::DestroyService do
+ let_it_be(:state) { create(:terraform_state, :with_version, :deletion_in_progress) }
+
+ let(:file) { instance_double(Terraform::StateUploader, relative_path: 'path') }
+
+ before do
+ allow_next_found_instance_of(Terraform::StateVersion) do |version|
+ allow(version).to receive(:file).and_return(file)
+ end
+ end
+
+ describe '#execute' do
+ subject { described_class.new(state).execute }
+
+ it 'removes version files from object storage, followed by the state record' do
+ expect(file).to receive(:remove!).once
+ expect(state).to receive(:destroy!)
+
+ subject
+ end
+
+ context 'state is not marked for deletion' do
+ let(:state) { create(:terraform_state) }
+
+ it 'does not delete the state' do
+ expect(state).not_to receive(:destroy!)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/services/terraform/states/trigger_destroy_service_spec.rb b/spec/services/terraform/states/trigger_destroy_service_spec.rb
new file mode 100644
index 00000000000..2e96331779c
--- /dev/null
+++ b/spec/services/terraform/states/trigger_destroy_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Terraform::States::TriggerDestroyService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user, maintainer_projects: [project]) }
+
+ describe '#execute', :aggregate_failures do
+ let_it_be(:state) { create(:terraform_state, project: project) }
+
+ subject { described_class.new(state, current_user: user).execute }
+
+ it 'marks the state as deleted and schedules a cleanup worker' do
+ expect(Terraform::States::DestroyWorker).to receive(:perform_async).with(state.id).once
+
+ expect(subject).to be_success
+ expect(state.deleted_at).to be_like_time(Time.current)
+ end
+
+ shared_examples 'unable to delete state' do
+ it 'does not modify the state' do
+ expect(Terraform::States::DestroyWorker).not_to receive(:perform_async)
+
+ expect { subject }.not_to change(state, :deleted_at)
+ expect(subject).to be_error
+ expect(subject.message).to eq(message)
+ end
+ end
+
+ context 'user does not have permission' do
+ let(:user) { create(:user, developer_projects: [project]) }
+ let(:message) { 'You have insufficient permissions to delete this state' }
+
+ include_examples 'unable to delete state'
+ end
+
+ context 'state is locked' do
+ let(:state) { create(:terraform_state, :locked, project: project) }
+ let(:message) { 'Cannot remove a locked state' }
+
+ include_examples 'unable to delete state'
+ end
+ end
+end
diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb
index 438db6b987b..be4f205afb5 100644
--- a/spec/services/user_project_access_changed_service_spec.rb
+++ b/spec/services/user_project_access_changed_service_spec.rb
@@ -31,6 +31,19 @@ RSpec.describe UserProjectAccessChangedService do
priority: described_class::LOW_PRIORITY)
end
+ it 'permits medium-priority operation' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
+ receive(:bulk_perform_in).with(
+ described_class::MEDIUM_DELAY,
+ [[1], [2]],
+ { batch_delay: 30.seconds, batch_size: 100 }
+ )
+ )
+
+ described_class.new([1, 2]).execute(blocking: false,
+ priority: described_class::MEDIUM_PRIORITY)
+ end
+
it 'sets the current caller_id as related_class in the context of all the enqueued jobs' do
Gitlab::ApplicationContext.with_context(caller_id: 'Foo') do
described_class.new([1, 2]).execute(blocking: false,
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index b99bc860523..068550ec234 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state do
include StubRequests
+ let(:ellipsis) { '…' }
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:project_hook) { create(:project_hook, project: project) }
@@ -268,6 +269,20 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
end
context 'execution logging' do
+ let(:default_log_data) do
+ {
+ trigger: 'push_hooks',
+ url: project_hook.url,
+ request_headers: headers,
+ request_data: data,
+ response_body: 'Success',
+ response_headers: {},
+ response_status: 200,
+ execution_duration: be > 0,
+ internal_error_message: nil
+ }
+ end
+
context 'with success' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
@@ -280,7 +295,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
expect(::WebHooks::LogExecutionWorker).not_to receive(:perform_async)
expect(::WebHooks::LogExecutionService)
.to receive(:new)
- .with(hook: project_hook, log_data: Hash, response_category: :ok)
+ .with(hook: project_hook, log_data: default_log_data, response_category: :ok)
.and_return(double(execute: nil))
service_instance.execute
@@ -291,17 +306,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
- hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: 'Success',
- response_headers: {},
- response_status: 200,
- execution_duration: be > 0,
- internal_error_message: nil
- ),
+ hash_including(default_log_data),
:ok,
nil
)
@@ -328,15 +333,10 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
.with(
project_hook.id,
hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: 'Bad request',
- response_headers: {},
- response_status: 400,
- execution_duration: be > 0,
- internal_error_message: nil
+ default_log_data.merge(
+ response_body: 'Bad request',
+ response_status: 400
+ )
),
:failed,
nil
@@ -356,15 +356,11 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
.with(
project_hook.id,
hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: '',
- response_headers: {},
- response_status: 'internal error',
- execution_duration: be > 0,
- internal_error_message: 'Some HTTP Post error'
+ default_log_data.merge(
+ response_body: '',
+ response_status: 'internal error',
+ internal_error_message: 'Some HTTP Post error'
+ )
),
:error,
nil
@@ -383,23 +379,137 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
.with(
project_hook.id,
- hash_including(
- trigger: 'push_hooks',
- url: project_hook.url,
- request_headers: headers,
- request_data: data,
- response_body: '',
- response_headers: {},
- response_status: 200,
- execution_duration: be > 0,
- internal_error_message: nil
- ),
+ hash_including(default_log_data.merge(response_body: '')),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'with oversize response body' do
+ let(:oversize_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT + 1) }
+ let(:stripped_body) { 'a' * (described_class::RESPONSE_BODY_SIZE_LIMIT - ellipsis.bytesize) + ellipsis }
+
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: oversize_body)
+ end
+
+ it 'queues LogExecutionWorker with stripped response_body' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(response_body: stripped_body)),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'with a massive number of headers' do
+ let(:response_headers) do
+ (1..described_class::RESPONSE_HEADERS_COUNT_LIMIT + 1).to_a.to_h do |num|
+ ["header-#{num}", SecureRandom.hex(num)]
+ end
+ end
+
+ let(:expected_response_headers) do
+ (1..described_class::RESPONSE_HEADERS_COUNT_LIMIT).to_a.to_h do |num|
+ # Capitalized
+ ["Header-#{num}", response_headers["header-#{num}"]]
+ end
+ end
+
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(
+ status: 200, body: 'Success', headers: response_headers
+ )
+ end
+
+ it 'queues LogExecutionWorker with limited amount of headers' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(response_headers: expected_response_headers)),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'with oversize header' do
+ let(:oversize_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT + 1) }
+ let(:stripped_header) { 'a' * (described_class::RESPONSE_HEADERS_SIZE_LIMIT - ellipsis.bytesize) + ellipsis }
+ let(:response_headers) { { 'oversized-header' => oversize_header } }
+ let(:expected_response_headers) { { 'Oversized-Header' => stripped_header } }
+
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(
+ status: 200, body: 'Success', headers: response_headers
+ )
+ end
+
+ it 'queues LogExecutionWorker with stripped header value' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(response_headers: expected_response_headers)),
+ :ok,
+ nil
+ )
+
+ service_instance.execute
+ end
+ end
+
+ context 'with log data exceeding Sidekiq limit' do
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
+ end
+
+ it 'queues LogExecutionWorker with request_data overridden in the second attempt' do
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data),
+ :ok,
+ nil
+ )
+ .and_raise(
+ Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50)
+ )
+ .ordered
+ expect(WebHooks::LogExecutionWorker).to receive(:perform_async)
+ .with(
+ project_hook.id,
+ hash_including(default_log_data.merge(request_data: WebHookLog::OVERSIZE_REQUEST_DATA)),
:ok,
nil
)
+ .and_call_original
+ .ordered
service_instance.execute
end
+
+ context 'when new log data still exceeds the limit' do
+ before do
+ allow(WebHooks::LogExecutionWorker).to receive(:perform_async).and_raise(
+ Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError.new(WebHooks::LogExecutionWorker, 100, 50)
+ )
+ end
+
+ it 'raises an exception' do
+ expect do
+ service_instance.execute
+ end.to raise_error(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError)
+ end
+ end
end
end
end
@@ -411,7 +521,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
def expect_to_rate_limit(hook, threshold:, throttled: false)
expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?)
- .with(:web_hook_calls, scope: [hook], threshold: threshold)
+ .with(:web_hook_calls, scope: [hook.parent.root_namespace], threshold: threshold)
.and_return(throttled)
end
@@ -460,13 +570,8 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
end
end
- context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_rate_limiting do
+ context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_rate_limiting, :freeze_time do
before do
- # Set a high interval to avoid intermittent failures in CI
- allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits).and_return(
- web_hook_calls: { interval: 1.day }
- )
-
expect_to_perform_worker(project_hook).exactly(threshold).times
threshold.times { service_instance.async_execute }
diff --git a/spec/services/web_hooks/destroy_service_spec.rb b/spec/services/web_hooks/destroy_service_spec.rb
index 5269fe08ac0..4d9bb18e540 100644
--- a/spec/services/web_hooks/destroy_service_spec.rb
+++ b/spec/services/web_hooks/destroy_service_spec.rb
@@ -7,50 +7,46 @@ RSpec.describe WebHooks::DestroyService do
subject { described_class.new(user) }
- shared_examples 'batched destroys' do
- it 'destroys all hooks in batches' do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- expect(subject).to receive(:delete_web_hook_logs_in_batches).exactly(4).times.and_call_original
-
- expect do
- status = subject.execute(hook)
- expect(status[:async]).to be false
- end
- .to change { WebHook.count }.from(1).to(0)
- .and change { WebHookLog.count }.from(3).to(0)
- end
-
- it 'returns an error if sync destroy fails' do
- expect(hook).to receive(:destroy).and_return(false)
+ describe '#execute' do
+ %i[system_hook project_hook].each do |factory|
+ context "deleting a #{factory}" do
+ let!(:hook) { create(factory) } # rubocop: disable Rails/SaveBang (false-positive!)
+ let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
- result = subject.sync_destroy(hook)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Unable to destroy #{hook.model_name.human}")
- end
+ it 'is successful' do
+ expect(subject.execute(hook)).to be_success
+ end
- it 'schedules an async delete' do
- stub_const('WebHooks::DestroyService::LOG_COUNT_THRESHOLD', 1)
+ it 'destroys the hook' do
+ expect { subject.execute(hook) }.to change(WebHook, :count).from(1).to(0)
+ end
- expect(WebHooks::DestroyWorker).to receive(:perform_async).with(user.id, hook.id).and_call_original
+ it 'does not destroy logs' do
+ expect { subject.execute(hook) }.not_to change(WebHookLog, :count)
+ end
- status = subject.execute(hook)
+ it 'schedules the destruction of logs' do
+ expect(WebHooks::LogDestroyWorker).to receive(:perform_async).with({ 'hook_id' => hook.id })
+ expect(Gitlab::AppLogger).to receive(:info).with(match(/scheduled a deletion of logs/))
- expect(status[:async]).to be true
- end
- end
+ subject.execute(hook)
+ end
- context 'with system hook' do
- let!(:hook) { create(:system_hook, url: "http://example.com") }
- let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+ context 'when the hook fails to destroy' do
+ before do
+ allow(hook).to receive(:destroy).and_return(false)
+ end
- it_behaves_like 'batched destroys'
- end
+ it 'is not a success' do
+ expect(WebHooks::LogDestroyWorker).not_to receive(:perform_async)
- context 'with project hook' do
- let!(:hook) { create(:project_hook) }
- let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+ r = subject.execute(hook)
- it_behaves_like 'batched destroys'
+ expect(r).to be_error
+ expect(r[:message]).to match %r{Unable to destroy}
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/services/web_hooks/log_destroy_service_spec.rb b/spec/services/web_hooks/log_destroy_service_spec.rb
new file mode 100644
index 00000000000..7634726e5a4
--- /dev/null
+++ b/spec/services/web_hooks/log_destroy_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::LogDestroyService do
+ subject(:service) { described_class.new(hook.id) }
+
+ describe '#execute' do
+ shared_examples 'deletes web hook logs for hook' do
+ before do
+ create_list(:web_hook_log, 3, web_hook: hook)
+ hook.destroy! # The LogDestroyService is expected to be called _after_ hook destruction
+ end
+
+ it 'deletes the logs' do
+ expect { service.execute }
+ .to change(WebHookLog, :count).from(3).to(0)
+ end
+
+ context 'when the data-set exceeds the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ it 'deletes the logs' do
+ expect { service.execute }
+ .to change(WebHookLog, :count).from(3).to(0)
+ end
+ end
+
+ context 'when it encounters an error' do
+ before do
+ allow(WebHookLog).to receive(:delete_batch_for).and_raise(StandardError.new('bang'))
+ end
+
+ it 'reports the error' do
+ expect(service.execute)
+ .to be_error
+ .and have_attributes(message: 'bang')
+ end
+ end
+ end
+
+ context 'with system hook' do
+ let!(:hook) { create(:system_hook, url: "http://example.com") }
+
+ it_behaves_like 'deletes web hook logs for hook'
+ end
+
+ context 'with project hook' do
+ let!(:hook) { create(:project_hook) }
+
+ it_behaves_like 'deletes web hook logs for hook'
+ end
+ end
+end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index b2d3f428899..9030326dadb 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -8,11 +8,12 @@ RSpec.describe WorkItems::UpdateService do
let_it_be_with_reload(:work_item) { create(:work_item, project: project, assignees: [developer]) }
let(:spam_params) { double }
+ let(:widget_params) { {} }
let(:opts) { {} }
let(:current_user) { developer }
describe '#execute' do
- subject(:update_work_item) { described_class.new(project: project, current_user: current_user, params: opts, spam_params: spam_params).execute(work_item) }
+ subject(:update_work_item) { described_class.new(project: project, current_user: current_user, params: opts, spam_params: spam_params, widget_params: widget_params).execute(work_item) }
before do
stub_spam_services
@@ -69,5 +70,17 @@ RSpec.describe WorkItems::UpdateService do
end
end
end
+
+ context 'when updating widgets' do
+ context 'for the description widget' do
+ let(:widget_params) { { description_widget: { description: 'changed' } } }
+
+ it 'updates the description of the work item' do
+ update_work_item
+
+ expect(work_item.description).to eq('changed')
+ end
+ end
+ end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index e49e82f6ab6..b39153e79fc 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -53,8 +53,10 @@ end
require 'rainbow/ext/string'
Rainbow.enabled = false
-require_relative('../ee/spec/spec_helper') if Gitlab.ee?
+# Require JH first because we need to override some EE methods with JH methods;
+# if we load EE first, we can't find the JH modules in the prepend_mod method
require_relative('../jh/spec/spec_helper') if Gitlab.jh?
+require_relative('../ee/spec/spec_helper') if Gitlab.ee?
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
@@ -256,16 +258,6 @@ RSpec.configure do |config|
end
config.around do |example|
- if example.metadata.fetch(:stub_feature_flags, true)
- # It doesn't make sense for this to default to enabled as we only plan to
- # use this temporarily to override an environment variable but eventually
- # we'll just use the environment variable value when we've completed the
- # gradual rollout. This stub must happen in around block as there are other
- # around blocks in tests that will run before this and get the wrong
- # database connection.
- stub_feature_flags(force_no_sharing_primary_model: false)
- end
-
example.run
end
@@ -308,10 +300,6 @@ RSpec.configure do |config|
# See https://gitlab.com/gitlab-org/gitlab/-/issues/33867
stub_feature_flags(file_identifier_hash: false)
- # The following `vue_issues_list` stub can be removed
- # once the Vue issues page has feature parity with the current Haml page
- stub_feature_flags(vue_issues_list: false)
-
# Disable `main_branch_over_master` as we migrate
# from `master` to `main` across our codebase.
# It's done in order to preserve consistency in tests
diff --git a/spec/support/factory_bot.rb b/spec/support/factory_bot.rb
index 5f22fa11e9e..6faa2db3330 100644
--- a/spec/support/factory_bot.rb
+++ b/spec/support/factory_bot.rb
@@ -2,6 +2,7 @@
FactoryBot::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
+ include StubMethodCalls
# FactoryBot doesn't allow yet to add a helper that can be used in factories
# While the fixture_file_upload helper is reasonable to be used there:
diff --git a/spec/support/graphql/field_inspection.rb b/spec/support/graphql/field_inspection.rb
index e5fe37ec555..8730f82b893 100644
--- a/spec/support/graphql/field_inspection.rb
+++ b/spec/support/graphql/field_inspection.rb
@@ -20,7 +20,7 @@ module Graphql
def type
@type ||= begin
- field_type = @field.type.respond_to?(:to_graphql) ? @field.type.to_graphql : @field.type
+ field_type = @field.type
# The type could be nested. For example `[GraphQL::Types::String]`:
# - List
diff --git a/spec/support/graphql/field_selection.rb b/spec/support/graphql/field_selection.rb
index 00323c46d69..432340cfdb5 100644
--- a/spec/support/graphql/field_selection.rb
+++ b/spec/support/graphql/field_selection.rb
@@ -46,7 +46,7 @@ module Graphql
NO_SKIP = ->(_name, _field) { false }
- def self.select_fields(type, skip = NO_SKIP, parent_types = Set.new, max_depth = 3)
+ def self.select_fields(type, skip = NO_SKIP, max_depth = 3)
return new if max_depth <= 0 || !type.kind.fields?
new(type.fields.flat_map do |name, field|
@@ -55,12 +55,8 @@ module Graphql
inspected = ::Graphql::FieldInspection.new(field)
singular_field_type = inspected.type
- # If field type is the same as parent type, then we're hitting into
- # mutual dependency. Break it from infinite recursion
- next [] if parent_types.include?(singular_field_type)
-
if inspected.nested_fields?
- subselection = select_fields(singular_field_type, skip, parent_types | [type], max_depth - 1)
+ subselection = select_fields(singular_field_type, skip, max_depth - 1)
next [] if subselection.empty?
[[name, subselection.to_h]]
diff --git a/spec/support/graphql/resolver_factories.rb b/spec/support/graphql/resolver_factories.rb
index 8188f17cc43..3c5aad34e8b 100644
--- a/spec/support/graphql/resolver_factories.rb
+++ b/spec/support/graphql/resolver_factories.rb
@@ -15,8 +15,8 @@ module Graphql
private
- def simple_resolver(resolved_value = 'Resolved value')
- Class.new(Resolvers::BaseResolver) do
+ def simple_resolver(resolved_value = 'Resolved value', base_class: Resolvers::BaseResolver)
+ Class.new(base_class) do
define_method :resolve do |**_args|
resolved_value
end
diff --git a/spec/support/helpers/callouts_test_helper.rb b/spec/support/helpers/callouts_test_helper.rb
new file mode 100644
index 00000000000..8c7faa71d9f
--- /dev/null
+++ b/spec/support/helpers/callouts_test_helper.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module CalloutsTestHelper
+ def callouts_trials_link_path
+ '/-/trial_registrations/new?glm_content=gold-callout&glm_source=gitlab.com'
+ end
+end
+
+CalloutsTestHelper.prepend_mod
diff --git a/spec/support/helpers/countries_controller_test_helper.rb b/spec/support/helpers/countries_controller_test_helper.rb
new file mode 100644
index 00000000000..5d36a29bba7
--- /dev/null
+++ b/spec/support/helpers/countries_controller_test_helper.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module CountriesControllerTestHelper
+ def world_deny_list
+ ::World::DENYLIST + ::World::JH_MARKET
+ end
+end
+
+CountriesControllerTestHelper.prepend_mod
diff --git a/spec/support/helpers/doc_url_helper.rb b/spec/support/helpers/doc_url_helper.rb
new file mode 100644
index 00000000000..bbff4827c56
--- /dev/null
+++ b/spec/support/helpers/doc_url_helper.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module DocUrlHelper
+ def version
+ "13.4.0-ee"
+ end
+
+ def doc_url(documentation_base_url)
+ "#{documentation_base_url}/13.4/ee/#{path}.html"
+ end
+
+ def doc_url_without_version(documentation_base_url)
+ "#{documentation_base_url}/ee/#{path}.html"
+ end
+
+ def stub_doc_file_read(file_name: 'index.md', content:)
+ expect_file_read(File.join(Rails.root, 'doc', file_name), content: content)
+ end
+end
+
+DocUrlHelper.prepend_mod
diff --git a/spec/support/helpers/emails_helper_test_helper.rb b/spec/support/helpers/emails_helper_test_helper.rb
new file mode 100644
index 00000000000..ea7dbc89ebd
--- /dev/null
+++ b/spec/support/helpers/emails_helper_test_helper.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module EmailsHelperTestHelper
+ def default_header_logo
+ %r{<img alt="GitLab" src="/images/mailers/gitlab_logo\.(?:gif|png)" width="\d+" height="\d+" />}
+ end
+end
+
+EmailsHelperTestHelper.prepend_mod
diff --git a/spec/support/helpers/form_builder_helpers.rb b/spec/support/helpers/form_builder_helpers.rb
new file mode 100644
index 00000000000..4bae7421c4d
--- /dev/null
+++ b/spec/support/helpers/form_builder_helpers.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module FormBuilderHelpers
+ def fake_action_view_base
+ lookup_context = ActionView::LookupContext.new(ActionController::Base.view_paths)
+
+ ActionView::Base.new(lookup_context, {}, ApplicationController.new)
+ end
+
+ def fake_form_for(&block)
+ fake_action_view_base.form_for :user, url: '/user', &block
+ end
+end
diff --git a/spec/support/helpers/gitaly_setup.rb b/spec/support/helpers/gitaly_setup.rb
index 264281ef94a..56993fc27b7 100644
--- a/spec/support/helpers/gitaly_setup.rb
+++ b/spec/support/helpers/gitaly_setup.rb
@@ -9,6 +9,7 @@
require 'securerandom'
require 'socket'
require 'logger'
+require 'fileutils'
require 'bundler'
module GitalySetup
@@ -151,6 +152,9 @@ module GitalySetup
toml ||= config_path(service)
args = service_cmd(service, toml)
+ # Ensure that tmp/run exists
+ FileUtils.mkdir_p(runtime_dir)
+
# Ensure user configuration does not affect Git
# Context: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/58776#note_547613780
env = self.env.merge('HOME' => nil, 'XDG_CONFIG_HOME' => nil)
@@ -369,7 +373,7 @@ module GitalySetup
message += "- The `gitaly` binary does not exist: #{gitaly_binary}\n" unless File.exist?(gitaly_binary)
message += "- The `praefect` binary does not exist: #{praefect_binary}\n" unless File.exist?(praefect_binary)
- message += "- The `git` binary does not exist: #{git_binary}\n" unless File.exist?(git_binary)
+ message += "- No `git` binaries exist\n" if git_binaries.empty?
message += "\nCheck log/gitaly-test.log & log/praefect-test.log for errors.\n"
@@ -381,8 +385,8 @@ module GitalySetup
message
end
- def git_binary
- File.join(tmp_tests_gitaly_dir, "_build", "bin", "gitaly-git")
+ def git_binaries
+ Dir.glob(File.join(tmp_tests_gitaly_dir, "_build", "bin", "gitaly-git-v*"))
end
def gitaly_binary
@@ -392,8 +396,4 @@ module GitalySetup
def praefect_binary
File.join(tmp_tests_gitaly_dir, "_build", "bin", "praefect")
end
-
- def git_binary_exists?
- File.exist?(git_binary)
- end
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index db8d45f61ea..d0a1941817a 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -26,10 +26,44 @@ module GraphqlHelpers
end
end
+ # Some arguments use `as:` to expose a different name internally.
+ # Transform the args to use those names
+ def self.deep_transform_args(args, field)
+ args.to_h do |k, v|
+ argument = field.arguments[k.to_s.camelize(:lower)]
+ [argument.keyword, v.is_a?(Hash) ? deep_transform_args(v, argument.type) : v]
+ end
+ end
+
+ # Convert incoming args into the form usually passed in from the client,
+ # all strings, etc.
+ def self.as_graphql_argument_literals(args)
+ args.transform_values { |value| transform_arg_value(value) }
+ end
+
+ def self.transform_arg_value(value)
+ case value
+ when Hash
+ as_graphql_argument_literals(value)
+ when Array
+ value.map { |x| transform_arg_value(x) }
+ when Time, ActiveSupport::TimeWithZone
+ value.strftime("%F %T.%N %z")
+ when Date, GlobalID, Symbol
+ value.to_s
+ else
+ value
+ end
+ end
+
# Run this resolver exactly as it would be called in the framework. This
# includes all authorization hooks, all argument processing and all result
# wrapping.
# see: GraphqlHelpers#resolve_field
+ #
+ # TODO: this is too coupled to gem internals, making upgrades incredibly
+ # painful, and bypasses much of the validation of the framework.
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/363121
def resolve(
resolver_class, # [Class[<= BaseResolver]] The resolver at test.
obj: nil, # [Any] The BaseObject#object for the resolver (available as `#object` in the resolver).
@@ -37,7 +71,8 @@ module GraphqlHelpers
ctx: {}, # [#to_h] The current context values.
schema: GitlabSchema, # [GraphQL::Schema] Schema to use during execution.
parent: :not_given, # A GraphQL query node to be passed as the `:parent` extra.
- lookahead: :not_given # A GraphQL lookahead object to be passed as the `:lookahead` extra.
+ lookahead: :not_given, # A GraphQL lookahead object to be passed as the `:lookahead` extra.
+ arg_style: :internal_prepared # Args are in internal format, but should use more rigorous processing
)
# All resolution goes through fields, so we need to create one here that
# uses our resolver. Thankfully, apart from the field name, resolvers
@@ -49,7 +84,6 @@ module GraphqlHelpers
field = ::Types::BaseField.new(**field_options)
# All mutations accept a single `:input` argument. Wrap arguments here.
- # See the unwrapping below in GraphqlHelpers#resolve_field
args = { input: args } if resolver_class <= ::Mutations::BaseMutation && !args.key?(:input)
resolve_field(field, obj,
@@ -57,7 +91,8 @@ module GraphqlHelpers
ctx: ctx,
schema: schema,
object_type: resolver_parent,
- extras: { parent: parent, lookahead: lookahead })
+ extras: { parent: parent, lookahead: lookahead },
+ arg_style: arg_style)
end
# Resolve the value of a field on an object.
@@ -85,21 +120,22 @@ module GraphqlHelpers
# NB: Arguments are passed from the client's perspective. If there is an argument
# `foo` aliased as `bar`, then we would pass `args: { bar: the_value }`, and
# types are checked before resolution.
+ # rubocop:disable Metrics/ParameterLists
def resolve_field(
- field, # An instance of `BaseField`, or the name of a field on the current described_class
- object, # The current object of the `BaseObject` this field 'belongs' to
- args: {}, # Field arguments (keys will be fieldnamerized)
- ctx: {}, # Context values (important ones are :current_user)
- extras: {}, # Stub values for field extras (parent and lookahead)
- current_user: :not_given, # The current user (specified explicitly, overrides ctx[:current_user])
- schema: GitlabSchema, # A specific schema instance
- object_type: described_class # The `BaseObject` type this field belongs to
+ field, # An instance of `BaseField`, or the name of a field on the current described_class
+ object, # The current object of the `BaseObject` this field 'belongs' to
+ args: {}, # Field arguments (keys will be fieldnamerized)
+ ctx: {}, # Context values (important ones are :current_user)
+ extras: {}, # Stub values for field extras (parent and lookahead)
+ current_user: :not_given, # The current user (specified explicitly, overrides ctx[:current_user])
+ schema: GitlabSchema, # A specific schema instance
+ object_type: described_class, # The `BaseObject` type this field belongs to
+ arg_style: :internal_prepared # Args are in internal format, but should use more rigorous processing
)
field = to_base_field(field, object_type)
ctx[:current_user] = current_user unless current_user == :not_given
query = GraphQL::Query.new(schema, context: ctx.to_h)
extras[:lookahead] = negative_lookahead if extras[:lookahead] == :not_given && field.extras.include?(:lookahead)
-
query_ctx = query.context
mock_extras(query_ctx, **extras)
@@ -107,29 +143,58 @@ module GraphqlHelpers
parent = object_type.authorized_new(object, query_ctx)
raise UnauthorizedObject unless parent
- # TODO: This will need to change when we move to the interpreter:
- # At that point, arguments will be a plain ruby hash rather than
- # an Arguments object
- # see: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27536
- # https://gitlab.com/gitlab-org/gitlab/-/issues/210556
- arguments = field.to_graphql.arguments_class.new(
- GraphqlHelpers.deep_fieldnamerize(args),
- context: query_ctx,
- defaults_used: []
- )
-
# we enable the request store so we can track gitaly calls.
::Gitlab::WithRequestStore.with_request_store do
- # TODO: This will need to change when we move to the interpreter - at that
- # point we will call `field#resolve`
-
- # Unwrap the arguments to mutations. This pairs with the wrapping in GraphqlHelpers#resolve
- # If arguments are not wrapped first, then arguments processing will raise.
- # If arguments are not unwrapped here, then the resolve method of the mutation will raise argument errors.
- arguments = arguments.to_kwargs[:input] if field.resolver && field.resolver <= ::Mutations::BaseMutation
+ prepared_args = case arg_style
+ when :internal_prepared
+ args_internal_prepared(field, args: args, query_ctx: query_ctx, parent: parent, extras: extras, query: query)
+ else
+ args_internal(field, args: args, query_ctx: query_ctx, parent: parent, extras: extras, query: query)
+ end
+
+ if prepared_args.class <= Gitlab::Graphql::Errors::BaseError
+ prepared_args
+ else
+ field.resolve(parent, prepared_args, query_ctx)
+ end
+ end
+ end
+ # rubocop:enable Metrics/ParameterLists
- field.resolve_field(parent, arguments, query_ctx)
+ # Pros:
+ # - The original way we handled arguments
+ #
+ # Cons:
+ # - the `prepare` method of a type is not called, whether defined as a proc or as a method
+ # on the type. For example `:cluster_id` in ee/app/graphql/resolvers/vulnerabilities_resolver.rb,
+ # or `prepare` in app/graphql/types/range_input_type.rb, used by Types::TimeframeInputType
+ def args_internal(field, args:, query_ctx:, parent:, extras:, query:)
+ arguments = GraphqlHelpers.deep_transform_args(args, field)
+ arguments.merge!(extras.reject { |_k, v| v == :not_given })
+ end
+
+ # Pros:
+ # - Allows the use of ruby types, without having to pass in strings
+ # - All args are converted into strings just like if it was called from a client
+ # - Much stronger argument verification
+ #
+ # Cons:
+ # - Some values, such as enums, would need to be changed in the specs to use the
+ # external values, because there is no easy way to handle them.
+ #
+ # Take internal-style args and force them into client-style args
+ def args_internal_prepared(field, args:, query_ctx:, parent:, extras:, query:)
+ arguments = GraphqlHelpers.as_graphql_argument_literals(args)
+ arguments.merge!(extras.reject { |_k, v| v == :not_given })
+
+ # Use public API to properly prepare the args for use by the resolver.
+ # It uses `coerce_arguments` under the covers
+ prepared_args = nil
+ query.arguments_cache.dataload_for(GraphqlHelpers.deep_fieldnamerize(arguments), field, parent) do |kwarg_arguments|
+ prepared_args = kwarg_arguments
end
+
+ prepared_args.respond_to?(:keyword_arguments) ? prepared_args.keyword_arguments : prepared_args
end
def mock_extras(context, parent: :not_given, lookahead: :not_given)
@@ -148,7 +213,7 @@ module GraphqlHelpers
def resolver_instance(resolver_class, obj: nil, ctx: {}, field: nil, schema: GitlabSchema, subscription_update: false)
if ctx.is_a?(Hash)
- q = double('Query', schema: schema, subscription_update?: subscription_update)
+ q = double('Query', schema: schema, subscription_update?: subscription_update, warden: GraphQL::Schema::Warden::PassThruWarden)
ctx = GraphQL::Query::Context.new(query: q, object: obj, values: ctx)
end
@@ -357,8 +422,8 @@ module GraphqlHelpers
end
end
- def query_double(schema:)
- double('query', schema: schema)
+ def query_double(schema: empty_schema)
+ double('query', schema: schema, warden: GraphQL::Schema::Warden::PassThruWarden)
end
def wrap_fields(fields)
@@ -380,7 +445,7 @@ module GraphqlHelpers
FIELDS
end
- def all_graphql_fields_for(class_name, parent_types = Set.new, max_depth: 3, excluded: [])
+ def all_graphql_fields_for(class_name, max_depth: 3, excluded: [])
# pulling _all_ fields can generate a _huge_ query (like complexity 180,000),
# and significantly increase spec runtime. so limit the depth by default
return if max_depth <= 0
@@ -397,7 +462,7 @@ module GraphqlHelpers
# We can't guess arguments, so skip fields that require them
skip = ->(name, field) { excluded.include?(name) || required_arguments?(field) }
- ::Graphql::FieldSelection.select_fields(type, skip, parent_types, max_depth)
+ ::Graphql::FieldSelection.select_fields(type, skip, max_depth)
end
def with_signature(variables, query)
@@ -569,8 +634,11 @@ module GraphqlHelpers
# Helps migrate to the new GraphQL interpreter,
# https://gitlab.com/gitlab-org/gitlab/-/issues/210556
- def expect_graphql_error_to_be_created(error_class, match_message = nil)
- expect { yield }.to raise_error(error_class, match_message)
+ def expect_graphql_error_to_be_created(error_class, match_message = '')
+ resolved = yield
+
+ expect(resolved).to be_instance_of(error_class)
+ expect(resolved.message).to match(match_message)
end
def flattened_errors
@@ -644,7 +712,7 @@ module GraphqlHelpers
end
def allow_high_graphql_recursion
- allow_any_instance_of(Gitlab::Graphql::QueryAnalyzers::RecursionAnalyzer).to receive(:recursion_threshold).and_return 1000
+ allow_any_instance_of(Gitlab::Graphql::QueryAnalyzers::AST::RecursionAnalyzer).to receive(:recursion_threshold).and_return 1000
end
def allow_high_graphql_transaction_threshold
@@ -699,13 +767,13 @@ module GraphqlHelpers
end
# assumes query_string and user to be let-bound in the current context
- def execute_query(query_type, schema: empty_schema, graphql: query_string, raise_on_error: false)
+ def execute_query(query_type = Types::QueryType, schema: empty_schema, graphql: query_string, raise_on_error: false, variables: {})
schema.query(query_type)
r = schema.execute(
graphql,
context: { current_user: user },
- variables: {}
+ variables: variables
)
if raise_on_error && r.to_h['errors'].present?
@@ -717,7 +785,6 @@ module GraphqlHelpers
def empty_schema
Class.new(GraphQL::Schema) do
- use GraphQL::Pagination::Connections
use Gitlab::Graphql::Pagination::Connections
use BatchLoader::GraphQL
@@ -817,7 +884,3 @@ module GraphqlHelpers
object_type.fields[name] || (raise ArgumentError, "Unknown field #{name} for #{described_class.graphql_name}")
end
end
-
-# This warms our schema, doing this as part of loading the helpers to avoid
-# duplicate loading error when Rails tries autoload the types.
-GitlabSchema.graphql_definition
diff --git a/spec/support/helpers/jira_service_helper.rb b/spec/support/helpers/jira_integration_helpers.rb
index 3cfd0de06e8..66940314589 100644
--- a/spec/support/helpers/jira_service_helper.rb
+++ b/spec/support/helpers/jira_integration_helpers.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-module JiraServiceHelper
- JIRA_URL = "http://jira.example.net"
- JIRA_API = JIRA_URL + "/rest/api/2"
+module JiraIntegrationHelpers
+ JIRA_URL = 'http://jira.example.net'
+ JIRA_API = "#{JIRA_URL}/rest/api/2"
def jira_integration_settings
url = JIRA_URL
@@ -17,6 +17,7 @@ module JiraServiceHelper
end
def jira_issue_comments
+ # rubocop: disable Layout/LineLength
"{\"startAt\":0,\"maxResults\":11,\"total\":11,
\"comments\":[{\"self\":\"http://0.0.0.0:4567/rest/api/2/issue/10002/comment/10609\",
\"id\":\"10609\",\"author\":{\"self\":\"http://0.0.0.0:4567/rest/api/2/user?username=gitlab\",
@@ -51,30 +52,31 @@ module JiraServiceHelper
\"updated\":\"2015-04-01T03:45:55.667+0200\"
}
]}"
+ # rubocop: enable Layout/LineLength
end
def jira_project_url
- JIRA_API + "/project"
+ "#{JIRA_API}/project"
end
def jira_api_comment_url(issue_id)
- JIRA_API + "/issue/#{issue_id}/comment"
+ "#{JIRA_API}/issue/#{issue_id}/comment"
end
def jira_api_remote_link_url(issue_id)
- JIRA_API + "/issue/#{issue_id}/remotelink"
+ "#{JIRA_API}/issue/#{issue_id}/remotelink"
end
def jira_api_transition_url(issue_id)
- JIRA_API + "/issue/#{issue_id}/transitions"
+ "#{JIRA_API}/issue/#{issue_id}/transitions"
end
def jira_api_test_url
- JIRA_API + "/myself"
+ "#{JIRA_API}/myself"
end
def jira_issue_url(issue_id)
- JIRA_API + "/issue/#{issue_id}"
+ "#{JIRA_API}/issue/#{issue_id}"
end
def stub_jira_integration_test
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 29b1bb260f2..c93ef8b0ead 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -117,6 +117,14 @@ module LoginHelpers
click_button "oauth-login-#{provider}"
end
+ def register_via(provider, uid, email, additional_info: {})
+ mock_auth_hash(provider, uid, email, additional_info: additional_info)
+ visit new_user_registration_path
+ expect(page).to have_content('Create an account using')
+
+ click_link_or_button "oauth-login-#{provider}"
+ end
+
def fake_successful_u2f_authentication
allow(U2fRegistration).to receive(:authenticate).and_return(true)
FakeU2fDevice.new(page, nil).fake_u2f_authentication
diff --git a/spec/support/helpers/namespaces_test_helper.rb b/spec/support/helpers/namespaces_test_helper.rb
index 9762c38a9bb..08224cfd43c 100644
--- a/spec/support/helpers/namespaces_test_helper.rb
+++ b/spec/support/helpers/namespaces_test_helper.rb
@@ -8,6 +8,10 @@ module NamespacesTestHelper
def get_buy_storage_path(namespace)
buy_storage_subscriptions_path(selected_group: namespace.id)
end
+
+ def get_buy_storage_url(namespace)
+ buy_storage_subscriptions_url(selected_group: namespace.id)
+ end
end
NamespacesTestHelper.prepend_mod
diff --git a/spec/support/helpers/next_instance_of.rb b/spec/support/helpers/next_instance_of.rb
index 461d411a5ce..3c88715615d 100644
--- a/spec/support/helpers/next_instance_of.rb
+++ b/spec/support/helpers/next_instance_of.rb
@@ -22,7 +22,7 @@ module NextInstanceOf
def stub_new(target, number, ordered = false, *new_args, &blk)
receive_new = receive(:new)
receive_new.ordered if ordered
- receive_new.with(*new_args) if new_args.any?
+ receive_new.with(*new_args) if new_args.present?
if number.is_a?(Range)
receive_new.at_least(number.begin).times if number.begin
diff --git a/spec/support/helpers/project_helpers.rb b/spec/support/helpers/project_helpers.rb
index 2ea6405e48c..ef8947ab340 100644
--- a/spec/support/helpers/project_helpers.rb
+++ b/spec/support/helpers/project_helpers.rb
@@ -17,12 +17,12 @@ module ProjectHelpers
end
end
- def update_feature_access_level(project, access_level)
+ def update_feature_access_level(project, access_level, additional_params = {})
features = ProjectFeature::FEATURES.dup
features.delete(:pages)
params = features.each_with_object({}) { |feature, h| h["#{feature}_access_level"] = access_level }
- project.update!(params)
+ project.update!(params.merge(additional_params))
end
def create_project_with_statistics(namespace = nil, with_data: false, size_multiplier: 1)
diff --git a/spec/support/helpers/project_template_test_helper.rb b/spec/support/helpers/project_template_test_helper.rb
new file mode 100644
index 00000000000..eab41f6a1cf
--- /dev/null
+++ b/spec/support/helpers/project_template_test_helper.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ProjectTemplateTestHelper
+ def all_templates
+ %w[
+ rails spring express iosswift dotnetcore android
+ gomicro gatsby hugo jekyll plainhtml gitbook
+ hexo middleman gitpod_spring_petclinic nfhugo
+ nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
+ serverless_framework tencent_serverless_framework
+ jsonnet cluster_management kotlin_native_linux
+ pelican
+ ]
+ end
+end
+
+ProjectTemplateTestHelper.prepend_mod
diff --git a/spec/support/helpers/search_settings_helpers.rb b/spec/support/helpers/search_settings_helpers.rb
index 838f897bff5..a453ea7fa8f 100644
--- a/spec/support/helpers/search_settings_helpers.rb
+++ b/spec/support/helpers/search_settings_helpers.rb
@@ -1,5 +1,5 @@
# frozen_string_literal: true
module SearchHelpers
- self::INPUT_PLACEHOLDER = 'Search settings'
+ self::INPUT_PLACEHOLDER = 'Search page'
end
diff --git a/spec/support/helpers/stub_method_calls.rb b/spec/support/helpers/stub_method_calls.rb
new file mode 100644
index 00000000000..45d704958ca
--- /dev/null
+++ b/spec/support/helpers/stub_method_calls.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+# Used to stub methods for factories where we can't
+# use rspec-mocks.
+#
+# Examples:
+# stub_method(user, :some_method) { |var1, var2| var1 + var2 }
+# stub_method(user, :some_method) { true }
+# stub_method(user, :some_method) => nil
+# stub_method(user, :some_method) do |*args|
+# true
+# end
+#
+# restore_original_method(user, :some_method)
+# restore_original_methods(user)
+#
+module StubMethodCalls
+ AlreadyImplementedError = Class.new(StandardError)
+
+ def stub_method(object, method, &block)
+ Backup.stub_method(object, method, &block)
+ end
+
+ def restore_original_method(object, method)
+ Backup.restore_method(object, method)
+ end
+
+ def restore_original_methods(object)
+    Backup.stubbed_methods(object).each_key { |method| restore_original_method(object, method) }
+ end
+
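+  # Keeps the original (pre-stub) methods in a `_stubbed_methods` hash defined
+  # on the object's singleton class, so that stubs can be removed and the
+  # originals restored later.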
+ module Backup
+ def self.stubbed_methods(object)
+ return {} unless object.respond_to?(:_stubbed_methods)
+
+ object._stubbed_methods
+ end
+
+ def self.backup_method(object, method)
+ backed_up_methods = stubbed_methods(object)
+ backed_up_methods[method] = object.respond_to?(method) ? object.method(method) : nil
+
+ object.define_singleton_method(:_stubbed_methods) { backed_up_methods }
+ end
+
+ def self.stub_method(object, method, &block)
+ raise ArgumentError, "Block is required" unless block_given?
+
+ backup_method(object, method) unless backed_up_method?(object, method)
+ object.define_singleton_method(method, &block)
+ end
+
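+    # Removes the stubbed singleton method and, if an original method was
+    # backed up, defines it again on the object.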
+ def self.restore_method(object, method)
+ raise NotImplementedError, "#{method} has not been stubbed on #{object}" unless backed_up_method?(object, method)
+
+ object.singleton_class.remove_method(method)
+ backed_up_method = stubbed_methods(object)[method]
+
+ object.define_singleton_method(method, backed_up_method) if backed_up_method
+ end
+
+ def self.backed_up_method?(object, method)
+ stubbed_methods(object).key?(method)
+ end
+ end
+end
diff --git a/spec/support/helpers/subscription_portal_helper.rb b/spec/support/helpers/subscription_portal_helper.rb
new file mode 100644
index 00000000000..53e8f78371d
--- /dev/null
+++ b/spec/support/helpers/subscription_portal_helper.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module SubscriptionPortalHelper
+ def staging_customers_url
+ 'https://customers.staging.gitlab.com'
+ end
+
+ def prod_customers_url
+ 'https://customers.gitlab.com'
+ end
+end
+
+SubscriptionPortalHelper.prepend_mod
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 11f469c1d27..7c865dd7e11 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -53,7 +53,7 @@ module TestEnv
'wip' => 'b9238ee',
'csv' => '3dd0896',
'v1.1.0' => 'b83d6e3',
- 'add-ipython-files' => 'a867a602',
+ 'add-ipython-files' => '4963fef',
'add-pdf-file' => 'e774ebd',
'squash-large-files' => '54cec52',
'add-pdf-text-binary' => '79faa7b',
diff --git a/spec/support/matchers/background_migrations_matchers.rb b/spec/support/matchers/background_migrations_matchers.rb
index b471323dd72..c5b3e140585 100644
--- a/spec/support/matchers/background_migrations_matchers.rb
+++ b/spec/support/matchers/background_migrations_matchers.rb
@@ -65,11 +65,13 @@ RSpec::Matchers.define :be_scheduled_migration_with_multiple_args do |*expected|
end
end
-RSpec::Matchers.define :have_scheduled_batched_migration do |table_name: nil, column_name: nil, job_arguments: [], **attributes|
+RSpec::Matchers.define :have_scheduled_batched_migration do |gitlab_schema: :gitlab_main, table_name: nil, column_name: nil, job_arguments: [], **attributes|
define_method :matches? do |migration|
+ reset_column_information(Gitlab::Database::BackgroundMigration::BatchedMigration)
+
batched_migrations =
Gitlab::Database::BackgroundMigration::BatchedMigration
- .for_configuration(migration, table_name, column_name, job_arguments)
+ .for_configuration(gitlab_schema, migration, table_name, column_name, job_arguments)
expect(batched_migrations.count).to be(1)
expect(batched_migrations).to all(have_attributes(attributes)) if attributes.present?
diff --git a/spec/support/matchers/exceed_query_limit.rb b/spec/support/matchers/exceed_query_limit.rb
index b48c7f905b2..bfcaf9552b3 100644
--- a/spec/support/matchers/exceed_query_limit.rb
+++ b/spec/support/matchers/exceed_query_limit.rb
@@ -1,6 +1,68 @@
# frozen_string_literal: true
module ExceedQueryLimitHelpers
+ class QueryDiff
+ def initialize(expected, actual, show_common_queries)
+ @expected = expected
+ @actual = actual
+ @show_common_queries = show_common_queries
+ end
+
+ def diff
+ return combined_counts if @show_common_queries
+
+ combined_counts
+ .transform_values { select_suffixes_with_diffs(_1) }
+ .reject { |_prefix, suffs| suffs.empty? }
+ end
+
+ private
+
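+    # Keeps only the suffixes whose expected and actual counts differ, then
+    # drops groups that appear to differ only in query parameters.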
+ def select_suffixes_with_diffs(suffs)
+ reject_groups_with_different_parameters(reject_suffixes_with_identical_counts(suffs))
+ end
+
+ def reject_suffixes_with_identical_counts(suffs)
+ suffs.reject { |_k, counts| counts.first == counts.second }
+ end
+
+ # Eliminates groups that differ only in parameters,
+ # to make it easier to debug the output.
+ #
+ # For example, if we have a group `SELECT * FROM users...`,
+ # with the following suffixes
+ # `WHERE id = 1` (counts: N, 0)
+ # `WHERE id = 2` (counts: 0, N)
+ def reject_groups_with_different_parameters(suffs)
+ return suffs if suffs.size != 2
+
+ counts_a, counts_b = suffs.values
+ return {} if counts_a == counts_b.reverse && counts_a.include?(0)
+
+ suffs
+ end
+
+ def expected_counts
+ @expected.transform_values do |suffixes|
+ suffixes.transform_values { |n| [n, 0] }
+ end
+ end
+
+ def recorded_counts
+ @actual.transform_values do |suffixes|
+ suffixes.transform_values { |n| [0, n] }
+ end
+ end
+
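+    # Merges expected and recorded counts into [expected, actual] pairs,
+    # keyed by query prefix and then by suffix.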
+ def combined_counts
+ expected_counts.merge(recorded_counts) do |_k, exp, got|
+ exp.merge(got) do |_k, exp_counts, got_counts|
+ exp_counts.zip(got_counts).map { |a, b| a + b }
+ end
+ end
+ end
+ end
+
MARGINALIA_ANNOTATION_REGEX = %r{\s*\/\*.*\*\/}.freeze
DB_QUERY_RE = Regexp.union([
@@ -108,26 +170,7 @@ module ExceedQueryLimitHelpers
end
def diff_query_counts(expected, actual)
- expected_counts = expected.transform_values do |suffixes|
- suffixes.transform_values { |n| [n, 0] }
- end
- recorded_counts = actual.transform_values do |suffixes|
- suffixes.transform_values { |n| [0, n] }
- end
-
- combined_counts = expected_counts.merge(recorded_counts) do |_k, exp, got|
- exp.merge(got) do |_k, exp_counts, got_counts|
- exp_counts.zip(got_counts).map { |a, b| a + b }
- end
- end
-
- unless @show_common_queries
- combined_counts = combined_counts.transform_values do |suffs|
- suffs.reject { |_k, counts| counts.first == counts.second }
- end
- end
-
- combined_counts.reject { |_prefix, suffs| suffs.empty? }
+ QueryDiff.new(expected, actual, @show_common_queries).diff
end
def diff_query_group_message(query, suffixes)
@@ -141,7 +184,7 @@ module ExceedQueryLimitHelpers
def log_message
if expected.is_a?(ActiveRecord::QueryRecorder)
diff_counts = diff_query_counts(count_queries(expected), count_queries(@recorder))
- sections = diff_counts.map { |q, suffixes| diff_query_group_message(q, suffixes) }
+ sections = diff_counts.filter_map { |q, suffixes| diff_query_group_message(q, suffixes) }
<<~MSG
Query Diff:
@@ -323,7 +366,12 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
include ExceedQueryLimitHelpers
match do |block|
- verify_count(&block)
+ if block.is_a?(ActiveRecord::QueryRecorder)
+ @recorder = block
+ verify_count
+ else
+ verify_count(&block)
+ end
end
failure_message_when_negated do |actual|
diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
index 3ea6658c0c1..3d3b8c2207d 100644
--- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
@@ -2,14 +2,42 @@
Integration.available_integration_names.each do |integration|
RSpec.shared_context integration do
- include JiraServiceHelper if integration == 'jira'
+ include JiraIntegrationHelpers if integration == 'jira'
let(:dashed_integration) { integration.dasherize }
let(:integration_method) { Project.integration_association_name(integration) }
let(:integration_klass) { Integration.integration_name_to_model(integration) }
let(:integration_instance) { integration_klass.new }
- let(:integration_fields) { integration_instance.fields }
- let(:integration_attrs_list) { integration_fields.inject([]) {|arr, hash| arr << hash[:name].to_sym } }
+
+ # Build a list of all attributes that an integration supports.
+ let(:integration_attrs_list) do
+ integration_fields + integration_events + custom_attributes.fetch(integration.to_sym, [])
+ end
+
+ # Attributes defined as fields.
+ let(:integration_fields) do
+ integration_instance.fields.map { _1[:name].to_sym }
+ end
+
+ # Attributes for configurable event triggers.
+ let(:integration_events) do
+ integration_instance.configurable_events.map { IntegrationsHelper.integration_event_field_name(_1).to_sym }
+ end
+
+  # Other special cases; this list might be incomplete.
+ #
+ # Some of these won't be needed anymore after we've converted them to use the field DSL
+ # in https://gitlab.com/gitlab-org/gitlab/-/issues/354899.
+ #
+  # Others, like `comment_on_event_disabled`, are actual columns on `integrations`;
+  # maybe we should migrate these to fields as well.
+ let(:custom_attributes) do
+ {
+ jira: %i[comment_on_event_enabled jira_issue_transition_automatic jira_issue_transition_id project_key
+ issues_enabled vulnerabilities_enabled vulnerabilities_issuetype]
+ }
+ end
+
let(:integration_attrs) do
integration_attrs_list.inject({}) do |hash, k|
if k =~ /^(token*|.*_token|.*_key)/
@@ -32,9 +60,11 @@ Integration.available_integration_names.each do |integration|
hash.merge!(k => 1234)
elsif integration == 'jira' && k == :jira_issue_transition_id
hash.merge!(k => '1,2,3')
+ elsif integration == 'jira' && k == :jira_issue_transition_automatic
+ hash.merge!(k => true)
elsif integration == 'emails_on_push' && k == :recipients
hash.merge!(k => 'foo@bar.com')
- elsif integration == 'slack' || integration == 'mattermost' && k == :labels_to_be_notified_behavior
+ elsif (integration == 'slack' || integration == 'mattermost') && k == :labels_to_be_notified_behavior
hash.merge!(k => "match_any")
else
hash.merge!(k => "someword")
@@ -44,7 +74,7 @@ Integration.available_integration_names.each do |integration|
let(:licensed_features) do
{
- 'github' => :github_project_service_integration
+ 'github' => :github_integration
}
end
diff --git a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
index d9cbea58406..afb3976e3b8 100644
--- a/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/issues_finder_shared_contexts.rb
@@ -12,7 +12,7 @@ RSpec.shared_context 'IssuesFinder context' do
let_it_be(:milestone) { create(:milestone, project: project1, releases: [release]) }
let_it_be(:label) { create(:label, project: project2) }
let_it_be(:label2) { create(:label, project: project2) }
- let_it_be(:issue1, reload: true) do
+ let_it_be(:item1, reload: true) do
create(:issue,
author: user,
assignees: [user],
@@ -23,7 +23,7 @@ RSpec.shared_context 'IssuesFinder context' do
updated_at: 1.week.ago)
end
- let_it_be(:issue2, reload: true) do
+ let_it_be(:item2, reload: true) do
create(:issue,
author: user,
assignees: [user],
@@ -33,7 +33,7 @@ RSpec.shared_context 'IssuesFinder context' do
updated_at: 1.week.from_now)
end
- let_it_be(:issue3, reload: true) do
+ let_it_be(:item3, reload: true) do
create(:issue,
author: user2,
assignees: [user2],
@@ -44,8 +44,8 @@ RSpec.shared_context 'IssuesFinder context' do
updated_at: 2.weeks.from_now)
end
- let_it_be(:issue4, reload: true) { create(:issue, project: project3) }
- let_it_be(:issue5, reload: true) do
+ let_it_be(:item4, reload: true) { create(:issue, project: project3) }
+ let_it_be(:item5, reload: true) do
create(:issue,
author: user,
assignees: [user],
@@ -55,18 +55,20 @@ RSpec.shared_context 'IssuesFinder context' do
updated_at: 3.days.ago)
end
- let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue1) }
- let_it_be(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: issue2) }
- let_it_be(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: issue3) }
+ let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: item1) }
+ let_it_be(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: item2) }
+ let_it_be(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: item3) }
+
+ let(:items_model) { Issue }
end
RSpec.shared_context 'IssuesFinder#execute context' do
- let!(:closed_issue) { create(:issue, author: user2, assignees: [user2], project: project2, state: 'closed') }
- let!(:label_link) { create(:label_link, label: label, target: issue2) }
- let!(:label_link2) { create(:label_link, label: label2, target: issue3) }
+ let!(:closed_item) { create(:issue, author: user2, assignees: [user2], project: project2, state: 'closed') }
+ let!(:label_link) { create(:label_link, label: label, target: item2) }
+ let!(:label_link2) { create(:label_link, label: label2, target: item3) }
let(:search_user) { user }
let(:params) { {} }
- let(:issues) { described_class.new(search_user, params.reverse_merge(scope: scope, state: 'opened')).execute }
+ let(:items) { described_class.new(search_user, params.reverse_merge(scope: scope, state: 'opened')).execute }
before_all do
project1.add_maintainer(user)
diff --git a/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb
new file mode 100644
index 00000000000..8c5bc339db5
--- /dev/null
+++ b/spec/support/shared_contexts/finders/work_items_finder_shared_contexts.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'WorkItemsFinder context' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:project1, reload: true) { create(:project, group: group) }
+ let_it_be(:project2, reload: true) { create(:project) }
+ let_it_be(:project3, reload: true) { create(:project, group: subgroup) }
+ let_it_be(:release) { create(:release, project: project1, tag: 'v1.0.0') }
+ let_it_be(:milestone) { create(:milestone, project: project1, releases: [release]) }
+ let_it_be(:label) { create(:label, project: project2) }
+ let_it_be(:label2) { create(:label, project: project2) }
+ let_it_be(:item1, reload: true) do
+ create(:work_item,
+ author: user,
+ assignees: [user],
+ project: project1,
+ milestone: milestone,
+ title: 'gitlab',
+ created_at: 1.week.ago,
+ updated_at: 1.week.ago)
+ end
+
+ let_it_be(:item2, reload: true) do
+ create(:work_item,
+ author: user,
+ assignees: [user],
+ project: project2,
+ description: 'gitlab',
+      # IssuableFinder first filters using the outer params (the ones not inside the `not` key).
+ updated_at: 1.week.from_now)
+ end
+
+ let_it_be(:item3, reload: true) do
+ create(:work_item,
+ author: user2,
+ assignees: [user2],
+ project: project2,
+ title: 'tanuki',
+ description: 'tanuki',
+ created_at: 2.weeks.from_now,
+ updated_at: 2.weeks.from_now)
+ end
+
+ let_it_be(:item4, reload: true) { create(:work_item, project: project3) }
+ let_it_be(:item5, reload: true) do
+ create(:work_item,
+ author: user,
+ assignees: [user],
+ project: project1,
+ title: 'wotnot',
+ created_at: 3.days.ago,
+ updated_at: 3.days.ago)
+ end
+
+ let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: item1) }
+ let_it_be(:award_emoji2) { create(:award_emoji, name: 'thumbsup', user: user2, awardable: item2) }
+ let_it_be(:award_emoji3) { create(:award_emoji, name: 'thumbsdown', user: user, awardable: item3) }
+
+ let(:items_model) { WorkItem }
+end
+
+RSpec.shared_context 'WorkItemsFinder#execute context' do
+ let!(:closed_item) { create(:work_item, author: user2, assignees: [user2], project: project2, state: 'closed') }
+ let!(:label_link) { create(:label_link, label: label, target: item2) }
+ let!(:label_link2) { create(:label_link, label: label2, target: item3) }
+ let(:search_user) { user }
+ let(:params) { {} }
+ let(:items) { described_class.new(search_user, params.reverse_merge(scope: scope, state: 'opened')).execute }
+
+ before_all do
+ project1.add_maintainer(user)
+ project2.add_developer(user)
+ project2.add_developer(user2)
+ project3.add_developer(user)
+ end
+end
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index 7d51c90522a..aa8bc6fa79f 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -18,7 +18,10 @@ RSpec.shared_context 'structured_logger' do
"correlation_id" => 'cid',
"error_message" => "wrong number of arguments (2 for 3)",
"error_class" => "ArgumentError",
- "error_backtrace" => []
+ "error_backtrace" => [],
+ "exception.message" => "wrong number of arguments (2 for 3)",
+ "exception.class" => "ArgumentError",
+ "exception.backtrace" => []
}
end
@@ -28,7 +31,10 @@ RSpec.shared_context 'structured_logger' do
let(:clock_thread_cputime_start) { 0.222222299 }
let(:clock_thread_cputime_end) { 1.333333799 }
let(:start_payload) do
- job.except('error_backtrace', 'error_class', 'error_message').merge(
+ job.except(
+ 'error_message', 'error_class', 'error_backtrace',
+ 'exception.backtrace', 'exception.class', 'exception.message'
+ ).merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
'job_status' => 'start',
'pid' => Process.pid,
@@ -58,7 +64,8 @@ RSpec.shared_context 'structured_logger' do
'duration_s' => 0.0,
'completed_at' => timestamp.to_f,
'cpu_s' => 1.111112,
- 'rate_limiting_gates' => []
+ 'rate_limiting_gates' => [],
+ 'worker_id' => "process_#{Process.pid}"
)
end
@@ -68,7 +75,10 @@ RSpec.shared_context 'structured_logger' do
'job_status' => 'fail',
'error_class' => 'ArgumentError',
'error_message' => 'Something went wrong',
- 'error_backtrace' => be_a(Array).and(be_present)
+ 'error_backtrace' => be_a(Array).and(be_present),
+ 'exception.class' => 'ArgumentError',
+ 'exception.message' => 'Something went wrong',
+ 'exception.backtrace' => be_a(Array).and(be_present)
)
end
diff --git a/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb b/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
new file mode 100644
index 00000000000..de52b58982e
--- /dev/null
+++ b/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
+# for documentation on this spec.
+# rubocop:disable Layout/LineLength
+RSpec.shared_context 'with API::Markdown Snapshot shared context' do |glfm_specification_dir, glfm_example_snapshots_dir|
+ # rubocop:enable Layout/LineLength
+ include ApiHelpers
+
+ markdown_examples, html_examples = %w[markdown.yml html.yml].map do |file_name|
+ yaml = File.read("#{glfm_example_snapshots_dir}/#{file_name}")
+ YAML.safe_load(yaml, symbolize_names: true, aliases: true)
+ end
+
+ normalizations_yaml = File.read(
+ "#{glfm_specification_dir}/input/gitlab_flavored_markdown/glfm_example_normalizations.yml")
+ normalizations_by_example_name = YAML.safe_load(normalizations_yaml, symbolize_names: true, aliases: true)
+
+ if (focused_markdown_examples_string = ENV['FOCUSED_MARKDOWN_EXAMPLES'])
+ focused_markdown_examples = focused_markdown_examples_string.split(',').map(&:strip).map(&:to_sym)
+ markdown_examples.select! { |example_name| focused_markdown_examples.include?(example_name) }
+ end
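+  # Usage sketch for focusing examples (the example names here are hypothetical):
+  #   FOCUSED_MARKDOWN_EXAMPLES=example_one,example_two bundle exec rspec <snapshot spec>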
+
+ markdown_examples.each do |name, markdown|
+ context "for #{name}" do
+ let(:html) { html_examples.fetch(name).fetch(:static) }
+ let(:normalizations) { normalizations_by_example_name.dig(name, :html, :static, :snapshot) }
+
+ it "verifies conversion of GLFM to HTML", :unlimited_max_formatted_output_length do
+ api_url = api "/markdown"
+
+ # noinspection RubyResolve
+ normalized_html = normalize_html(html, normalizations)
+
+ post api_url, params: { text: markdown, gfm: true }
+ expect(response).to be_successful
+ response_body = Gitlab::Json.parse(response.body)
+ # Some requests have the HTML in the `html` key, others in the `body` key.
+ response_html = response_body['body'] ? response_body.fetch('body') : response_body.fetch('html')
+ # noinspection RubyResolve
+ normalized_response_html = normalize_html(response_html, normalizations)
+
+ expect(normalized_response_html).to eq(normalized_html)
+ end
+
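+      # Applies every regex/replacement pair from the example's normalization
+      # entries to the HTML before it is compared.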
+ def normalize_html(html, normalizations)
+ return html unless normalizations
+
+ normalized_html = html.dup
+ normalizations.each_value do |normalization_entry|
+ normalization_entry.each do |normalization|
+ regex = normalization.fetch(:regex)
+ replacement = normalization.fetch(:replacement)
+ normalized_html.gsub!(%r{#{regex}}, replacement)
+ end
+ end
+
+ normalized_html
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index ef6ff7be840..d277a45584d 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -134,13 +134,13 @@ RSpec.shared_context 'group navbar structure' do
nav_sub_items: [
_('General'),
_('Integrations'),
+ _('Webhooks'),
_('Access Tokens'),
_('Projects'),
_('Repository'),
_('CI/CD'),
_('Applications'),
- _('Packages & Registries'),
- _('Webhooks')
+ _('Packages & Registries')
]
}
end
diff --git a/spec/support/shared_contexts/policies/project_policy_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
index e50083a10e7..7396643823c 100644
--- a/spec/support/shared_contexts/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
@@ -75,7 +75,7 @@ RSpec.shared_context 'ProjectPolicy context' do
let(:base_owner_permissions) do
%i[
archive_project change_namespace change_visibility_level destroy_issue
- destroy_merge_request remove_fork_project remove_project rename_project
+ destroy_merge_request manage_owners remove_fork_project remove_project rename_project
set_issue_created_at set_issue_iid set_issue_updated_at
set_note_created_at
]
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index 15590fd10dc..0e6f6f12c3f 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -26,7 +26,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board' do
in_boards_switcher_dropdown do
- click_link board2.name
+ click_button board2.name
end
wait_for_requests
@@ -66,7 +66,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'adds a list to the none default board' do
in_boards_switcher_dropdown do
- click_link board2.name
+ click_button board2.name
end
wait_for_requests
@@ -88,7 +88,7 @@ RSpec.shared_examples 'multiple issue boards' do
expect(page).to have_selector('.board', count: 3)
in_boards_switcher_dropdown do
- click_link board.name
+ click_button board.name
end
wait_for_requests
@@ -100,7 +100,7 @@ RSpec.shared_examples 'multiple issue boards' do
assert_boards_nav_active
in_boards_switcher_dropdown do
- click_link board2.name
+ click_button board2.name
end
assert_boards_nav_active
@@ -108,7 +108,7 @@ RSpec.shared_examples 'multiple issue boards' do
it 'switches current board back' do
in_boards_switcher_dropdown do
- click_link board.name
+ click_button board.name
end
wait_for_requests
diff --git a/spec/support/shared_examples/components/pajamas_shared_examples.rb b/spec/support/shared_examples/components/pajamas_shared_examples.rb
new file mode 100644
index 00000000000..5c0ad1a1bc9
--- /dev/null
+++ b/spec/support/shared_examples/components/pajamas_shared_examples.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'it renders help text' do
+ it 'renders help text' do
+ expect(rendered_component).to have_selector('[data-testid="pajamas-component-help-text"]', text: help_text)
+ end
+end
+
+RSpec.shared_examples 'it does not render help text' do
+ it 'does not render help text' do
+ expect(rendered_component).not_to have_selector('[data-testid="pajamas-component-help-text"]')
+ end
+end
diff --git a/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb b/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb
index a79b94209f3..c6e880635aa 100644
--- a/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/environments_controller_shared_examples.rb
@@ -65,20 +65,3 @@ RSpec.shared_examples 'failed response for #cancel_auto_stop' do
end
end
end
-
-RSpec.shared_examples 'avoids N+1 queries on environment detail page' do
- render_views
-
- before do
- create_deployment_with_associations(sequence: 0)
- end
-
- it 'avoids N+1 queries' do
- control = ActiveRecord::QueryRecorder.new { get :show, params: environment_params }
-
- create_deployment_with_associations(sequence: 1)
- create_deployment_with_associations(sequence: 2)
-
- expect { get :show, params: environment_params }.not_to exceed_query_limit(control.count).with_threshold(34)
- end
-end
diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index 2ea98002de1..5faf462c23c 100644
--- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -36,6 +36,19 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST personal_access_toke
expect(session[:"#{provider}_access_token"]).to eq(token)
expect(controller).to redirect_to(status_import_url)
end
+
+ it 'passes namespace_id param as query param if it was present' do
+ namespace_id = 5
+ status_import_url = public_send("status_import_#{provider}_url", { namespace_id: namespace_id })
+
+ allow_next_instance_of(Gitlab::LegacyGithubImport::Client) do |client|
+ allow(client).to receive(:user).and_return(true)
+ end
+
+ post :personal_access_token, params: { personal_access_token: 'some-token', namespace_id: 5 }
+
+ expect(controller).to redirect_to(status_import_url)
+ end
end
RSpec.shared_examples 'a GitHub-ish import controller: GET new' do
diff --git a/spec/support/shared_examples/features/2fa_shared_examples.rb b/spec/support/shared_examples/features/2fa_shared_examples.rb
index 94c91556ea7..44f30c32472 100644
--- a/spec/support/shared_examples/features/2fa_shared_examples.rb
+++ b/spec/support/shared_examples/features/2fa_shared_examples.rb
@@ -2,6 +2,7 @@
RSpec.shared_examples 'hardware device for 2fa' do |device_type|
include Spec::Support::Helpers::Features::TwoFactorHelpers
+ include Spec::Support::Helpers::ModalHelpers
def register_device(device_type, **kwargs)
case device_type.downcase
@@ -18,7 +19,6 @@ RSpec.shared_examples 'hardware device for 2fa' do |device_type|
let(:user) { create(:user) }
before do
- stub_feature_flags(bootstrap_confirmation_modals: false)
gitlab_sign_in(user)
user.update_attribute(:otp_required_for_login, true)
end
@@ -59,7 +59,7 @@ RSpec.shared_examples 'hardware device for 2fa' do |device_type|
expect(page).to have_content(first_device.name)
expect(page).to have_content(second_device.name)
- accept_confirm { click_on 'Delete', match: :first }
+ accept_gl_confirm(button_text: 'Delete') { click_on 'Delete', match: :first }
expect(page).to have_content('Successfully deleted')
expect(page.body).not_to have_content(first_device.name)
diff --git a/spec/support/shared_examples/features/access_tokens_shared_examples.rb b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
index 215d9d3e5a8..c162ed36881 100644
--- a/spec/support/shared_examples/features/access_tokens_shared_examples.rb
+++ b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
@@ -51,7 +51,7 @@ RSpec.shared_examples 'resource access tokens creation disallowed' do |error_mes
it 'does not show access token creation form' do
visit resource_settings_access_tokens_path
- expect(page).not_to have_selector('#new_resource_access_token')
+ expect(page).not_to have_selector('#js-new-access-token-form')
end
it 'shows access token creation disabled text' do
@@ -135,7 +135,7 @@ RSpec.shared_examples 'inactive resource access tokens' do |no_active_tokens_tex
it 'allows revocation of an active token' do
visit resource_settings_access_tokens_path
- accept_confirm { click_on 'Revoke' }
+ accept_gl_confirm(button_text: 'Revoke') { click_on 'Revoke' }
expect(page).to have_selector('.settings-message')
expect(no_resource_access_tokens_message).to have_text(no_active_tokens_text)
@@ -156,7 +156,7 @@ RSpec.shared_examples 'inactive resource access tokens' do |no_active_tokens_tex
it 'allows revocation of an active token' do
visit resource_settings_access_tokens_path
- accept_confirm { click_on 'Revoke' }
+ accept_gl_confirm(button_text: 'Revoke') { click_on 'Revoke' }
expect(page).to have_selector('.settings-message')
expect(no_resource_access_tokens_message).to have_text(no_active_tokens_text)
diff --git a/spec/support/shared_examples/features/cascading_settings_shared_examples.rb b/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
index 395f4fc54e0..cb80751ff49 100644
--- a/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
+++ b/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
@@ -6,7 +6,8 @@ RSpec.shared_examples 'a cascading setting' do
visit group_path
page.within form_group_selector do
- find(setting_field_selector).check
+ enable_setting.call
+
find('[data-testid="enforce-for-all-subgroups-checkbox"]').check
end
diff --git a/spec/support/shared_examples/features/container_registry_shared_examples.rb b/spec/support/shared_examples/features/container_registry_shared_examples.rb
index 6aa7e6e6270..784f82fdda1 100644
--- a/spec/support/shared_examples/features/container_registry_shared_examples.rb
+++ b/spec/support/shared_examples/features/container_registry_shared_examples.rb
@@ -19,8 +19,7 @@ RSpec.shared_examples 'rejecting tags destruction for an importing repository on
expect(find('.modal .modal-title')).to have_content _('Remove tag')
find('.modal .modal-footer .btn-danger').click
- alert_body = find('.gl-alert-body')
- expect(alert_body).to have_content('Tags temporarily cannot be marked for deletion. Please try again in a few minutes.')
- expect(alert_body).to have_link('More details', href: help_page_path('user/packages/container_registry/index', anchor: 'tags-temporarily-cannot-be-marked-for-deletion'))
+ expect(page).to have_content('Tags temporarily cannot be marked for deletion. Please try again in a few minutes.')
+ expect(page).to have_link('More details', href: help_page_path('user/packages/container_registry/index', anchor: 'tags-temporarily-cannot-be-marked-for-deletion'))
end
end
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index c93d8e3d511..591f7973454 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -21,6 +21,31 @@ RSpec.shared_examples 'edits content using the content editor' do
end
end
+ describe 'code block' do
+ before do
+ visit(profile_preferences_path)
+
+ find('.syntax-theme').choose('Dark')
+
+ wait_for_requests
+
+ page.go_back
+ refresh
+
+ click_button 'Edit rich text'
+ end
+
+ it 'applies theme classes to code blocks' do
+ expect(page).not_to have_css('.content-editor-code-block.code.highlight.dark')
+
+ find(content_editor_testid).send_keys [:enter, :enter]
+ find(content_editor_testid).send_keys '```js ' # trigger input rule
+ find(content_editor_testid).send_keys 'var a = 0'
+
+ expect(page).to have_css('.content-editor-code-block.code.highlight.dark')
+ end
+ end
+
describe 'code block bubble menu' do
it 'shows a code block bubble menu for a code block' do
find(content_editor_testid).send_keys [:enter, :enter]
@@ -51,4 +76,49 @@ RSpec.shared_examples 'edits content using the content editor' do
expect(find('[data-testid="code-block-bubble-menu"]')).to have_text('Custom (nomnoml)')
end
end
+
+ describe 'mermaid diagram' do
+ before do
+ find(content_editor_testid).send_keys [:enter, :enter]
+
+ find(content_editor_testid).send_keys '```mermaid '
+ find(content_editor_testid).send_keys ['graph TD;', :enter, ' JohnDoe12 --> HelloWorld34']
+ end
+
+ it 'renders and updates the diagram correctly in a sandboxed iframe' do
+ iframe = find(content_editor_testid).find('iframe')
+ expect(iframe['src']).to include('/-/sandbox/mermaid')
+
+ within_frame(iframe) do
+ expect(find('svg').text).to include('JohnDoe12')
+ expect(find('svg').text).to include('HelloWorld34')
+ end
+
+ expect(iframe['height'].to_i).to be > 100
+
+ find(content_editor_testid).send_keys [:enter, ' JaneDoe34 --> HelloWorld56']
+
+ within_frame(iframe) do
+ page.has_content?('JaneDoe34')
+
+ expect(find('svg').text).to include('JaneDoe34')
+ expect(find('svg').text).to include('HelloWorld56')
+ end
+ end
+
+ it 'toggles the diagram when preview button is clicked' do
+ find('[data-testid="preview-diagram"]').click
+
+ expect(find(content_editor_testid)).not_to have_selector('iframe')
+
+ find('[data-testid="preview-diagram"]').click
+
+ iframe = find(content_editor_testid).find('iframe')
+
+ within_frame(iframe) do
+ expect(find('svg').text).to include('JohnDoe12')
+ expect(find('svg').text).to include('HelloWorld34')
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb b/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
deleted file mode 100644
index 1848b4fffd9..00000000000
--- a/spec/support/shared_examples/features/issuables_user_dropdown_behaviors_shared_examples.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'issuable user dropdown behaviors' do
- include FilteredSearchHelpers
-
- before do
- issuable # ensure we have at least one issuable
- sign_in(user_in_dropdown)
- end
-
- %w[author assignee].each do |dropdown|
- describe "#{dropdown} dropdown", :js do
- it 'only includes members of the project/group' do
- visit issuables_path
-
- filtered_search.set("#{dropdown}:=")
-
- expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).to have_content(user_in_dropdown.name)
- expect(find("#js-dropdown-#{dropdown} .filter-dropdown")).not_to have_content(user_not_in_dropdown.name)
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/features/runners_shared_examples.rb b/spec/support/shared_examples/features/runners_shared_examples.rb
index d9460c7b8f1..52f3fd60c07 100644
--- a/spec/support/shared_examples/features/runners_shared_examples.rb
+++ b/spec/support/shared_examples/features/runners_shared_examples.rb
@@ -35,11 +35,11 @@ RSpec.shared_examples 'shows and resets runner registration token' do
it 'has a registration token' do
click_on 'Click to reveal'
- expect(page.find('[data-testid="token-value"] input').value).to have_content(registration_token)
+ expect(page.find_field('token-value').value).to have_content(registration_token)
end
describe 'reset registration token' do
- let!(:old_registration_token) { find('[data-testid="token-value"] input').value }
+ let!(:old_registration_token) { find_field('token-value').value }
before do
click_on 'Reset registration token'
@@ -62,7 +62,7 @@ RSpec.shared_examples 'shows and resets runner registration token' do
end
end
-RSpec.shared_examples 'shows no runners' do
+RSpec.shared_examples 'shows no runners registered' do
it 'shows counts with 0' do
expect(page).to have_text "Online runners 0"
expect(page).to have_text "Offline runners 0"
@@ -70,13 +70,19 @@ RSpec.shared_examples 'shows no runners' do
end
it 'shows "no runners" message' do
- expect(page).to have_text 'No runners found'
+ expect(page).to have_text s_('Runners|Get started with runners')
+ end
+end
+
+RSpec.shared_examples 'shows no runners found' do
+ it 'shows "no runners" message' do
+ expect(page).to have_text s_('Runners|No results found')
end
end
RSpec.shared_examples 'shows runner in list' do
it 'does not show empty state' do
- expect(page).not_to have_content 'No runners found'
+ expect(page).not_to have_content s_('Runners|Get started with runners')
end
it 'shows runner row' do
diff --git a/spec/support/shared_examples/features/sidebar_shared_examples.rb b/spec/support/shared_examples/features/sidebar_shared_examples.rb
index af3ea0600a2..77334db6a36 100644
--- a/spec/support/shared_examples/features/sidebar_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar_shared_examples.rb
@@ -109,9 +109,8 @@ RSpec.shared_examples 'issue boards sidebar' do
wait_for_requests
expect(page).to have_content(
- _('Only project members with at least' \
- ' Reporter role can view or be' \
- ' notified about this issue.')
+ _('Only project members with at least the Reporter role, the author, and assignees' \
+ ' can view or be notified about this issue.')
)
end
end
diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
new file mode 100644
index 00000000000..622a88e8323
--- /dev/null
+++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
@@ -0,0 +1,1471 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'issues or work items finder' do |factory, execute_context|
+ describe '#execute' do
+ include_context execute_context
+
+ context 'scope: all' do
+ let(:scope) { 'all' }
+
+ it 'returns all items' do
+ expect(items).to contain_exactly(item1, item2, item3, item4, item5)
+ end
+
+ context 'user does not have read permissions' do
+ let(:search_user) { user2 }
+
+ context 'when filtering by project id' do
+ let(:params) { { project_id: project1.id } }
+
+ it 'returns no items' do
+ expect(items).to be_empty
+ end
+ end
+
+ context 'when filtering by group id' do
+ let(:params) { { group_id: group.id } }
+
+ it 'returns no items' do
+ expect(items).to be_empty
+ end
+ end
+ end
+
+ context 'assignee filtering' do
+ let(:issuables) { items }
+
+ it_behaves_like 'assignee ID filter' do
+ let(:params) { { assignee_id: user.id } }
+ let(:expected_issuables) { [item1, item2, item5] }
+ end
+
+ it_behaves_like 'assignee NOT ID filter' do
+ let(:params) { { not: { assignee_id: user.id } } }
+ let(:expected_issuables) { [item3, item4] }
+ end
+
+ it_behaves_like 'assignee OR filter' do
+ let(:params) { { or: { assignee_id: [user.id, user2.id] } } }
+ let(:expected_issuables) { [item1, item2, item3, item5] }
+ end
+
+ context 'when assignee_id does not exist' do
+ it_behaves_like 'assignee NOT ID filter' do
+ let(:params) { { not: { assignee_id: -100 } } }
+ let(:expected_issuables) { [item1, item2, item3, item4, item5] }
+ end
+ end
+
+ context 'filter by username' do
+ let_it_be(:user3) { create(:user) }
+
+ before do
+ project2.add_developer(user3)
+ item2.assignees = [user2]
+ item3.assignees = [user3]
+ end
+
+ it_behaves_like 'assignee username filter' do
+ let(:params) { { assignee_username: [user2.username] } }
+ let(:expected_issuables) { [item2] }
+ end
+
+ it_behaves_like 'assignee NOT username filter' do
+ before do
+ item2.assignees = [user2]
+ end
+
+ let(:params) { { not: { assignee_username: [user.username, user2.username] } } }
+ let(:expected_issuables) { [item3, item4] }
+ end
+
+ it_behaves_like 'assignee OR filter' do
+ let(:params) { { or: { assignee_username: [user2.username, user3.username] } } }
+ let(:expected_issuables) { [item2, item3] }
+ end
+
+ context 'when assignee_username does not exist' do
+ it_behaves_like 'assignee NOT username filter' do
+ before do
+ item2.assignees = [user2]
+ end
+
+ let(:params) { { not: { assignee_username: 'non_existent_username' } } }
+ let(:expected_issuables) { [item1, item2, item3, item4, item5] }
+ end
+ end
+ end
+
+ it_behaves_like 'no assignee filter' do
+ let_it_be(:user3) { create(:user) }
+ let(:expected_issuables) { [item4] }
+ end
+
+ it_behaves_like 'any assignee filter' do
+ let(:expected_issuables) { [item1, item2, item3, item5] }
+ end
+ end
+
+ context 'filtering by release' do
+ context 'when the release tag is none' do
+ let(:params) { { release_tag: 'none' } }
+
+ it 'returns items without releases' do
+ expect(items).to contain_exactly(item2, item3, item4, item5)
+ end
+ end
+
+ context 'when the release tag exists' do
+ let(:params) { { project_id: project1.id, release_tag: release.tag } }
+
+ it 'returns the items associated with that release' do
+ expect(items).to contain_exactly(item1)
+ end
+ end
+ end
+
+ context 'filtering by projects' do
+ context 'when projects are passed in a list of ids' do
+ let(:params) { { projects: [project1.id] } }
+
+ it 'returns the item belonging to the projects' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+
+ context 'when projects are passed in a subquery' do
+ let(:params) { { projects: Project.id_in(project1.id) } }
+
+ it 'returns the item belonging to the projects' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+ end
+
+ context 'filtering by group_id' do
+ let(:params) { { group_id: group.id } }
+
+ context 'when include_subgroup param not set' do
+ it 'returns all group items' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+
+ context 'when projects outside the group are passed' do
+ let(:params) { { group_id: group.id, projects: [project2.id] } }
+
+ it 'returns no items' do
+ expect(items).to be_empty
+ end
+ end
+
+ context 'when projects of the group are passed' do
+ let(:params) { { group_id: group.id, projects: [project1.id] } }
+
+ it 'returns the item within the group and projects' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+
+ context 'when projects of the group are passed as a subquery' do
+ let(:params) { { group_id: group.id, projects: Project.id_in(project1.id) } }
+
+ it 'returns the item within the group and projects' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+
+ context 'when release_tag is passed as a parameter' do
+ let(:params) { { group_id: group.id, release_tag: 'dne-release-tag' } }
+
+ it 'ignores the release_tag parameter' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+ end
+
+ context 'when include_subgroup param is true' do
+ before do
+ params[:include_subgroups] = true
+ end
+
+ it 'returns all group and subgroup items' do
+ expect(items).to contain_exactly(item1, item4, item5)
+ end
+
+ context 'when mixed projects are passed' do
+ let(:params) { { group_id: group.id, projects: [project2.id, project3.id] } }
+
+ it 'returns the item within the group and projects' do
+ expect(items).to contain_exactly(item4)
+ end
+ end
+ end
+ end
+
+ context 'filtering by author' do
+ context 'by author ID' do
+ let(:params) { { author_id: user2.id } }
+
+ it 'returns items created by that user' do
+ expect(items).to contain_exactly(item3)
+ end
+ end
+
+ context 'using OR' do
+ let(:item6) { create(factory, project: project2) }
+ let(:params) { { or: { author_username: [item3.author.username, item6.author.username] } } }
+
+ it 'returns items created by any of the given users' do
+ expect(items).to contain_exactly(item3, item6)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(or_issuable_queries: false)
+ end
+
+ it 'does not add any filter' do
+ expect(items).to contain_exactly(item1, item2, item3, item4, item5, item6)
+ end
+ end
+ end
+
+ context 'filtering by NOT author ID' do
+ let(:params) { { not: { author_id: user2.id } } }
+
+ it 'returns items not created by that user' do
+ expect(items).to contain_exactly(item1, item2, item4, item5)
+ end
+ end
+
+ context 'filtering by nonexistent author ID and issue term using CTE for search' do
+ let(:params) do
+ {
+ author_id: 'does-not-exist',
+ search: 'git',
+ attempt_group_search_optimizations: true
+ }
+ end
+
+ it 'returns no results' do
+ expect(items).to be_empty
+ end
+ end
+ end
+
+ context 'filtering by milestone' do
+ let(:params) { { milestone_title: milestone.title } }
+
+ it 'returns items assigned to that milestone' do
+ expect(items).to contain_exactly(item1)
+ end
+ end
+
+ context 'filtering by not milestone' do
+ let(:params) { { not: { milestone_title: milestone.title } } }
+
+ it 'returns items not assigned to that milestone' do
+ expect(items).to contain_exactly(item2, item3, item4, item5)
+ end
+ end
+
+ context 'filtering by group milestone' do
+ let!(:group) { create(:group, :public) }
+ let(:group_milestone) { create(:milestone, group: group) }
+ let!(:group_member) { create(:group_member, group: group, user: user) }
+ let(:params) { { milestone_title: group_milestone.title } }
+
+ before do
+ project2.update!(namespace: group)
+ item2.update!(milestone: group_milestone)
+ item3.update!(milestone: group_milestone)
+ end
+
+ it 'returns items assigned to that group milestone' do
+ expect(items).to contain_exactly(item2, item3)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { milestone_title: group_milestone.title } } }
+
+ it 'returns items not assigned to that group milestone' do
+ expect(items).to contain_exactly(item1, item4, item5)
+ end
+ end
+ end
+
+ context 'filtering by no milestone' do
+ let(:params) { { milestone_title: 'None' } }
+
+ it 'returns items with no milestone' do
+ expect(items).to contain_exactly(item2, item3, item4, item5)
+ end
+
+ it 'returns items with no milestone (deprecated)' do
+ params[:milestone_title] = Milestone::None.title
+
+ expect(items).to contain_exactly(item2, item3, item4, item5)
+ end
+ end
+
+ context 'filtering by any milestone' do
+ let(:params) { { milestone_title: 'Any' } }
+
+ it 'returns items with any assigned milestone' do
+ expect(items).to contain_exactly(item1)
+ end
+
+ it 'returns items with any assigned milestone (deprecated)' do
+ params[:milestone_title] = Milestone::Any.title
+
+ expect(items).to contain_exactly(item1)
+ end
+ end
+
+ context 'filtering by upcoming milestone' do
+ let(:params) { { milestone_title: Milestone::Upcoming.name } }
+
+ let!(:group) { create(:group, :public) }
+ let!(:group_member) { create(:group_member, group: group, user: user) }
+
+ let(:project_no_upcoming_milestones) { create(:project, :public) }
+ let(:project_next_1_1) { create(:project, :public) }
+ let(:project_next_8_8) { create(:project, :public) }
+ let(:project_in_group) { create(:project, :public, namespace: group) }
+
+ let(:yesterday) { Date.current - 1.day }
+ let(:tomorrow) { Date.current + 1.day }
+ let(:two_days_from_now) { Date.current + 2.days }
+ let(:ten_days_from_now) { Date.current + 10.days }
+
+ let(:milestones) do
+ [
+ create(:milestone, :closed, project: project_no_upcoming_milestones),
+ create(:milestone, project: project_next_1_1, title: '1.1', due_date: two_days_from_now),
+ create(:milestone, project: project_next_1_1, title: '8.9', due_date: ten_days_from_now),
+ create(:milestone, project: project_next_8_8, title: '1.2', due_date: yesterday),
+ create(:milestone, project: project_next_8_8, title: '8.8', due_date: tomorrow),
+ create(:milestone, group: group, title: '9.9', due_date: tomorrow)
+ ]
+ end
+
+ let!(:created_items) do
+ milestones.map do |milestone|
+ create(factory, project: milestone.project || project_in_group,
+ milestone: milestone, author: user, assignees: [user])
+ end
+ end
+
+ it 'returns items in the upcoming milestone for each project or group' do
+ expect(items.map { |item| item.milestone.title })
+ .to contain_exactly('1.1', '8.8', '9.9')
+ expect(items.map { |item| item.milestone.due_date })
+ .to contain_exactly(tomorrow, two_days_from_now, tomorrow)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { milestone_title: Milestone::Upcoming.name } } }
+
+        it 'returns items not in upcoming milestones for each project or group, but only those with a due date' do
+ target_items = created_items.select do |item|
+ item.milestone&.due_date && item.milestone.due_date <= Date.current
+ end
+
+ expect(items).to contain_exactly(*target_items)
+ end
+ end
+ end
+
+ context 'filtering by started milestone' do
+ let(:params) { { milestone_title: Milestone::Started.name } }
+
+ let(:project_no_started_milestones) { create(:project, :public) }
+ let(:project_started_1_and_2) { create(:project, :public) }
+ let(:project_started_8) { create(:project, :public) }
+
+ let(:yesterday) { Date.current - 1.day }
+ let(:tomorrow) { Date.current + 1.day }
+ let(:two_days_ago) { Date.current - 2.days }
+ let(:three_days_ago) { Date.current - 3.days }
+
+ let(:milestones) do
+ [
+ create(:milestone, project: project_no_started_milestones, start_date: tomorrow),
+ create(:milestone, project: project_started_1_and_2, title: '1.0', start_date: two_days_ago),
+ create(:milestone, project: project_started_1_and_2, title: '2.0', start_date: yesterday),
+ create(:milestone, project: project_started_1_and_2, title: '3.0', start_date: tomorrow),
+ create(:milestone, :closed, project: project_started_1_and_2, title: '4.0', start_date: three_days_ago),
+ create(:milestone, :closed, project: project_started_8, title: '6.0', start_date: three_days_ago),
+ create(:milestone, project: project_started_8, title: '7.0'),
+ create(:milestone, project: project_started_8, title: '8.0', start_date: yesterday),
+ create(:milestone, project: project_started_8, title: '9.0', start_date: tomorrow)
+ ]
+ end
+
+ before do
+ milestones.each do |milestone|
+ create(factory, project: milestone.project, milestone: milestone, author: user, assignees: [user])
+ end
+ end
+
+ it 'returns items in the started milestones for each project' do
+ expect(items.map { |item| item.milestone.title })
+ .to contain_exactly('1.0', '2.0', '8.0')
+ expect(items.map { |item| item.milestone.start_date })
+ .to contain_exactly(two_days_ago, yesterday, yesterday)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { milestone_title: Milestone::Started.name } } }
+
+ it 'returns items not in the started milestones for each project' do
+ target_items = items_model.where(milestone: Milestone.not_started)
+
+ expect(items).to contain_exactly(*target_items)
+ end
+ end
+ end
+
+ context 'filtering by label' do
+ let(:params) { { label_name: label.title } }
+
+ it 'returns items with that label' do
+ expect(items).to contain_exactly(item2)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { label_name: label.title } } }
+
+ it 'returns items that do not have that label' do
+ expect(items).to contain_exactly(item1, item3, item4, item5)
+ end
+
+      # IssuableFinder first filters using the outer params (the ones not inside the `not` key).
+      # Afterwards, it applies the `not` params to that result set. This means that params inside `not`
+      # do not take precedence over the outer params with the same name.
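+      # For example, a hedged sketch of the behaviour the contexts below verify (not an
+      # assertion about the finder's internals):
+      #
+      #   described_class.new(user, label_name: label2.title, not: { label_name: label.title }).execute
+      #   # first narrows the relation to items labelled `label2`, then excludes `label` items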
+ context 'shadowing the same outside param' do
+ let(:params) { { label_name: label2.title, not: { label_name: label.title } } }
+
+ it 'does not take precedence over labels outside NOT' do
+ expect(items).to contain_exactly(item3)
+ end
+ end
+
+ context 'further filtering outside params' do
+ let(:params) { { label_name: label2.title, not: { assignee_username: user2.username } } }
+
+ it 'further filters on the returned resultset' do
+ expect(items).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'filtering by multiple labels' do
+ let(:params) { { label_name: [label.title, label2.title].join(',') } }
+ let(:label2) { create(:label, project: project2) }
+
+ before do
+ create(:label_link, label: label2, target: item2)
+ end
+
+ it 'returns the unique items with all those labels' do
+ expect(items).to contain_exactly(item2)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
+
+ it 'returns items that do not have any of the labels provided' do
+ expect(items).to contain_exactly(item1, item4, item5)
+ end
+ end
+ end
+
+ context 'filtering by a label that includes any or none in the title' do
+ let(:params) { { label_name: [label.title, label2.title].join(',') } }
+ let(:label) { create(:label, title: 'any foo', project: project2) }
+ let(:label2) { create(:label, title: 'bar none', project: project2) }
+
+ before do
+ create(:label_link, label: label2, target: item2)
+ end
+
+ it 'returns the unique items with all those labels' do
+ expect(items).to contain_exactly(item2)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { label_name: [label.title, label2.title].join(',') } } }
+
+ it 'returns items that do not have ANY ONE of the labels provided' do
+ expect(items).to contain_exactly(item1, item4, item5)
+ end
+ end
+ end
+
+ context 'filtering by no label' do
+ let(:params) { { label_name: described_class::Params::FILTER_NONE } }
+
+ it 'returns items with no labels' do
+ expect(items).to contain_exactly(item1, item4, item5)
+ end
+ end
+
+ context 'filtering by any label' do
+ let(:params) { { label_name: described_class::Params::FILTER_ANY } }
+
+ it 'returns items that have one or more label' do
+ create_list(:label_link, 2, label: create(:label, project: project2), target: item3)
+
+ expect(items).to contain_exactly(item2, item3)
+ end
+ end
+
+ context 'when the same label exists on project and group levels' do
+ let(:item1) { create(factory, project: project1) }
+ let(:item2) { create(factory, project: project1) }
+
+    # Skipping validation to reproduce a "real-world" scenario.
+ # We still have legacy labels on PRD that have the same title on the group and project levels, example: `bug`
+ let(:project_label) do
+ build(:label, title: 'somelabel', project: project1).tap { |r| r.save!(validate: false) }
+ end
+
+ let(:group_label) { create(:group_label, title: 'somelabel', group: project1.group) }
+
+ let(:params) { { label_name: 'somelabel' } }
+
+ before do
+ create(:label_link, label: group_label, target: item1)
+ create(:label_link, label: project_label, target: item2)
+ end
+
+ it 'finds both item records' do
+ expect(items).to contain_exactly(item1, item2)
+ end
+ end
+
+ context 'filtering by item term' do
+ let(:params) { { search: search_term } }
+
+ let_it_be(:english) { create(factory, project: project1, title: 'title', description: 'something english') }
+
+ let_it_be(:japanese) do
+ create(factory, project: project1, title: '日本語 title', description: 'another english description')
+ end
+
+ context 'with latin search term' do
+ let(:search_term) { 'title english' }
+
+ it 'returns matching items' do
+ expect(items).to contain_exactly(english, japanese)
+ end
+ end
+
+ context 'with non-latin search term' do
+ let(:search_term) { '日本語' }
+
+ it 'returns matching items' do
+ expect(items).to contain_exactly(japanese)
+ end
+ end
+
+ context 'when full-text search is disabled' do
+ let(:search_term) { 'somet' }
+
+ before do
+ stub_feature_flags(issues_full_text_search: false)
+ end
+
+ it 'allows partial word matches' do
+ expect(items).to contain_exactly(english)
+ end
+ end
+
+ context 'with anonymous user' do
+ let_it_be(:public_project) { create(:project, :public, group: subgroup) }
+ let_it_be(:item6) { create(factory, project: public_project, title: 'tanuki') }
+ let_it_be(:item7) { create(factory, project: public_project, title: 'ikunat') }
+
+ let(:search_user) { nil }
+ let(:params) { { search: 'tanuki' } }
+
+ context 'with disable_anonymous_search feature flag enabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: true)
+ end
+
+ it 'does not perform search' do
+ expect(items).to contain_exactly(item6, item7)
+ end
+ end
+
+ context 'with disable_anonymous_search feature flag disabled' do
+ before do
+ stub_feature_flags(disable_anonymous_search: false)
+ end
+
+ it 'finds one public item' do
+ expect(items).to contain_exactly(item6)
+ end
+ end
+ end
+ end
+
+ context 'filtering by item term in title' do
+ let(:params) { { search: 'git', in: 'title' } }
+
+ it 'returns items with title match for search term' do
+ expect(items).to contain_exactly(item1)
+ end
+ end
+
+ context 'filtering by items iids' do
+ let(:params) { { iids: [item3.iid] } }
+
+ it 'returns items where iids match' do
+ expect(items).to contain_exactly(item3, item5)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { iids: [item3.iid] } } }
+
+ it 'returns items with no iids match' do
+ expect(items).to contain_exactly(item1, item2, item4)
+ end
+ end
+ end
+
+ context 'filtering by state' do
+ context 'with opened' do
+ let(:params) { { state: 'opened' } }
+
+ it 'returns only opened items' do
+ expect(items).to contain_exactly(item1, item2, item3, item4, item5)
+ end
+ end
+
+ context 'with closed' do
+ let(:params) { { state: 'closed' } }
+
+ it 'returns only closed items' do
+ expect(items).to contain_exactly(closed_item)
+ end
+ end
+
+ context 'with all' do
+ let(:params) { { state: 'all' } }
+
+ it 'returns all items' do
+ expect(items).to contain_exactly(item1, item2, item3, closed_item, item4, item5)
+ end
+ end
+
+ context 'with invalid state' do
+ let(:params) { { state: 'invalid_state' } }
+
+ it 'returns all items' do
+ expect(items).to contain_exactly(item1, item2, item3, closed_item, item4, item5)
+ end
+ end
+ end
+
+ context 'filtering by created_at' do
+ context 'through created_after' do
+ let(:params) { { created_after: item3.created_at } }
+
+ it 'returns items created on or after the given date' do
+ expect(items).to contain_exactly(item3)
+ end
+ end
+
+ context 'through created_before' do
+ let(:params) { { created_before: item1.created_at } }
+
+ it 'returns items created on or before the given date' do
+ expect(items).to contain_exactly(item1)
+ end
+ end
+
+ context 'through created_after and created_before' do
+ let(:params) { { created_after: item2.created_at, created_before: item3.created_at } }
+
+ it 'returns items created between the given dates' do
+ expect(items).to contain_exactly(item2, item3)
+ end
+ end
+ end
+
+ context 'filtering by updated_at' do
+ context 'through updated_after' do
+ let(:params) { { updated_after: item3.updated_at } }
+
+ it 'returns items updated on or after the given date' do
+ expect(items).to contain_exactly(item3)
+ end
+ end
+
+ context 'through updated_before' do
+ let(:params) { { updated_before: item1.updated_at } }
+
+ it 'returns items updated on or before the given date' do
+ expect(items).to contain_exactly(item1)
+ end
+ end
+
+ context 'through updated_after and updated_before' do
+ let(:params) { { updated_after: item2.updated_at, updated_before: item3.updated_at } }
+
+ it 'returns items updated between the given dates' do
+ expect(items).to contain_exactly(item2, item3)
+ end
+ end
+ end
+
+ context 'filtering by closed_at' do
+ let!(:closed_item1) { create(factory, project: project1, state: :closed, closed_at: 1.week.ago) }
+ let!(:closed_item2) { create(factory, project: project2, state: :closed, closed_at: 1.week.from_now) }
+ let!(:closed_item3) { create(factory, project: project2, state: :closed, closed_at: 2.weeks.from_now) }
+
+ context 'through closed_after' do
+ let(:params) { { state: :closed, closed_after: closed_item3.closed_at } }
+
+ it 'returns items closed on or after the given date' do
+ expect(items).to contain_exactly(closed_item3)
+ end
+ end
+
+ context 'through closed_before' do
+ let(:params) { { state: :closed, closed_before: closed_item1.closed_at } }
+
+ it 'returns items closed on or before the given date' do
+ expect(items).to contain_exactly(closed_item1)
+ end
+ end
+
+ context 'through closed_after and closed_before' do
+ let(:params) do
+ { state: :closed, closed_after: closed_item2.closed_at, closed_before: closed_item3.closed_at }
+ end
+
+ it 'returns items closed between the given dates' do
+ expect(items).to contain_exactly(closed_item2, closed_item3)
+ end
+ end
+ end
+
+ context 'filtering by reaction name' do
+ context 'user searches by no reaction' do
+ let(:params) { { my_reaction_emoji: 'None' } }
+
+ it 'returns items that the user did not react to' do
+ expect(items).to contain_exactly(item2, item4, item5)
+ end
+ end
+
+ context 'user searches by any reaction' do
+ let(:params) { { my_reaction_emoji: 'Any' } }
+
+ it 'returns items that the user reacted to' do
+ expect(items).to contain_exactly(item1, item3)
+ end
+ end
+
+ context 'user searches by "thumbsup" reaction' do
+ let(:params) { { my_reaction_emoji: 'thumbsup' } }
+
+ it 'returns items that the user thumbsup to' do
+ expect(items).to contain_exactly(item1)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { my_reaction_emoji: 'thumbsup' } } }
+
+ it 'returns items that the user did not thumbsup to' do
+ expect(items).to contain_exactly(item2, item3, item4, item5)
+ end
+ end
+ end
+
+ context 'user2 searches by "thumbsup" reaction' do
+ let(:search_user) { user2 }
+
+ let(:params) { { my_reaction_emoji: 'thumbsup' } }
+
+ it 'returns items that the user2 thumbsup to' do
+ expect(items).to contain_exactly(item2)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { my_reaction_emoji: 'thumbsup' } } }
+
+        it 'returns items that the user2 did not thumbsup to' do
+ expect(items).to contain_exactly(item3)
+ end
+ end
+ end
+
+ context 'user searches by "thumbsdown" reaction' do
+ let(:params) { { my_reaction_emoji: 'thumbsdown' } }
+
+ it 'returns items that the user thumbsdown to' do
+ expect(items).to contain_exactly(item3)
+ end
+
+ context 'using NOT' do
+ let(:params) { { not: { my_reaction_emoji: 'thumbsdown' } } }
+
+        it 'returns items that the user did not thumbsdown to' do
+ expect(items).to contain_exactly(item1, item2, item4, item5)
+ end
+ end
+ end
+ end
+
+ context 'filtering by confidential' do
+ let_it_be(:confidential_item) { create(factory, project: project1, confidential: true) }
+
+ context 'no filtering' do
+ it 'returns all items' do
+ expect(items).to contain_exactly(item1, item2, item3, item4, item5, confidential_item)
+ end
+ end
+
+ context 'user filters confidential items' do
+ let(:params) { { confidential: true } }
+
+ it 'returns only confidential items' do
+ expect(items).to contain_exactly(confidential_item)
+ end
+ end
+
+ context 'user filters only public items' do
+ let(:params) { { confidential: false } }
+
+ it 'returns only public items' do
+ expect(items).to contain_exactly(item1, item2, item3, item4, item5)
+ end
+ end
+ end
+
+ context 'filtering by item type' do
+ let_it_be(:incident_item) { create(factory, issue_type: :incident, project: project1) }
+
+ context 'no type given' do
+ let(:params) { { issue_types: [] } }
+
+ it 'returns all items' do
+ expect(items).to contain_exactly(incident_item, item1, item2, item3, item4, item5)
+ end
+ end
+
+ context 'incident type' do
+ let(:params) { { issue_types: ['incident'] } }
+
+ it 'returns incident items' do
+ expect(items).to contain_exactly(incident_item)
+ end
+ end
+
+ context 'item type' do
+ let(:params) { { issue_types: ['issue'] } }
+
+ it 'returns all items with type issue' do
+ expect(items).to contain_exactly(item1, item2, item3, item4, item5)
+ end
+ end
+
+ context 'multiple params' do
+ let(:params) { { issue_types: %w(issue incident) } }
+
+ it 'returns all items' do
+ expect(items).to contain_exactly(incident_item, item1, item2, item3, item4, item5)
+ end
+ end
+
+ context 'without array' do
+ let(:params) { { issue_types: 'incident' } }
+
+ it 'returns incident items' do
+ expect(items).to contain_exactly(incident_item)
+ end
+ end
+
+ context 'invalid params' do
+ let(:params) { { issue_types: ['nonsense'] } }
+
+ it 'returns no items' do
+ expect(items).to eq(items_model.none)
+ end
+ end
+ end
+
+ context 'filtering by crm contact' do
+ let_it_be(:contact1) { create(:contact, group: group) }
+ let_it_be(:contact2) { create(:contact, group: group) }
+
+ let_it_be(:contact1_item1) { create(factory, project: project1) }
+ let_it_be(:contact1_item2) { create(factory, project: project1) }
+ let_it_be(:contact2_item1) { create(factory, project: project1) }
+
+ let(:params) { { crm_contact_id: contact1.id } }
+
+    it 'returns items for that contact' do
+ create(:issue_customer_relations_contact, issue: contact1_item1, contact: contact1)
+ create(:issue_customer_relations_contact, issue: contact1_item2, contact: contact1)
+ create(:issue_customer_relations_contact, issue: contact2_item1, contact: contact2)
+
+ expect(items).to contain_exactly(contact1_item1, contact1_item2)
+ end
+ end
+
+ context 'filtering by crm organization' do
+ let_it_be(:organization) { create(:organization, group: group) }
+ let_it_be(:contact1) { create(:contact, group: group, organization: organization) }
+ let_it_be(:contact2) { create(:contact, group: group, organization: organization) }
+
+ let_it_be(:contact1_item1) { create(factory, project: project1) }
+ let_it_be(:contact1_item2) { create(factory, project: project1) }
+ let_it_be(:contact2_item1) { create(factory, project: project1) }
+
+ let(:params) { { crm_organization_id: organization.id } }
+
+    it 'returns items for that organization' do
+ create(:issue_customer_relations_contact, issue: contact1_item1, contact: contact1)
+ create(:issue_customer_relations_contact, issue: contact1_item2, contact: contact1)
+ create(:issue_customer_relations_contact, issue: contact2_item1, contact: contact2)
+
+ expect(items).to contain_exactly(contact1_item1, contact1_item2, contact2_item1)
+ end
+ end
+
+ context 'when the user is unauthorized' do
+ let(:search_user) { nil }
+
+ it 'returns no results' do
+ expect(items).to be_empty
+ end
+ end
+
+ context 'when the user can see some, but not all, items' do
+ let(:search_user) { user2 }
+
+ it 'returns only items they can see' do
+ expect(items).to contain_exactly(item2, item3)
+ end
+ end
+
+ it 'finds items user can access due to group' do
+ group = create(:group)
+ project = create(:project, group: group)
+ item = create(factory, project: project)
+ group.add_user(user, :owner)
+
+ expect(items).to include(item)
+ end
+ end
+
+ context 'personal scope' do
+ let(:scope) { 'assigned_to_me' }
+
+ it 'returns item assigned to the user' do
+ expect(items).to contain_exactly(item1, item2, item5)
+ end
+
+ context 'filtering by project' do
+ let(:params) { { project_id: project1.id } }
+
+ it 'returns items assigned to the user in that project' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+ end
+
+ context 'when project restricts items' do
+ let(:scope) { nil }
+
+ it "doesn't return team-only items to non team members" do
+ project = create(:project, :public, :issues_private)
+ item = create(factory, project: project)
+
+ expect(items).not_to include(item)
+ end
+
+ it "doesn't return items if feature disabled" do
+ [project1, project2, project3].each do |project|
+ project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
+ end
+
+ expect(items.count).to eq 0
+ end
+ end
+
+ context 'external authorization' do
+ it_behaves_like 'a finder with external authorization service' do
+ let!(:subject) { create(factory, project: project) }
+ let(:project_params) { { project_id: project.id } }
+ end
+ end
+
+ context 'filtering by due date' do
+ let_it_be(:item_due_today) { create(factory, project: project1, due_date: Date.current) }
+ let_it_be(:item_due_tomorrow) { create(factory, project: project1, due_date: 1.day.from_now) }
+ let_it_be(:item_overdue) { create(factory, project: project1, due_date: 2.days.ago) }
+ let_it_be(:item_due_soon) { create(factory, project: project1, due_date: 2.days.from_now) }
+
+ let(:scope) { 'all' }
+ let(:base_params) { { project_id: project1.id } }
+
+ context 'with param set to no due date' do
+ let(:params) { base_params.merge(due_date: items_model::NoDueDate.name) }
+
+ it 'returns items with no due date' do
+ expect(items).to contain_exactly(item1, item5)
+ end
+ end
+
+ context 'with param set to any due date' do
+ let(:params) { base_params.merge(due_date: items_model::AnyDueDate.name) }
+
+ it 'returns items with any due date' do
+ expect(items).to contain_exactly(item_due_today, item_due_tomorrow, item_overdue, item_due_soon)
+ end
+ end
+
+ context 'with param set to due today' do
+ let(:params) { base_params.merge(due_date: items_model::DueToday.name) }
+
+ it 'returns items due today' do
+ expect(items).to contain_exactly(item_due_today)
+ end
+ end
+
+ context 'with param set to due tomorrow' do
+ let(:params) { base_params.merge(due_date: items_model::DueTomorrow.name) }
+
+      it 'returns items due tomorrow' do
+ expect(items).to contain_exactly(item_due_tomorrow)
+ end
+ end
+
+ context 'with param set to overdue' do
+ let(:params) { base_params.merge(due_date: items_model::Overdue.name) }
+
+ it 'returns overdue items' do
+ expect(items).to contain_exactly(item_overdue)
+ end
+ end
+
+ context 'with param set to next month and previous two weeks' do
+ let(:params) { base_params.merge(due_date: items_model::DueNextMonthAndPreviousTwoWeeks.name) }
+
+ it 'returns items due in the previous two weeks and next month' do
+ expect(items).to contain_exactly(item_due_today, item_due_tomorrow, item_overdue, item_due_soon)
+ end
+ end
+
+ context 'with invalid param' do
+ let(:params) { base_params.merge(due_date: 'foo') }
+
+ it 'returns no items' do
+ expect(items).to be_empty
+ end
+ end
+ end
+ end
+
+ describe '#row_count', :request_store do
+ let_it_be(:admin) { create(:admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns the number of rows for the default state' do
+ finder = described_class.new(admin)
+
+ expect(finder.row_count).to eq(5)
+ end
+
+ it 'returns the number of rows for a given state' do
+ finder = described_class.new(admin, state: 'closed')
+
+ expect(finder.row_count).to be_zero
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'returns no rows' do
+ finder = described_class.new(admin)
+
+ expect(finder.row_count).to be_zero
+ end
+ end
+
+ it 'returns -1 if the query times out' do
+ finder = described_class.new(admin)
+
+ expect_next_instance_of(described_class) do |subfinder|
+ expect(subfinder).to receive(:execute).and_raise(ActiveRecord::QueryCanceled)
+ end
+
+ expect(finder.row_count).to eq(-1)
+ end
+ end
+
+ describe '#with_confidentiality_access_check' do
+ let(:guest) { create(:user) }
+
+ let_it_be(:authorized_user) { create(:user) }
+ let_it_be(:banned_user) { create(:user, :banned) }
+ let_it_be(:project) { create(:project, namespace: authorized_user.namespace) }
+ let_it_be(:public_item) { create(factory, project: project) }
+ let_it_be(:confidential_item) { create(factory, project: project, confidential: true) }
+ let_it_be(:hidden_item) { create(factory, project: project, author: banned_user) }
+
+ shared_examples 'returns public, does not return hidden or confidential' do
+ it 'returns only public items' do
+ expect(subject).to include(public_item)
+ expect(subject).not_to include(confidential_item, hidden_item)
+ end
+ end
+
+ shared_examples 'returns public and confidential, does not return hidden' do
+ it 'returns only public and confidential items' do
+ expect(subject).to include(public_item, confidential_item)
+ expect(subject).not_to include(hidden_item)
+ end
+ end
+
+ shared_examples 'returns public and hidden, does not return confidential' do
+ it 'returns only public and hidden items' do
+ expect(subject).to include(public_item, hidden_item)
+ expect(subject).not_to include(confidential_item)
+ end
+ end
+
+ shared_examples 'returns public, confidential, and hidden' do
+ it 'returns all items' do
+ expect(subject).to include(public_item, confidential_item, hidden_item)
+ end
+ end
+
+ context 'when no project filter is given' do
+ let(:params) { {} }
+
+ context 'for an anonymous user' do
+ subject { described_class.new(nil, params).with_confidentiality_access_check }
+
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+ end
+
+ context 'for a user without project membership' do
+ subject { described_class.new(user, params).with_confidentiality_access_check }
+
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+ end
+
+ context 'for a guest user' do
+ subject { described_class.new(guest, params).with_confidentiality_access_check }
+
+ before do
+ project.add_guest(guest)
+ end
+
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+ end
+
+ context 'for a project member with access to view confidential items' do
+ subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
+
+ it_behaves_like 'returns public and confidential, does not return hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
+ end
+ end
+
+ context 'for an admin' do
+ let(:admin_user) { create(:user, :admin) }
+
+ subject { described_class.new(admin_user, params).with_confidentiality_access_check }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it_behaves_like 'returns public, confidential, and hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+ end
+ end
+ end
+
+ context 'when searching within a specific project' do
+ let(:params) { { project_id: project.id } }
+
+ context 'for an anonymous user' do
+ subject { described_class.new(nil, params).with_confidentiality_access_check }
+
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+
+ it 'does not filter by confidentiality' do
+ expect(items_model).not_to receive(:where).with(a_string_matching('confidential'), anything)
+ subject
+ end
+ end
+
+ context 'for a user without project membership' do
+ subject { described_class.new(user, params).with_confidentiality_access_check }
+
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+
+ it 'filters by confidentiality' do
+ expect(subject.to_sql).to match("issues.confidential")
+ end
+ end
+
+ context 'for a guest user' do
+ subject { described_class.new(guest, params).with_confidentiality_access_check }
+
+ before do
+ project.add_guest(guest)
+ end
+
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+
+ it 'filters by confidentiality' do
+ expect(subject.to_sql).to match("issues.confidential")
+ end
+ end
+
+ context 'for a project member with access to view confidential items' do
+ subject { described_class.new(authorized_user, params).with_confidentiality_access_check }
+
+ it_behaves_like 'returns public and confidential, does not return hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
+ end
+
+ it 'does not filter by confidentiality' do
+ expect(items_model).not_to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
+ end
+
+ context 'for an admin' do
+ let(:admin_user) { create(:user, :admin) }
+
+ subject { described_class.new(admin_user, params).with_confidentiality_access_check }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it_behaves_like 'returns public, confidential, and hidden'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public, confidential, and hidden'
+ end
+
+ it 'does not filter by confidentiality' do
+ expect(items_model).not_to receive(:where).with(a_string_matching('confidential'), anything)
+
+ subject
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it_behaves_like 'returns public, does not return hidden or confidential'
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it_behaves_like 'returns public and hidden, does not return confidential'
+ end
+
+ it 'filters by confidentiality' do
+ expect(subject.to_sql).to match("issues.confidential")
+ end
+ end
+ end
+ end
+ end
+
+ describe '#use_cte_for_search?' do
+ let(:finder) { described_class.new(nil, params) }
+
+ context 'when there is no search param' do
+ let(:params) { { attempt_group_search_optimizations: true } }
+
+ it 'returns false' do
+ expect(finder.use_cte_for_search?).to be_falsey
+ end
+ end
+
+ context 'when the force_cte param is falsey' do
+ let(:params) { { search: '日本語' } }
+
+ it 'returns false' do
+ expect(finder.use_cte_for_search?).to be_falsey
+ end
+ end
+
+ context 'when a non-simple sort is given' do
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: 'popularity' } }
+
+ it 'returns false' do
+ expect(finder.use_cte_for_search?).to be_falsey
+ end
+ end
+
+ context 'when all conditions are met' do
+ context "uses group search optimization" do
+ let(:params) { { search: '日本語', attempt_group_search_optimizations: true } }
+
+ it 'returns true' do
+ expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql)
+ .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
+
+ context "uses project search optimization" do
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true } }
+
+ it 'returns true' do
+ expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql)
+ .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
+
+ context 'with simple sort' do
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: 'updated_desc' } }
+
+ it 'returns true' do
+ expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql)
+ .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
+
+ context 'with simple sort as a symbol' do
+ let(:params) { { search: '日本語', attempt_project_search_optimizations: true, sort: :updated_desc } }
+
+ it 'returns true' do
+ expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql)
+ .to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
+ end
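+
+    # A rough, assumed sketch of the query shape asserted above (abbreviated SQL; the actual
+    # statement depends on the finder params and on MATERIALIZED support):
+    #
+    #   described_class.new(nil, search: 'foo', attempt_group_search_optimizations: true).execute.to_sql
+    #   # => WITH "issues" AS MATERIALIZED (SELECT "issues".* FROM "issues" WHERE /* search */ ...)
+    #   #    SELECT "issues".* FROM "issues" ...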
+ end
+
+ describe '#parent_param=' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:finder) { described_class.new(nil) }
+
+ subject { finder.parent_param = obj }
+
+ where(:klass, :param) do
+ :Project | :project_id
+ :Group | :group_id
+ end
+
+ with_them do
+ let(:obj) { Object.const_get(klass, false).new }
+
+ it 'sets the params' do
+ subject
+
+ expect(finder.params[param]).to eq(obj)
+ end
+ end
+
+ context 'unexpected parent' do
+ let(:obj) { MergeRequest.new }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('Unexpected parent: MergeRequest')
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/members_shared_examples.rb b/spec/support/shared_examples/graphql/members_shared_examples.rb
index 8e9e22f4359..110706c730b 100644
--- a/spec/support/shared_examples/graphql/members_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/members_shared_examples.rb
@@ -39,7 +39,8 @@ RSpec.shared_examples 'querying members with a group' do
let(:base_args) { { relations: described_class.arguments['relations'].default_value } }
subject do
- resolve(described_class, obj: resource, args: base_args.merge(args), ctx: { current_user: user_4 })
+ resolve(described_class, obj: resource, args: base_args.merge(args),
+ ctx: { current_user: user_4 }, arg_style: :internal)
end
describe '#resolve' do
@@ -73,7 +74,8 @@ RSpec.shared_examples 'querying members with a group' do
let_it_be(:other_user) { create(:user) }
subject do
- resolve(described_class, obj: resource, args: base_args.merge(args), ctx: { current_user: other_user })
+ resolve(described_class, obj: resource, args: base_args.merge(args),
+ ctx: { current_user: other_user }, arg_style: :internal)
end
it 'generates an error' do
diff --git a/spec/support/shared_examples/graphql/mutations/incident_management_timeline_events_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/incident_management_timeline_events_shared_examples.rb
index b989dbc6524..cd591248ff6 100644
--- a/spec/support/shared_examples/graphql/mutations/incident_management_timeline_events_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/incident_management_timeline_events_shared_examples.rb
@@ -21,6 +21,7 @@ RSpec.shared_examples 'creating an incident timeline event' do
expect(timeline_event.occurred_at.to_s).to eq(expected_timeline_event.occurred_at)
expect(timeline_event.incident).to eq(expected_timeline_event.incident)
expect(timeline_event.author).to eq(expected_timeline_event.author)
+ expect(timeline_event.editable).to eq(expected_timeline_event.editable)
end
end
diff --git a/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
index 21260e4d954..dfb8ce64391 100644
--- a/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
@@ -71,7 +71,7 @@ RSpec.shared_examples_for 'graphql mutations security ci configuration' do
end
it 'returns an array of errors' do
- expect(result).to match(
+ expect(result).to include(
branch: be_nil,
success_path: be_nil,
errors: match_array([error_message])
@@ -92,7 +92,7 @@ RSpec.shared_examples_for 'graphql mutations security ci configuration' do
end
it 'returns a success path' do
- expect(result).to match(
+ expect(result).to include(
branch: branch,
success_path: success_path,
errors: []
@@ -108,7 +108,7 @@ RSpec.shared_examples_for 'graphql mutations security ci configuration' do
end
it 'returns an array of errors' do
- expect(result).to match(
+ expect(result).to include(
branch: be_nil,
success_path: be_nil,
errors: match_array([error])
diff --git a/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb b/spec/support/shared_examples/graphql/n_plus_one_query_examples.rb
index 738edd43c92..738edd43c92 100644
--- a/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb
+++ b/spec/support/shared_examples/graphql/n_plus_one_query_examples.rb
diff --git a/spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb b/spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb
index da8562161e7..3017f62a7c9 100644
--- a/spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb
@@ -24,7 +24,7 @@ RSpec.shared_examples 'group and projects packages resolver' do
create(:maven_package, name: 'baz', project: project, created_at: 1.minute.ago, version: nil)
end
- [:created_desc, :name_desc, :version_desc, :type_asc].each do |order|
+ %w[CREATED_DESC NAME_DESC VERSION_DESC TYPE_ASC].each do |order|
context "#{order}" do
let(:args) { { sort: order } }
@@ -32,7 +32,7 @@ RSpec.shared_examples 'group and projects packages resolver' do
end
end
- [:created_asc, :name_asc, :version_asc, :type_desc].each do |order|
+ %w[CREATED_ASC NAME_ASC VERSION_ASC TYPE_DESC].each do |order|
context "#{order}" do
let(:args) { { sort: order } }
@@ -41,25 +41,25 @@ RSpec.shared_examples 'group and projects packages resolver' do
end
context 'filter by package_name' do
- let(:args) { { package_name: 'bar', sort: :created_desc } }
+ let(:args) { { package_name: 'bar', sort: 'CREATED_DESC' } }
it { is_expected.to eq([conan_package]) }
end
context 'filter by package_type' do
- let(:args) { { package_type: 'conan', sort: :created_desc } }
+ let(:args) { { package_type: 'conan', sort: 'CREATED_DESC' } }
it { is_expected.to eq([conan_package]) }
end
context 'filter by status' do
- let(:args) { { status: 'error', sort: :created_desc } }
+ let(:args) { { status: 'error', sort: 'CREATED_DESC' } }
it { is_expected.to eq([maven_package]) }
end
context 'include_versionless' do
- let(:args) { { include_versionless: true, sort: :created_desc } }
+ let(:args) { { include_versionless: true, sort: 'CREATED_DESC' } }
it { is_expected.to include(repository3) }
end
diff --git a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
index cf9c36fafe8..7fd54408b11 100644
--- a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
@@ -53,18 +53,20 @@ RSpec.shared_examples 'Gitlab-style deprecations' do
it 'adds information about the replacement if provided' do
deprecable = subject(deprecated: { milestone: '1.10', reason: :renamed, replacement: 'Foo.bar' })
- expect(deprecable.deprecation_reason).to include 'Please use `Foo.bar`'
+ expect(deprecable.deprecation_reason).to include('Please use `Foo.bar`')
end
it 'supports named reasons: renamed' do
deprecable = subject(deprecated: { milestone: '1.10', reason: :renamed })
- expect(deprecable.deprecation_reason).to include 'This was renamed.'
+ expect(deprecable.deprecation_reason).to eq('This was renamed. Deprecated in 1.10.')
end
it 'supports named reasons: alpha' do
deprecable = subject(deprecated: { milestone: '1.10', reason: :alpha })
- expect(deprecable.deprecation_reason).to include 'This feature is in Alpha'
+ expect(deprecable.deprecation_reason).to eq(
+ 'This feature is in Alpha. It can be changed or removed at any time. Introduced in 1.10.'
+ )
end
end
diff --git a/spec/support/shared_examples/integrations/integration_settings_form.rb b/spec/support/shared_examples/integrations/integration_settings_form.rb
index d8a46180796..dfe5a071f91 100644
--- a/spec/support/shared_examples/integrations/integration_settings_form.rb
+++ b/spec/support/shared_examples/integrations/integration_settings_form.rb
@@ -20,10 +20,18 @@ RSpec.shared_examples 'integration settings form' do
"#{integration.title} field #{field_name} not present"
end
+ sections = integration.sections
events = parse_json(trigger_events_for_integration(integration))
+
events.each do |trigger|
- expect(page).to have_field(trigger[:title], type: 'checkbox', wait: 0),
- "#{integration.title} field #{title} checkbox not present"
+ trigger_title = if sections.any? { |s| s[:type] == 'trigger' }
+ trigger_event_title(trigger[:name])
+ else
+ trigger[:title]
+ end
+
+ expect(page).to have_field(trigger_title, type: 'checkbox', wait: 0),
+ "#{integration.title} field #{trigger_title} checkbox not present"
end
end
end
@@ -35,4 +43,20 @@ RSpec.shared_examples 'integration settings form' do
def parse_json(json)
Gitlab::Json.parse(json, symbolize_names: true)
end
+
+ def trigger_event_title(name)
+ # Should match `integrationTriggerEventTitles` in app/assets/javascripts/integrations/constants.js
+ event_titles = {
+ push_events: s_('IntegrationEvents|A push is made to the repository'),
+      issues_events: s_('IntegrationEvents|An issue is created, updated, or closed'),
+ confidential_issues_events: s_('IntegrationEvents|A confidential issue is created, updated, or closed'),
+ merge_requests_events: s_('IntegrationEvents|A merge request is created, updated, or merged'),
+ note_events: s_('IntegrationEvents|A comment is added on an issue'),
+ confidential_note_events: s_('IntegrationEvents|A comment is added on a confidential issue'),
+ tag_push_events: s_('IntegrationEvents|A tag is pushed to the repository'),
+ pipeline_events: s_('IntegrationEvents|A pipeline status changes'),
+ wiki_page_events: s_('IntegrationEvents|A wiki page is created or updated')
+ }.with_indifferent_access
+ event_titles[name]
+ end
end
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index e886ec65b02..284c129221b 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -834,8 +834,8 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
end
end
- describe '#live_trace_exist?' do
- subject { trace.live_trace_exist? }
+ describe '#live?' do
+ subject { trace.live? }
context 'when trace does not exist' do
it { is_expected.to be_falsy }
diff --git a/spec/support/shared_examples/lib/gitlab/database/reestablished_connection_stack_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/reestablished_connection_stack_shared_examples.rb
index 67d739b79ab..d14216ec5ff 100644
--- a/spec/support/shared_examples/lib/gitlab/database/reestablished_connection_stack_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/database/reestablished_connection_stack_shared_examples.rb
@@ -22,7 +22,7 @@ RSpec.shared_context 'reconfigures connection stack' do |db_config_name|
end
end
- def validate_connections!
+ def validate_connections_stack!
model_connections = Gitlab::Database.database_base_models.to_h do |db_config_name, model_class|
[model_class, Gitlab::Database.db_config_name(model_class.connection)]
end
diff --git a/spec/support/shared_examples/lib/gitlab/event_store_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/event_store_shared_examples.rb
index 4fc15cacab4..db2f2f2d0f0 100644
--- a/spec/support/shared_examples/lib/gitlab/event_store_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/event_store_shared_examples.rb
@@ -11,6 +11,8 @@ RSpec.shared_examples 'subscribes to event' do
::Gitlab::EventStore.publish(event)
end
+
+ it_behaves_like 'an idempotent worker'
end
def consume_event(subscriber:, event:)
diff --git a/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb
new file mode 100644
index 00000000000..a5e4df1c272
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/redis/multi_store_feature_flags_shared_examples.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
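+# Expects `subject` to be a Redis wrapper whose `.with` yields a multi-store instance
+# (inferred from the examples below). A hedged usage sketch with hypothetical flag names:
+#
+#   it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_foo,
+#     :use_primary_store_as_default_for_foo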
+RSpec.shared_examples 'multi store feature flags' do |use_primary_and_secondary_stores, use_primary_store_as_default|
+ context "with feature flag :#{use_primary_and_secondary_stores} is enabled" do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores => true)
+ end
+
+ it 'multi store is enabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_and_secondary_stores?).to be true
+ end
+ end
+ end
+
+ context "with feature flag :#{use_primary_and_secondary_stores} is disabled" do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores => false)
+ end
+
+ it 'multi store is disabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ context "with feature flag :#{use_primary_store_as_default} is enabled" do
+ before do
+ stub_feature_flags(use_primary_store_as_default => true)
+ end
+
+ it 'primary store is enabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_store_as_default?).to be true
+ end
+ end
+ end
+
+ context "with feature flag :#{use_primary_store_as_default} is disabled" do
+ before do
+ stub_feature_flags(use_primary_store_as_default => false)
+ end
+
+ it 'primary store is disabled' do
+ subject.with do |redis_instance|
+ expect(redis_instance.use_primary_store_as_default?).to be false
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/application_setting_shared_examples.rb b/spec/support/shared_examples/models/application_setting_shared_examples.rb
index 74ec6474e80..6e7d04d3cba 100644
--- a/spec/support/shared_examples/models/application_setting_shared_examples.rb
+++ b/spec/support/shared_examples/models/application_setting_shared_examples.rb
@@ -238,8 +238,16 @@ RSpec.shared_examples 'application settings examples' do
end
describe '#allowed_key_types' do
- it 'includes all key types by default' do
- expect(setting.allowed_key_types).to contain_exactly(*Gitlab::SSHPublicKey.supported_types)
+ context 'in non-FIPS mode', fips_mode: false do
+ it 'includes all key types by default' do
+ expect(setting.allowed_key_types).to contain_exactly(*Gitlab::SSHPublicKey.supported_types)
+ end
+ end
+
+ context 'in FIPS mode', :fips_mode do
+ it 'excludes DSA from supported key types' do
+ expect(setting.allowed_key_types).to contain_exactly(*Gitlab::SSHPublicKey.supported_types - %i(dsa))
+ end
end
it 'excludes disabled key types' do
diff --git a/spec/support/shared_examples/models/commit_signature_shared_examples.rb b/spec/support/shared_examples/models/commit_signature_shared_examples.rb
new file mode 100644
index 00000000000..56d5c1da3af
--- /dev/null
+++ b/spec/support/shared_examples/models/commit_signature_shared_examples.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
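+# Expects the including spec to define `signature`, `commit_sha`, `attributes` and `commit`
+# (inferred from the examples below), then include the behaviour with:
+#
+#   it_behaves_like 'commit signature'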
+RSpec.shared_examples 'commit signature' do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project).required }
+ end
+
+ describe 'validation' do
+ subject { described_class.new }
+
+ it { is_expected.to validate_presence_of(:commit_sha) }
+ it { is_expected.to validate_presence_of(:project_id) }
+ end
+
+ describe '.safe_create!' do
+ it 'finds a signature by commit sha if it existed' do
+ signature
+
+ expect(described_class.safe_create!(commit_sha: commit_sha)).to eq(signature)
+ end
+
+ it 'creates a new signature if it was not found' do
+ expect { described_class.safe_create!(attributes) }.to change { described_class.count }.by(1)
+ end
+
+ it 'assigns the correct attributes when creating' do
+ signature = described_class.safe_create!(attributes)
+
+ expect(signature).to have_attributes(attributes)
+ end
+
+ it 'does not raise an error in case of a race condition' do
+ expect(described_class).to receive(:find_by).and_return(nil, instance_double(described_class, persisted?: true))
+
+ expect(described_class).to receive(:create).and_raise(ActiveRecord::RecordNotUnique)
+ allow(described_class).to receive(:create).and_call_original
+
+ described_class.safe_create!(attributes)
+ end
+ end
+
+ describe '#commit' do
+ it 'fetches the commit through the project' do
+ expect_next_instance_of(Project) do |instance|
+ expect(instance).to receive(:commit).with(commit_sha).and_return(commit)
+ end
+
+ signature.commit
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb b/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb
index 0ff0895b861..3d393e6dcb5 100644
--- a/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/limitable_shared_examples.rb
@@ -1,6 +1,30 @@
# frozen_string_literal: true
RSpec.shared_examples 'includes Limitable concern' do
+ describe '#exceeds_limits?' do
+ let(:plan_limits) { create(:plan_limits, :default_plan) }
+
+ context 'without plan limits configured' do
+ it { expect(subject.exceeds_limits?).to eq false }
+ end
+
+    context 'with plan limits configured' do
+ before do
+ plan_limits.update!(subject.class.limit_name => 1)
+ end
+
+ it { expect(subject.exceeds_limits?).to eq false }
+
+ context 'with an existing model' do
+ before do
+ subject.clone.save!
+ end
+
+ it { expect(subject.exceeds_limits?).to eq true }
+ end
+ end
+ end
+
describe 'validations' do
let(:plan_limits) { create(:plan_limits, :default_plan) }
diff --git a/spec/support/shared_examples/models/integrations/base_data_fields_shared_examples.rb b/spec/support/shared_examples/models/integrations/base_data_fields_shared_examples.rb
new file mode 100644
index 00000000000..211beb5b32f
--- /dev/null
+++ b/spec/support/shared_examples/models/integrations/base_data_fields_shared_examples.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
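+# Expects `described_class` to be a concrete data-fields model built on
+# Integrations::BaseDataFields (the examples below instantiate it with `described_class.new`);
+# a typical inclusion would be:
+#
+#   it_behaves_like Integrations::BaseDataFields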
+RSpec.shared_examples Integrations::BaseDataFields do
+ subject(:model) { described_class.new }
+
+ describe 'associations' do
+ it { is_expected.to belong_to :integration }
+ end
+
+ describe '#activated?' do
+ subject(:activated?) { model.activated? }
+
+ context 'with integration' do
+ let(:integration) { instance_spy(Integration, activated?: activated) }
+
+ before do
+ allow(model).to receive(:integration).and_return(integration)
+ end
+
+ context 'with value set to false' do
+ let(:activated) { false }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'with value set to true' do
+ let(:activated) { true }
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ context 'without integration' do
+ before do
+ allow(model).to receive(:integration).and_return(nil)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#to_database_hash' do
+ it 'does not include certain attributes' do
+ hash = model.to_database_hash
+
+ expect(hash.keys).not_to include('id', 'service_id', 'integration_id', 'created_at', 'updated_at')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index e293d10964b..75fff11cecd 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -80,7 +80,7 @@ RSpec.shared_examples_for "member creation" do
let_it_be(:admin) { create(:admin) }
it 'returns a Member object', :aggregate_failures do
- member = described_class.new(source, user, :maintainer).execute
+ member = described_class.add_user(source, user, :maintainer)
expect(member).to be_a member_type
expect(member).to be_persisted
@@ -99,7 +99,7 @@ RSpec.shared_examples_for "member creation" do
end
it 'does not update the member' do
- member = described_class.new(source, project_bot, :maintainer, current_user: user).execute
+ member = described_class.add_user(source, project_bot, :maintainer, current_user: user)
expect(source.users.reload).to include(project_bot)
expect(member).to be_persisted
@@ -110,7 +110,7 @@ RSpec.shared_examples_for "member creation" do
context 'when project_bot is not already a member' do
it 'adds the member' do
- member = described_class.new(source, project_bot, :maintainer, current_user: user).execute
+ member = described_class.add_user(source, project_bot, :maintainer, current_user: user)
expect(source.users.reload).to include(project_bot)
expect(member).to be_persisted
@@ -120,7 +120,7 @@ RSpec.shared_examples_for "member creation" do
context 'when admin mode is enabled', :enable_admin_mode, :aggregate_failures do
it 'sets members.created_by to the given admin current_user' do
- member = described_class.new(source, user, :maintainer, current_user: admin).execute
+ member = described_class.add_user(source, user, :maintainer, current_user: admin)
expect(member).to be_persisted
expect(source.users.reload).to include(user)
@@ -130,7 +130,7 @@ RSpec.shared_examples_for "member creation" do
context 'when admin mode is disabled' do
it 'rejects setting members.created_by to the given admin current_user', :aggregate_failures do
- member = described_class.new(source, user, :maintainer, current_user: admin).execute
+ member = described_class.add_user(source, user, :maintainer, current_user: admin)
expect(member).not_to be_persisted
expect(source.users.reload).not_to include(user)
@@ -139,7 +139,7 @@ RSpec.shared_examples_for "member creation" do
end
it 'sets members.expires_at to the given expires_at' do
- member = described_class.new(source, user, :maintainer, expires_at: Date.new(2016, 9, 22)).execute
+ member = described_class.add_user(source, user, :maintainer, expires_at: Date.new(2016, 9, 22))
expect(member.expires_at).to eq(Date.new(2016, 9, 22))
end
@@ -148,7 +148,7 @@ RSpec.shared_examples_for "member creation" do
it "accepts the :#{sym_key} symbol as access level", :aggregate_failures do
expect(source.users).not_to include(user)
- member = described_class.new(source, user.id, sym_key).execute
+ member = described_class.add_user(source, user.id, sym_key)
expect(member.access_level).to eq(int_access_level)
expect(source.users.reload).to include(user)
@@ -157,7 +157,7 @@ RSpec.shared_examples_for "member creation" do
it "accepts the #{int_access_level} integer as access level", :aggregate_failures do
expect(source.users).not_to include(user)
- member = described_class.new(source, user.id, int_access_level).execute
+ member = described_class.add_user(source, user.id, int_access_level)
expect(member.access_level).to eq(int_access_level)
expect(source.users.reload).to include(user)
@@ -169,7 +169,7 @@ RSpec.shared_examples_for "member creation" do
it 'adds the user as a member' do
expect(source.users).not_to include(user)
- described_class.new(source, user.id, :maintainer).execute
+ described_class.add_user(source, user.id, :maintainer)
expect(source.users.reload).to include(user)
end
@@ -179,7 +179,7 @@ RSpec.shared_examples_for "member creation" do
it 'does not add the user as a member' do
expect(source.users).not_to include(user)
- described_class.new(source, non_existing_record_id, :maintainer).execute
+ described_class.add_user(source, non_existing_record_id, :maintainer)
expect(source.users.reload).not_to include(user)
end
@@ -189,7 +189,7 @@ RSpec.shared_examples_for "member creation" do
it 'adds the user as a member' do
expect(source.users).not_to include(user)
- described_class.new(source, user, :maintainer).execute
+ described_class.add_user(source, user, :maintainer)
expect(source.users.reload).to include(user)
end
@@ -200,12 +200,12 @@ RSpec.shared_examples_for "member creation" do
source.request_access(user)
end
- it 'adds the requester as a member', :aggregate_failures do
+ it 'does not add the requester as a regular member', :aggregate_failures do
expect(source.users).not_to include(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
expect do
- described_class.new(source, user, :maintainer).execute
+ described_class.add_user(source, user, :maintainer)
end.to raise_error(Gitlab::Access::AccessDeniedError)
expect(source.users.reload).not_to include(user)
@@ -217,7 +217,7 @@ RSpec.shared_examples_for "member creation" do
it 'adds the user as a member' do
expect(source.users).not_to include(user)
- described_class.new(source, user.email, :maintainer).execute
+ described_class.add_user(source, user.email, :maintainer)
expect(source.users.reload).to include(user)
end
@@ -227,7 +227,7 @@ RSpec.shared_examples_for "member creation" do
it 'creates an invited member' do
expect(source.users).not_to include(user)
- described_class.new(source, 'user@example.com', :maintainer).execute
+ described_class.add_user(source, 'user@example.com', :maintainer)
expect(source.members.invite.pluck(:invite_email)).to include('user@example.com')
end
@@ -237,7 +237,7 @@ RSpec.shared_examples_for "member creation" do
it 'creates an invited member', :aggregate_failures do
email_starting_with_number = "#{user.id}_email@example.com"
- described_class.new(source, email_starting_with_number, :maintainer).execute
+ described_class.add_user(source, email_starting_with_number, :maintainer)
expect(source.members.invite.pluck(:invite_email)).to include(email_starting_with_number)
expect(source.users.reload).not_to include(user)
@@ -249,7 +249,7 @@ RSpec.shared_examples_for "member creation" do
it 'creates the member' do
expect(source.users).not_to include(user)
- described_class.new(source, user, :maintainer, current_user: admin).execute
+ described_class.add_user(source, user, :maintainer, current_user: admin)
expect(source.users.reload).to include(user)
end
@@ -263,7 +263,7 @@ RSpec.shared_examples_for "member creation" do
expect(source.users).not_to include(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
- described_class.new(source, user, :maintainer, current_user: admin).execute
+ described_class.add_user(source, user, :maintainer, current_user: admin)
expect(source.users.reload).to include(user)
expect(source.requesters.reload.exists?(user_id: user)).to be_falsy
@@ -275,7 +275,7 @@ RSpec.shared_examples_for "member creation" do
it 'does not create the member', :aggregate_failures do
expect(source.users).not_to include(user)
- member = described_class.new(source, user, :maintainer, current_user: user).execute
+ member = described_class.add_user(source, user, :maintainer, current_user: user)
expect(source.users.reload).not_to include(user)
expect(member).not_to be_persisted
@@ -290,7 +290,7 @@ RSpec.shared_examples_for "member creation" do
expect(source.users).not_to include(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
- described_class.new(source, user, :maintainer, current_user: user).execute
+ described_class.add_user(source, user, :maintainer, current_user: user)
expect(source.users.reload).not_to include(user)
expect(source.requesters.exists?(user_id: user)).to be_truthy
@@ -307,7 +307,7 @@ RSpec.shared_examples_for "member creation" do
it 'updates the member' do
expect(source.users).to include(user)
- described_class.new(source, user, :maintainer).execute
+ described_class.add_user(source, user, :maintainer)
expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
end
@@ -317,7 +317,7 @@ RSpec.shared_examples_for "member creation" do
it 'updates the member' do
expect(source.users).to include(user)
- described_class.new(source, user, :maintainer, current_user: admin).execute
+ described_class.add_user(source, user, :maintainer, current_user: admin)
expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::MAINTAINER)
end
@@ -327,221 +327,194 @@ RSpec.shared_examples_for "member creation" do
it 'does not update the member' do
expect(source.users).to include(user)
- described_class.new(source, user, :maintainer, current_user: user).execute
+ described_class.add_user(source, user, :maintainer, current_user: user)
expect(source.members.find_by(user_id: user).access_level).to eq(Gitlab::Access::DEVELOPER)
end
end
end
+end
- context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
- let(:task_project) { source.is_a?(Group) ? create(:project, group: source) : source }
-
- it 'creates a member_task with the correct attributes', :aggregate_failures do
- described_class.new(source, user, :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id).execute
+RSpec.shared_examples_for "bulk member creation" do
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
- member = source.members.last
+ context 'when current user does not have permission' do
+ it 'does not succeed' do
+ # maintainers cannot add owners
+ source.add_maintainer(user)
- expect(member.tasks_to_be_done).to match_array([:ci, :code])
- expect(member.member_task.project).to eq(task_project)
+ expect(described_class.add_users(source, [user1, user2], :owner, current_user: user)).to be_empty
end
+ end
- context 'with an already existing member' do
- before do
- source.add_user(user, :developer)
- end
-
- it 'does not update tasks to be done if tasks already exist', :aggregate_failures do
- member = source.members.find_by(user_id: user.id)
- create(:member_task, member: member, project: task_project, tasks_to_be_done: %w(code ci))
-
- expect do
- described_class.new(source,
- user,
- :developer,
- tasks_to_be_done: %w(issues),
- tasks_project_id: task_project.id).execute
- end.not_to change(MemberTask, :count)
+ it 'returns Member objects' do
+ members = described_class.add_users(source, [user1, user2], :maintainer)
- member.reset
- expect(member.tasks_to_be_done).to match_array([:code, :ci])
- expect(member.member_task.project).to eq(task_project)
- end
+ expect(members.map(&:user)).to contain_exactly(user1, user2)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
+ end
- it 'adds tasks to be done if they do not exist', :aggregate_failures do
- expect do
- described_class.new(source,
- user,
- :developer,
- tasks_to_be_done: %w(issues),
- tasks_project_id: task_project.id).execute
- end.to change(MemberTask, :count).by(1)
+ it 'returns an empty array' do
+ members = described_class.add_users(source, [], :maintainer)
- member = source.members.find_by(user_id: user.id)
- expect(member.tasks_to_be_done).to match_array([:issues])
- expect(member.member_task.project).to eq(task_project)
- end
- end
+ expect(members).to be_a Array
+ expect(members).to be_empty
end
-end
-RSpec.shared_examples_for "bulk member creation" do
- let_it_be(:user) { create(:user) }
- let_it_be(:admin) { create(:admin) }
+ it 'supports different formats' do
+ list = ['joe@local.test', admin, user1.id, user2.id.to_s]
- describe '#execute' do
- it 'raises an error when exiting_members is not passed in the args hash' do
- expect do
- described_class.new(source, user, :maintainer, current_user: user).execute
- end.to raise_error(ArgumentError, 'existing_members must be included in the args hash')
- end
- end
+ members = described_class.add_users(source, list, :maintainer)
- describe '.add_users', :aggregate_failures do
- let_it_be(:user1) { create(:user) }
- let_it_be(:user2) { create(:user) }
+ expect(members.size).to eq(4)
+ expect(members.first).to be_invite
+ end
- it 'returns a Member objects' do
- members = described_class.add_users(source, [user1, user2], :maintainer)
+ context 'with de-duplication' do
+ it 'has the same user by id and user' do
+ members = described_class.add_users(source, [user1.id, user1, user1.id, user2, user2.id, user2], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1, user2)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
end
- it 'returns an empty array' do
- members = described_class.add_users(source, [], :maintainer)
+ it 'has the same user sent more than once' do
+ members = described_class.add_users(source, [user1, user1], :maintainer)
- expect(members).to be_a Array
- expect(members).to be_empty
+ expect(members.map(&:user)).to contain_exactly(user1)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
end
+ end
- it 'supports different formats' do
- list = ['joe@local.test', admin, user1.id, user2.id.to_s]
+ it 'with the same user sent more than once by user and by email' do
+ members = described_class.add_users(source, [user1, user1.email], :maintainer)
- members = described_class.add_users(source, list, :maintainer)
+ expect(members.map(&:user)).to contain_exactly(user1)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
+ end
- expect(members.size).to eq(4)
- expect(members.first).to be_invite
- end
+ it 'with the same user sent more than once by user id and by email' do
+ members = described_class.add_users(source, [user1.id, user1.email], :maintainer)
- context 'with de-duplication' do
- it 'has the same user by id and user' do
- members = described_class.add_users(source, [user1.id, user1, user1.id, user2, user2.id, user2], :maintainer)
+ expect(members.map(&:user)).to contain_exactly(user1)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
+ end
+
+ context 'when a member already exists' do
+ before do
+ source.add_user(user1, :developer)
+ end
+ it 'has the same user sent more than once with the member already existing' do
+ expect do
+ members = described_class.add_users(source, [user1, user1, user2], :maintainer)
expect(members.map(&:user)).to contain_exactly(user1, user2)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
- end
+ end.to change { Member.count }.by(1)
+ end
- it 'has the same user sent more than once' do
- members = described_class.add_users(source, [user1, user1], :maintainer)
+ it 'supports existing users as expected with user_ids passed' do
+ user3 = create(:user)
- expect(members.map(&:user)).to contain_exactly(user1)
+ expect do
+ members = described_class.add_users(source, [user1.id, user2, user3.id], :maintainer)
+ expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
expect(members).to all(be_a(member_type))
expect(members).to all(be_persisted)
- end
+ end.to change { Member.count }.by(2)
end
- it 'with the same user sent more than once by user and by email' do
- members = described_class.add_users(source, [user1, user1.email], :maintainer)
+ it 'supports existing users as expected without user ids passed' do
+ user3 = create(:user)
- expect(members.map(&:user)).to contain_exactly(user1)
- expect(members).to all(be_a(member_type))
- expect(members).to all(be_persisted)
+ expect do
+ members = described_class.add_users(source, [user1, user2, user3], :maintainer)
+ expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
+ expect(members).to all(be_a(member_type))
+ expect(members).to all(be_persisted)
+ end.to change { Member.count }.by(2)
end
+ end
- it 'with the same user sent more than once by user id and by email' do
- members = described_class.add_users(source, [user1.id, user1.email], :maintainer)
+ context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
+ let(:task_project) { source.is_a?(Group) ? create(:project, group: source) : source }
- expect(members.map(&:user)).to contain_exactly(user1)
- expect(members).to all(be_a(member_type))
- expect(members).to all(be_persisted)
+ it 'creates a member_task with the correct attributes', :aggregate_failures do
+ members = described_class.add_users(source, [user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id)
+ member = members.last
+
+ expect(member.tasks_to_be_done).to match_array([:ci, :code])
+ expect(member.member_task.project).to eq(task_project)
end
- context 'when a member already exists' do
+ context 'with an already existing member' do
before do
source.add_user(user1, :developer)
end
- it 'has the same user sent more than once with the member already existing' do
- expect do
- members = described_class.add_users(source, [user1, user1, user2], :maintainer)
- expect(members.map(&:user)).to contain_exactly(user1, user2)
- expect(members).to all(be_a(member_type))
- expect(members).to all(be_persisted)
- end.to change { Member.count }.by(1)
- end
-
- it 'supports existing users as expected with user_ids passed' do
- user3 = create(:user)
+ it 'does not update tasks to be done if tasks already exist', :aggregate_failures do
+ member = source.members.find_by(user_id: user1.id)
+ create(:member_task, member: member, project: task_project, tasks_to_be_done: %w(code ci))
expect do
- members = described_class.add_users(source, [user1.id, user2, user3.id], :maintainer)
- expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
- expect(members).to all(be_a(member_type))
- expect(members).to all(be_persisted)
- end.to change { Member.count }.by(2)
- end
+ described_class.add_users(source,
+ [user1.id],
+ :developer,
+ tasks_to_be_done: %w(issues),
+ tasks_project_id: task_project.id)
+ end.not_to change(MemberTask, :count)
- it 'supports existing users as expected without user ids passed' do
- user3 = create(:user)
+ member.reset
+ expect(member.tasks_to_be_done).to match_array([:code, :ci])
+ expect(member.member_task.project).to eq(task_project)
+ end
+ it 'adds tasks to be done if they do not exist', :aggregate_failures do
expect do
- members = described_class.add_users(source, [user1, user2, user3], :maintainer)
- expect(members.map(&:user)).to contain_exactly(user1, user2, user3)
- expect(members).to all(be_a(member_type))
- expect(members).to all(be_persisted)
- end.to change { Member.count }.by(2)
+ described_class.add_users(source,
+ [user1.id],
+ :developer,
+ tasks_to_be_done: %w(issues),
+ tasks_project_id: task_project.id)
+ end.to change(MemberTask, :count).by(1)
+
+ member = source.members.find_by(user_id: user1.id)
+ expect(member.tasks_to_be_done).to match_array([:issues])
+ expect(member.member_task.project).to eq(task_project)
end
end
+ end
+end
+
+RSpec.shared_examples 'owner management' do
+ describe '.cannot_manage_owners?' do
+ subject { described_class.cannot_manage_owners?(source, user) }
- context 'when `tasks_to_be_done` and `tasks_project_id` are passed' do
- let(:task_project) { source.is_a?(Group) ? create(:project, group: source) : source }
+ context 'when maintainer' do
+ before do
+ source.add_maintainer(user)
+ end
- it 'creates a member_task with the correct attributes', :aggregate_failures do
- members = described_class.add_users(source, [user1], :developer, tasks_to_be_done: %w(ci code), tasks_project_id: task_project.id)
- member = members.last
+ it 'cannot manage owners' do
+ expect(subject).to be_truthy
+ end
+ end
- expect(member.tasks_to_be_done).to match_array([:ci, :code])
- expect(member.member_task.project).to eq(task_project)
+ context 'when owner' do
+ before do
+ source.add_owner(user)
end
- context 'with an already existing member' do
- before do
- source.add_user(user1, :developer)
- end
-
- it 'does not update tasks to be done if tasks already exist', :aggregate_failures do
- member = source.members.find_by(user_id: user1.id)
- create(:member_task, member: member, project: task_project, tasks_to_be_done: %w(code ci))
-
- expect do
- described_class.add_users(source,
- [user1.id],
- :developer,
- tasks_to_be_done: %w(issues),
- tasks_project_id: task_project.id)
- end.not_to change(MemberTask, :count)
-
- member.reset
- expect(member.tasks_to_be_done).to match_array([:code, :ci])
- expect(member.member_task.project).to eq(task_project)
- end
-
- it 'adds tasks to be done if they do not exist', :aggregate_failures do
- expect do
- described_class.add_users(source,
- [user1.id],
- :developer,
- tasks_to_be_done: %w(issues),
- tasks_project_id: task_project.id)
- end.to change(MemberTask, :count).by(1)
-
- member = source.members.find_by(user_id: user1.id)
- expect(member.tasks_to_be_done).to match_array([:issues])
- expect(member.member_task.project).to eq(task_project)
- end
+ it 'can manage owners' do
+ expect(subject).to be_falsey
end
end
end
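For orientation, a minimal sketch of how a member model spec might include the reworked shared examples above; the describe target, factories, and the `source`/`user`/`member_type` definitions are illustrative assumptions, not part of this diff.

# Hypothetical includer (names and factories assumed for illustration).
require 'spec_helper'

RSpec.describe GroupMember do
  it_behaves_like 'member creation' do
    # The shared examples call described_class.add_user on this source.
    let_it_be(:source) { create(:group, :public) }
    let_it_be(:user) { create(:user) }

    let(:member_type) { described_class }
  end

  it_behaves_like 'bulk member creation' do
    # Exercises described_class.add_users against the same kind of source.
    let_it_be(:source) { create(:group, :public) }
    let_it_be(:user) { create(:user) }

    let(:member_type) { described_class }
  end
end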
diff --git a/spec/support/shared_examples/models/members_notifications_shared_example.rb b/spec/support/shared_examples/models/members_notifications_shared_example.rb
index 04af3935d15..75eed0203a7 100644
--- a/spec/support/shared_examples/models/members_notifications_shared_example.rb
+++ b/spec/support/shared_examples/models/members_notifications_shared_example.rb
@@ -33,6 +33,18 @@ RSpec.shared_examples 'members notifications' do |entity_type|
end
end
+ describe '#after_commit' do
+ context 'on creation of a member requesting access' do
+ let(:member) { build(:"#{entity_type}_member", :access_request) }
+
+ it "calls NotificationService.new_access_request" do
+ expect(notification_service).to receive(:new_access_request).with(member)
+
+ member.save!
+ end
+ end
+ end
+
describe '#accept_request' do
let(:member) { create(:"#{entity_type}_member", :access_request) }
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index 6f17231a040..604c57768fe 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -540,14 +540,6 @@ RSpec.shared_examples 'wiki model' do
end
end
end
-
- context 'when feature flag :gitaly_replace_wiki_create_page is disabled' do
- before do
- stub_feature_flags(gitaly_replace_wiki_create_page: false)
- end
-
- it_behaves_like 'create_page tests'
- end
end
describe '#update_page' do
diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
index f1ace9878e9..45da1d382c1 100644
--- a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
@@ -238,6 +238,12 @@ RSpec.shared_examples 'namespace traversal scopes' do
subject { described_class.where(id: [nested_group_1, nested_group_2]).self_and_descendants(include_self: false) }
it { is_expected.to contain_exactly(deep_nested_group_1, deep_nested_group_2) }
+
+ context 'with duplicate descendants' do
+ subject { described_class.where(id: [group_1, nested_group_1]).self_and_descendants(include_self: false) }
+
+ it { is_expected.to contain_exactly(nested_group_1, deep_nested_group_1) }
+ end
end
context 'with offset and limit' do
@@ -267,6 +273,14 @@ RSpec.shared_examples 'namespace traversal scopes' do
include_examples '.self_and_descendants'
end
+
+ context 'with linear_scopes_superset feature flag disabled' do
+ before do
+ stub_feature_flags(linear_scopes_superset: false)
+ end
+
+ include_examples '.self_and_descendants'
+ end
end
shared_examples '.self_and_descendant_ids' do
@@ -310,6 +324,14 @@ RSpec.shared_examples 'namespace traversal scopes' do
include_examples '.self_and_descendant_ids'
end
+
+ context 'with linear_scopes_superset feature flag disabled' do
+ before do
+ stub_feature_flags(linear_scopes_superset: false)
+ end
+
+ include_examples '.self_and_descendant_ids'
+ end
end
shared_examples '.self_and_hierarchy' do
diff --git a/spec/support/shared_examples/requests/api/project_statistics_refresh_conflicts_shared_examples.rb b/spec/support/shared_examples/requests/api/project_statistics_refresh_conflicts_shared_examples.rb
new file mode 100644
index 00000000000..7c3f4781472
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/project_statistics_refresh_conflicts_shared_examples.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'preventing request because of ongoing project stats refresh' do |entrypoint|
+ before do
+ create(:project_build_artifacts_size_refresh, :pending, project: project)
+ end
+
+ it 'logs about the rejected request' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger)
+ .to receive(:warn_request_rejected_during_stats_refresh)
+ .with(project.id)
+
+ make_request
+ end
+
+ it 'returns 409 error' do
+ make_request
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+end
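A hedged usage sketch for the new shared example above; the endpoint, factories, and `make_request` wiring are assumptions made purely for illustration.

# Hypothetical request spec; the route and helpers are placeholders.
RSpec.describe 'an API endpoint guarded during project stats refresh' do
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { create(:user) }

  before_all do
    project.add_maintainer(user)
  end

  # The shared example only needs `project` and this `make_request` hook.
  def make_request
    delete api("/projects/#{project.id}/artifacts", user)
  end

  it_behaves_like 'preventing request because of ongoing project stats refresh'
end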
diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
index aff086d1ba3..795545e4ad1 100644
--- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
@@ -124,6 +124,23 @@ RSpec.shared_examples 'PyPI package versions' do |user_type, status, add_member
end
end
+RSpec.shared_examples 'PyPI package index' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it 'returns the package index' do
+ subject
+
+ expect(response.body).to match(package.name)
+ end
+
+ it_behaves_like 'returning response status', status
+ end
+end
+
RSpec.shared_examples 'PyPI package download' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
@@ -259,6 +276,45 @@ RSpec.shared_examples 'pypi simple API endpoint' do
end
end
+RSpec.shared_examples 'pypi simple index API endpoint' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'with valid project' do
+ where(:visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ :public | :developer | true | true | 'PyPI package index' | :success
+ :public | :guest | true | true | 'PyPI package index' | :success
+ :public | :developer | true | false | 'PyPI package index' | :success
+ :public | :guest | true | false | 'PyPI package index' | :success
+ :public | :developer | false | true | 'PyPI package index' | :success
+ :public | :guest | false | true | 'PyPI package index' | :success
+ :public | :developer | false | false | 'PyPI package index' | :success
+ :public | :guest | false | false | 'PyPI package index' | :success
+ :public | :anonymous | false | true | 'PyPI package index' | :success
+ :private | :developer | true | true | 'PyPI package index' | :success
+ :private | :guest | true | true | 'process PyPI api request' | :forbidden
+ :private | :developer | true | false | 'process PyPI api request' | :unauthorized
+ :private | :guest | true | false | 'process PyPI api request' | :unauthorized
+ :private | :developer | false | true | 'process PyPI api request' | :not_found
+ :private | :guest | false | true | 'process PyPI api request' | :not_found
+ :private | :developer | false | false | 'process PyPI api request' | :unauthorized
+ :private | :guest | false | false | 'process PyPI api request' | :unauthorized
+ :private | :anonymous | false | true | 'process PyPI api request' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility_level.to_s))
+ group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(visibility_level.to_s))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+end
+
RSpec.shared_examples 'pypi file download endpoint' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/support/shared_examples/requests/projects/environments_controller_spec_shared_examples.rb b/spec/support/shared_examples/requests/projects/environments_controller_spec_shared_examples.rb
new file mode 100644
index 00000000000..31218b104bd
--- /dev/null
+++ b/spec/support/shared_examples/requests/projects/environments_controller_spec_shared_examples.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'avoids N+1 queries on environment detail page' do
+ it 'avoids N+1 queries', :use_sql_query_cache do
+ create_deployment_with_associations(commit_depth: 19)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ get project_environment_path(project, environment), params: environment_params
+ end
+
+ 18.downto(0).each { |n| create_deployment_with_associations(commit_depth: n) }
+
+ # N+1s exist for loading commit emails and users
+ expect do
+ get project_environment_path(project, environment), params: environment_params
+ end.not_to exceed_all_query_limit(control).with_threshold(9)
+ end
+end
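A sketch of how an including spec might satisfy this shared example's contract; the deployment helper, sign-in, and project/environment setup below are simplified assumptions.

# Hypothetical includer; the helper is a stand-in for the real one.
RSpec.describe 'Environment detail page', :request_store do
  let_it_be(:project) { create(:project, :repository) }
  let_it_be(:environment) { create(:environment, project: project) }
  let_it_be(:user) { create(:user) }

  let(:environment_params) { {} }

  before do
    project.add_developer(user)
    sign_in(user)
  end

  # Creates a deployment pointing at a commit `commit_depth` behind HEAD,
  # mimicking the helper the shared example expects from the including spec.
  def create_deployment_with_associations(commit_depth:)
    sha = project.repository.commit("HEAD~#{commit_depth}").sha
    create(:deployment, :success, environment: environment, project: project, sha: sha)
  end

  it_behaves_like 'avoids N+1 queries on environment detail page'
end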
diff --git a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
index e1baa594f3c..6d59943d91c 100644
--- a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
@@ -8,9 +8,8 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do
create_environment_with_associations(project)
create_environment_with_associations(project)
- # Fix N+1 queries introduced by multi stop_actions for environment.
- # Tracked in https://gitlab.com/gitlab-org/gitlab/-/issues/358780
- relax_count = 14
+ # See issue: https://gitlab.com/gitlab-org/gitlab/-/issues/363317
+ relax_count = 1
expect { serialize(grouping: true) }.not_to exceed_query_limit(control.count + relax_count)
end
@@ -23,9 +22,8 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do
create_environment_with_associations(project)
create_environment_with_associations(project)
- # Fix N+1 queries introduced by multi stop_actions for environment.
- # Tracked in https://gitlab.com/gitlab-org/gitlab/-/issues/358780
- relax_count = 14
+ # See issue: https://gitlab.com/gitlab-org/gitlab/-/issues/363317
+ relax_count = 1
expect { serialize(grouping: false) }.not_to exceed_query_limit(control.count + relax_count)
end
diff --git a/spec/support/shared_examples/services/alert_management_shared_examples.rb b/spec/support/shared_examples/services/alert_management_shared_examples.rb
index 23aee912d2d..f644f1a1687 100644
--- a/spec/support/shared_examples/services/alert_management_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management_shared_examples.rb
@@ -32,7 +32,7 @@ RSpec.shared_context 'incident management settings enabled' do
end
before do
- allow(ProjectServiceWorker).to receive(:perform_async)
+ allow(Integrations::ExecuteWorker).to receive(:perform_async)
allow(service)
.to receive(:incident_management_setting)
.and_return(incident_management_setting)
diff --git a/spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb
index 9a3a0cc9cc8..ed05a150f8b 100644
--- a/spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/items_list_service_shared_examples.rb
@@ -3,17 +3,17 @@
RSpec.shared_examples 'items list service' do
it 'avoids N+1' do
params = { board_id: board.id }
- control = ActiveRecord::QueryRecorder.new { described_class.new(parent, user, params).execute }
+ control = ActiveRecord::QueryRecorder.new { list_service(params).execute }
new_list
- expect { described_class.new(parent, user, params).execute }.not_to exceed_query_limit(control)
+ expect { list_service(params).execute }.not_to exceed_query_limit(control)
end
- it 'returns opened items when list_id is missing' do
+ it 'returns opened items when list_id and list are missing' do
params = { board_id: board.id }
- items = described_class.new(parent, user, params).execute
+ items = list_service(params).execute
expect(items).to match_array(backlog_items)
end
@@ -21,7 +21,7 @@ RSpec.shared_examples 'items list service' do
it 'returns opened items when listing items from Backlog' do
params = { board_id: board.id, id: backlog.id }
- items = described_class.new(parent, user, params).execute
+ items = list_service(params).execute
expect(items).to match_array(backlog_items)
end
@@ -29,7 +29,7 @@ RSpec.shared_examples 'items list service' do
it 'returns opened items that have label list applied when listing items from a label list' do
params = { board_id: board.id, id: list1.id }
- items = described_class.new(parent, user, params).execute
+ items = list_service(params).execute
expect(items).to match_array(list1_items)
end
@@ -37,20 +37,24 @@ RSpec.shared_examples 'items list service' do
it 'returns closed items when listing items from Closed sorted by closed_at in descending order' do
params = { board_id: board.id, id: closed.id }
- items = described_class.new(parent, user, params).execute
+ items = list_service(params).execute
expect(items).to eq(closed_items)
end
it 'raises an error if the list does not belong to the board' do
list = create(list_factory) # rubocop:disable Rails/SaveBang
- service = described_class.new(parent, user, board_id: board.id, id: list.id)
+ params = { board_id: board.id, id: list.id }
+
+ service = list_service(params)
expect { service.execute }.to raise_error(ActiveRecord::RecordNotFound)
end
- it 'raises an error if list id is invalid' do
- service = described_class.new(parent, user, board_id: board.id, id: nil)
+ it 'raises an error if list and list id are invalid or missing' do
+ params = { board_id: board.id, id: nil, list: nil }
+
+ service = list_service(params)
expect { service.execute }.to raise_error(ActiveRecord::RecordNotFound)
end
@@ -58,8 +62,22 @@ RSpec.shared_examples 'items list service' do
it 'returns items from all lists if :all_list is used' do
params = { board_id: board.id, all_lists: true }
- items = described_class.new(parent, user, params).execute
+ items = list_service(params).execute
expect(items).to match_array(all_items)
end
+
+ it 'returns opened items that have label list applied when using list param' do
+ params = { board_id: board.id, list: list1 }
+
+ items = list_service(params).execute
+
+ expect(items).to match_array(list1_items)
+ end
+
+ def list_service(params)
+ args = [parent, user].push(params)
+
+ described_class.new(*args)
+ end
end
diff --git a/spec/support/shared_examples/views/pagination_shared_examples.rb b/spec/support/shared_examples/views/pagination_shared_examples.rb
new file mode 100644
index 00000000000..3932f320859
--- /dev/null
+++ b/spec/support/shared_examples/views/pagination_shared_examples.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'correct pagination' do
+ it 'paginates correctly to page 3 and back' do
+ expect(page).to have_selector(item_selector, count: per_page)
+ page1_item_text = page.find(item_selector).text
+ click_next_page(next_button_selector)
+
+ expect(page).to have_selector(item_selector, count: per_page)
+ page2_item_text = page.find(item_selector).text
+ click_next_page(next_button_selector)
+
+ expect(page).to have_selector(item_selector, count: per_page)
+ page3_item_text = page.find(item_selector).text
+ click_prev_page(prev_button_selector)
+
+ expect(page3_item_text).not_to eql(page2_item_text)
+ expect(page.find(item_selector).text).to eql(page2_item_text)
+
+ click_prev_page(prev_button_selector)
+
+ expect(page.find(item_selector).text).to eql(page1_item_text)
+ expect(page).to have_selector(item_selector, count: per_page)
+ end
+
+ def click_next_page(next_button_selector)
+ page.find(next_button_selector).click
+ wait_for_requests
+ end
+
+ def click_prev_page(prev_button_selector)
+ page.find(prev_button_selector).click
+ wait_for_requests
+ end
+end
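A hedged sketch of a feature spec wiring for the pagination shared example above; the page under test, selectors, and sign-in details are assumptions.

# Hypothetical feature spec; selectors and setup are placeholders.
RSpec.describe 'Admin runners pagination', :js do
  let(:per_page) { 20 }
  let(:item_selector) { '[data-testid="runner-row"]' }
  let(:next_button_selector) { '[data-testid="nextButton"]' }
  let(:prev_button_selector) { '[data-testid="prevButton"]' }

  before do
    create_list(:ci_runner, per_page * 3 + 1)
    # Real specs may need additional admin-mode handling; omitted here.
    sign_in(create(:admin))
    visit admin_runners_path
  end

  it_behaves_like 'correct pagination'
end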
diff --git a/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb
index 7fdf049a823..8ecb04bfdd6 100644
--- a/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/background_migration_worker_shared_examples.rb
@@ -42,159 +42,195 @@ RSpec.shared_examples 'it runs background migration jobs' do |tracking_database|
describe '#perform' do
let(:worker) { described_class.new }
- before do
- allow(worker).to receive(:jid).and_return(1)
- allow(worker).to receive(:always_perform?).and_return(false)
+ context 'when execute_background_migrations feature flag is disabled' do
+ before do
+ stub_feature_flags(execute_background_migrations: false)
+ end
- allow(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(false)
- end
+ it 'does not perform the job, reschedules it in the future, and logs a message' do
+ expect(worker).not_to receive(:perform_with_connection)
- it 'performs jobs using the coordinator for the worker' do
- expect_next_instance_of(Gitlab::BackgroundMigration::JobCoordinator) do |coordinator|
- allow(coordinator).to receive(:with_shared_connection).and_yield
+ expect(Sidekiq.logger).to receive(:info) do |payload|
+ expect(payload[:class]).to eq(described_class.name)
+ expect(payload[:database]).to eq(tracking_database)
+ expect(payload[:message]).to match(/skipping execution, migration rescheduled/)
+ end
- expect(coordinator.worker_class).to eq(described_class)
- expect(coordinator).to receive(:perform).with('Foo', [10, 20])
- end
+ lease_attempts = 3
+ delay = described_class::BACKGROUND_MIGRATIONS_DELAY
+ job_args = [10, 20]
- worker.perform('Foo', [10, 20])
- end
+ freeze_time do
+ worker.perform('Foo', job_args, lease_attempts)
- context 'when lease can be obtained' do
- let(:coordinator) { double('job coordinator') }
+ job = described_class.jobs.find { |job| job['args'] == ['Foo', job_args, lease_attempts] }
+ expect(job).to be, "Expected the job to be rescheduled with (#{job_args}, #{lease_attempts}), but it was not."
+ expected_time = delay.to_i + Time.now.to_i
+ expect(job['at']).to eq(expected_time),
+ "Expected the job to be rescheduled in #{expected_time} seconds, " \
+ "but it was rescheduled in #{job['at']} seconds."
+ end
+ end
+ end
+
+ context 'when execute_background_migrations feature flag is enabled' do
before do
- allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database)
- .with(tracking_database)
- .and_return(coordinator)
+ stub_feature_flags(execute_background_migrations: true)
- allow(coordinator).to receive(:with_shared_connection).and_yield
+ allow(worker).to receive(:jid).and_return(1)
+ allow(worker).to receive(:always_perform?).and_return(false)
+
+ allow(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(false)
end
- it 'sets up the shared connection before checking replication' do
- expect(coordinator).to receive(:with_shared_connection).and_yield.ordered
- expect(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(false).ordered
+ it 'performs jobs using the coordinator for the worker' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::JobCoordinator) do |coordinator|
+ allow(coordinator).to receive(:with_shared_connection).and_yield
- expect(coordinator).to receive(:perform).with('Foo', [10, 20])
+ expect(coordinator.worker_class).to eq(described_class)
+ expect(coordinator).to receive(:perform).with('Foo', [10, 20])
+ end
worker.perform('Foo', [10, 20])
end
- it 'performs a background migration' do
- expect(coordinator).to receive(:perform).with('Foo', [10, 20])
+ context 'when lease can be obtained' do
+ let(:coordinator) { double('job coordinator') }
- worker.perform('Foo', [10, 20])
- end
+ before do
+ allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database)
+ .with(tracking_database)
+ .and_return(coordinator)
+
+ allow(coordinator).to receive(:with_shared_connection).and_yield
+ end
+
+ it 'sets up the shared connection before checking replication' do
+ expect(coordinator).to receive(:with_shared_connection).and_yield.ordered
+ expect(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(false).ordered
- context 'when lease_attempts is 1' do
- it 'performs a background migration' do
expect(coordinator).to receive(:perform).with('Foo', [10, 20])
- worker.perform('Foo', [10, 20], 1)
+ worker.perform('Foo', [10, 20])
end
- end
- it 'can run scheduled job and retried job concurrently' do
- expect(coordinator)
- .to receive(:perform)
- .with('Foo', [10, 20])
- .exactly(2).time
-
- worker.perform('Foo', [10, 20])
- worker.perform('Foo', [10, 20], described_class::MAX_LEASE_ATTEMPTS - 1)
- end
+ it 'performs a background migration' do
+ expect(coordinator).to receive(:perform).with('Foo', [10, 20])
- it 'sets the class that will be executed as the caller_id' do
- expect(coordinator).to receive(:perform) do
- expect(Gitlab::ApplicationContext.current).to include('meta.caller_id' => 'Foo')
+ worker.perform('Foo', [10, 20])
end
- worker.perform('Foo', [10, 20])
- end
- end
+ context 'when lease_attempts is 1' do
+ it 'performs a background migration' do
+ expect(coordinator).to receive(:perform).with('Foo', [10, 20])
- context 'when lease not obtained (migration of same class was performed recently)' do
- let(:timeout) { described_class.minimum_interval }
- let(:lease_key) { "#{described_class.name}:Foo" }
- let(:coordinator) { double('job coordinator') }
+ worker.perform('Foo', [10, 20], 1)
+ end
+ end
- before do
- allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database)
- .with(tracking_database)
- .and_return(coordinator)
+ it 'can run scheduled job and retried job concurrently' do
+ expect(coordinator)
+ .to receive(:perform)
+ .with('Foo', [10, 20])
+ .exactly(2).time
- allow(coordinator).to receive(:with_shared_connection).and_yield
+ worker.perform('Foo', [10, 20])
+ worker.perform('Foo', [10, 20], described_class::MAX_LEASE_ATTEMPTS - 1)
+ end
- expect(coordinator).not_to receive(:perform)
+ it 'sets the class that will be executed as the caller_id' do
+ expect(coordinator).to receive(:perform) do
+ expect(Gitlab::ApplicationContext.current).to include('meta.caller_id' => 'Foo')
+ end
- Gitlab::ExclusiveLease.new(lease_key, timeout: timeout).try_obtain
+ worker.perform('Foo', [10, 20])
+ end
end
- it 'reschedules the migration and decrements the lease_attempts' do
- expect(described_class)
- .to receive(:perform_in)
- .with(a_kind_of(Numeric), 'Foo', [10, 20], 4)
+ context 'when lease not obtained (migration of same class was performed recently)' do
+ let(:timeout) { described_class.minimum_interval }
+ let(:lease_key) { "#{described_class.name}:Foo" }
+ let(:coordinator) { double('job coordinator') }
- worker.perform('Foo', [10, 20], 5)
- end
+ before do
+ allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database)
+ .with(tracking_database)
+ .and_return(coordinator)
- context 'when lease_attempts is 1' do
- let(:lease_key) { "#{described_class.name}:Foo:retried" }
+ allow(coordinator).to receive(:with_shared_connection).and_yield
+
+ expect(coordinator).not_to receive(:perform)
+
+ Gitlab::ExclusiveLease.new(lease_key, timeout: timeout).try_obtain
+ end
it 'reschedules the migration and decrements the lease_attempts' do
expect(described_class)
.to receive(:perform_in)
- .with(a_kind_of(Numeric), 'Foo', [10, 20], 0)
+ .with(a_kind_of(Numeric), 'Foo', [10, 20], 4)
- worker.perform('Foo', [10, 20], 1)
+ worker.perform('Foo', [10, 20], 5)
end
- end
- context 'when lease_attempts is 0' do
- let(:lease_key) { "#{described_class.name}:Foo:retried" }
+ context 'when lease_attempts is 1' do
+ let(:lease_key) { "#{described_class.name}:Foo:retried" }
- it 'gives up performing the migration' do
- expect(described_class).not_to receive(:perform_in)
- expect(Sidekiq.logger).to receive(:warn).with(
- class: 'Foo',
- message: 'Job could not get an exclusive lease after several tries. Giving up.',
- job_id: 1)
+ it 'reschedules the migration and decrements the lease_attempts' do
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(a_kind_of(Numeric), 'Foo', [10, 20], 0)
- worker.perform('Foo', [10, 20], 0)
+ worker.perform('Foo', [10, 20], 1)
+ end
end
- end
- end
- context 'when database is not healthy' do
- before do
- expect(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(true)
- end
+ context 'when lease_attempts is 0' do
+ let(:lease_key) { "#{described_class.name}:Foo:retried" }
- it 'reschedules a migration if the database is not healthy' do
- expect(described_class)
- .to receive(:perform_in)
- .with(a_kind_of(Numeric), 'Foo', [10, 20], 4)
+ it 'gives up performing the migration' do
+ expect(described_class).not_to receive(:perform_in)
+ expect(Sidekiq.logger).to receive(:warn).with(
+ class: 'Foo',
+ message: 'Job could not get an exclusive lease after several tries. Giving up.',
+ job_id: 1)
- worker.perform('Foo', [10, 20])
+ worker.perform('Foo', [10, 20], 0)
+ end
+ end
end
- it 'increments the unhealthy counter' do
- counter = Gitlab::Metrics.counter(:background_migration_database_health_reschedules, 'msg')
+ context 'when database is not healthy' do
+ before do
+ expect(Postgresql::ReplicationSlot).to receive(:lag_too_great?).and_return(true)
+ end
- expect(described_class).to receive(:perform_in)
+ it 'reschedules a migration if the database is not healthy' do
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(a_kind_of(Numeric), 'Foo', [10, 20], 4)
- expect { worker.perform('Foo', [10, 20]) }.to change { counter.get(db_config_name: tracking_database) }.by(1)
- end
+ worker.perform('Foo', [10, 20])
+ end
+
+ it 'increments the unhealthy counter' do
+ counter = Gitlab::Metrics.counter(:background_migration_database_health_reschedules, 'msg')
+
+ expect(described_class).to receive(:perform_in)
+
+ expect { worker.perform('Foo', [10, 20]) }.to change { counter.get(db_config_name: tracking_database) }.by(1)
+ end
- context 'when lease_attempts is 0' do
- it 'gives up performing the migration' do
- expect(described_class).not_to receive(:perform_in)
- expect(Sidekiq.logger).to receive(:warn).with(
- class: 'Foo',
- message: 'Database was unhealthy after several tries. Giving up.',
- job_id: 1)
+ context 'when lease_attempts is 0' do
+ it 'gives up performing the migration' do
+ expect(described_class).not_to receive(:perform_in)
+ expect(Sidekiq.logger).to receive(:warn).with(
+ class: 'Foo',
+ message: 'Database was unhealthy after several tries. Giving up.',
+ job_id: 1)
- worker.perform('Foo', [10, 20], 0)
+ worker.perform('Foo', [10, 20], 0)
+ end
end
end
end
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index 3d4e840fe2d..54962eac100 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_database, feature_flag:|
+RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_database|
include ExclusiveLeaseHelpers
describe 'defining the job attributes' do
@@ -40,13 +40,17 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
describe '.enabled?' do
- it 'does not raise an error' do
- expect { described_class.enabled? }.not_to raise_error
- end
+ it 'returns true when execute_batched_migrations_on_schedule feature flag is enabled' do
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
- it 'returns true' do
expect(described_class.enabled?).to be_truthy
end
+
+ it 'returns false when execute_batched_migrations_on_schedule feature flag is disabled' do
+ stub_feature_flags(execute_batched_migrations_on_schedule: false)
+
+ expect(described_class.enabled?).to be_falsey
+ end
end
describe '#perform' do
@@ -86,7 +90,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
context 'when the feature flag is disabled' do
before do
- stub_feature_flags(feature_flag => false)
+ stub_feature_flags(execute_batched_migrations_on_schedule: false)
end
it 'does nothing' do
@@ -98,10 +102,26 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
context 'when the feature flag is enabled' do
+ let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
+
before do
- stub_feature_flags(feature_flag => true)
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
- allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
+ .with(connection: base_model.connection)
+ .and_return(nil)
+ end
+
+ context 'when database config is shared' do
+ it 'does nothing' do
+ expect(Gitlab::Database).to receive(:db_config_share_with)
+ .with(base_model.connection_db_config).and_return('main')
+
+ expect(worker).not_to receive(:active_migration)
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
end
context 'when no active migrations exist' do
@@ -121,6 +141,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
before do
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
+ .with(connection: base_model.connection)
.and_return(migration)
allow(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(true)
@@ -222,6 +243,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
end
+ let(:gitlab_schema) { "gitlab_#{tracking_database}" }
let!(:migration) do
create(
:batched_background_migration,
@@ -232,10 +254,12 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
batch_size: batch_size,
sub_batch_size: sub_batch_size,
job_class_name: 'ExampleDataMigration',
- job_arguments: [1]
+ job_arguments: [1],
+ gitlab_schema: gitlab_schema
)
end
+ let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
let(:table_name) { 'example_data' }
let(:batch_size) { 5 }
let(:sub_batch_size) { 2 }
diff --git a/spec/support/shared_examples/workers/idempotency_shared_examples.rb b/spec/support/shared_examples/workers/idempotency_shared_examples.rb
index 9d9b371d61a..be43ea7d5f0 100644
--- a/spec/support/shared_examples/workers/idempotency_shared_examples.rb
+++ b/spec/support/shared_examples/workers/idempotency_shared_examples.rb
@@ -20,7 +20,11 @@ RSpec.shared_examples 'an idempotent worker' do
# Avoid stubbing calls for a more accurate run.
subject do
- defined?(job_args) ? perform_multiple(job_args) : perform_multiple
+ if described_class.include?(::Gitlab::EventStore::Subscriber)
+ event_worker
+ else
+ standard_worker
+ end
end
it 'is labeled as idempotent' do
@@ -30,4 +34,12 @@ RSpec.shared_examples 'an idempotent worker' do
it 'performs multiple times sequentially without raising an exception' do
expect { subject }.not_to raise_error
end
+
+ def event_worker
+ consume_event(subscriber: described_class, event: event)
+ end
+
+ def standard_worker
+ defined?(job_args) ? perform_multiple(job_args) : perform_multiple
+ end
end
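Two hedged sketches showing both branches of the updated subject above; the worker and event classes are placeholders, not from this diff.

# Hypothetical specs illustrating how including specs feed the shared example.
RSpec.describe SomeEventSubscriberWorker do
  it_behaves_like 'an idempotent worker' do
    # Because the class includes Gitlab::EventStore::Subscriber, the shared
    # example now drives it through consume_event(subscriber:, event:).
    let(:event) { SomeDomainEvent.new(data: { project_id: 1 }) }
  end
end

RSpec.describe SomePlainWorker do
  it_behaves_like 'an idempotent worker' do
    let(:job_args) { [1] } # non-subscribers still run through perform_multiple
  end
end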
diff --git a/spec/support_specs/helpers/stub_method_calls_spec.rb b/spec/support_specs/helpers/stub_method_calls_spec.rb
new file mode 100644
index 00000000000..837a2162bcd
--- /dev/null
+++ b/spec/support_specs/helpers/stub_method_calls_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe StubMethodCalls do
+ include described_class
+
+ let(:object) do
+ Class.new do
+ def self.test_method
+ 'test'
+ end
+
+ def self.test_method_two(response: nil)
+ response || 'test_two'
+ end
+ end
+ end
+
+ describe '#stub_method' do
+ let(:method_to_stub) { :test_method }
+
+ it 'stubs the method response' do
+ stub_method(object, method_to_stub) { true }
+
+ expect(object.send(method_to_stub)).to eq(true)
+ end
+
+ context 'when calling it on an already stubbed method' do
+ before do
+ stub_method(object, method_to_stub) { false }
+ end
+
+ it 'stubs correctly' do
+ stub_method(object, method_to_stub) { true }
+
+ expect(object.send(method_to_stub)).to eq(true)
+ end
+ end
+
+ context 'methods that accept arguments' do
+ it 'stubs correctly' do
+ stub_method(object, method_to_stub) { |a, b| a + b }
+
+ expect(object.send(method_to_stub, 1, 2)).to eq(3)
+ end
+
+ context 'methods that use named arguments' do
+ let(:method_to_stub) { :test_method_two }
+
+ it 'stubs correctly' do
+ stub_method(object, method_to_stub) { |a: 'test'| a }
+
+ expect(object.send(method_to_stub, a: 'testing')).to eq('testing')
+ expect(object.send(method_to_stub)).to eq('test')
+ end
+
+ context 'stubbing non-existent method' do
+ let(:method_to_stub) { :another_method }
+
+ it 'stubs correctly' do
+ stub_method(object, method_to_stub) { |a: 'test'| a }
+
+ expect(object.send(method_to_stub, a: 'testing')).to eq('testing')
+ expect(object.send(method_to_stub)).to eq('test')
+ end
+ end
+ end
+ end
+ end
+
+ describe '#restore_original_method' do
+ before do
+ stub_method(object, :test_method) { true }
+ end
+
+ it 'restores original behaviour' do
+ expect(object.test_method).to eq(true)
+
+ restore_original_method(object, :test_method)
+
+ expect(object.test_method).to eq('test')
+ end
+
+ context 'method is not stubbed' do
+ specify do
+ expect do
+ restore_original_method(object, 'some_other_method')
+ end.to raise_error(NotImplementedError, "some_other_method has not been stubbed on #{object}")
+ end
+ end
+ end
+
+ describe '#restore_original_methods' do
+ before do
+ stub_method(object, :test_method) { true }
+ stub_method(object, :test_method_two) { true }
+ end
+
+ it 'restores original behaviour' do
+ restore_original_methods(object)
+
+ expect(object.test_method).to eq('test')
+ expect(object.test_method_two).to eq('test_two')
+ end
+ end
+end
diff --git a/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb b/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
index 67d87fe3c2f..a6f5b3862a2 100644
--- a/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
+++ b/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
@@ -56,6 +56,7 @@ RSpec.describe ExceedQueryLimitHelpers do
TestQueries.where(version: 'x').update_all(version: 'y')
TestQueries.where(version: 'foobar').count
TestQueries.where(version: 'z').delete_all
+ Project.where(id: 1).pluck(:name)
end
end
@@ -71,10 +72,11 @@ RSpec.describe ExceedQueryLimitHelpers do
TestQueries.count
TestQueries.where(version: 'y').update_all(version: 'z')
TestQueries.where(version: 'z').delete_all
+ Project.where(id: 2).pluck(:name)
end
end
- it 'merges two query counts' do
+ it 'merges two query counts, showing only diffs' do
test_matcher = TestMatcher.new
diff = test_matcher.diff_query_counts(
@@ -131,6 +133,10 @@ RSpec.describe ExceedQueryLimitHelpers do
},
"RELEASE SAVEPOINT active_record_1" => {
"" => [0, 1]
+ },
+ "SELECT \"projects\".\"name\" FROM \"projects\"" => {
+ "WHERE \"projects\".\"id\" = 1" => [1, 0],
+ "WHERE \"projects\".\"id\" = 2" => [0, 1]
}
})
end
diff --git a/spec/tasks/gitlab/background_migrations_rake_spec.rb b/spec/tasks/gitlab/background_migrations_rake_spec.rb
index 36623e86f27..bbd33f71e60 100644
--- a/spec/tasks/gitlab/background_migrations_rake_spec.rb
+++ b/spec/tasks/gitlab/background_migrations_rake_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks' do
let(:main_database_name) { Gitlab::Database::MAIN_DATABASE_NAME }
let(:model) { Gitlab::Database.database_base_models[main_database_name] }
let(:connection) { double(:connection) }
- let(:base_models) { { 'main' => model } }
+ let(:base_models) { { 'main' => model }.with_indifferent_access }
around do |example|
Gitlab::Database::SharedModel.using_connection(model.connection) do
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 52a0a9a7385..4a3b81a072f 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -465,7 +465,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2)
expect(::Backup::Repositories).to receive(:new)
- .with(anything, strategy: anything, storages: [])
+ .with(anything, strategy: anything, storages: [], paths: [])
.and_call_original
expect(::Backup::GitalyBackup).to receive(:new).with(anything, max_parallelism: 5, storage_parallelism: 2, incremental: false).and_call_original
diff --git a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
new file mode 100644
index 00000000000..29b80176ef8
--- /dev/null
+++ b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout do
+ before :all do
+ Rake.application.rake_require 'tasks/gitlab/db/decomposition/rollback/bump_ci_sequences'
+
+ # empty task as env is already loaded
+ Rake::Task.define_task :environment
+ end
+
+ let(:expected_error_message) do
+ <<-EOS.strip_heredoc
+ Please specify a positive integer `increase_by` value
+ Example: rake gitlab:db:decomposition:rollback:bump_ci_sequences[100000]
+ EOS
+ end
+
+ let(:main_sequence_name) { 'issues_id_seq' }
+ let(:ci_sequence_name) { 'ci_build_needs_id_seq' }
+
+ # This is just to make sure that all of the sequences start with `is_called=True`
+ # which means that the next call to nextval() is going to increment the sequence.
+ # To give predictable test results.
+ before do
+ ApplicationRecord.connection.select_value("select nextval($1)", nil, [ci_sequence_name])
+ end
+
+ context 'when passing wrong argument' do
+ it 'will print an error message and exit when passing no argument' do
+ expect do
+ run_rake_task('gitlab:db:decomposition:rollback:bump_ci_sequences')
+ end.to raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ .and output(expected_error_message).to_stdout
+ end
+
+ it 'will print an error message and exit when passing a non positive integer value' do
+ expect do
+ run_rake_task('gitlab:db:decomposition:rollback:bump_ci_sequences', '-5')
+ end.to raise_error(SystemExit) { |error| expect(error.status).to eq(1) }
+ .and output(expected_error_message).to_stdout
+ end
+ end
+
+ context 'when bumping the ci sequences' do
+ it 'changes ci sequences by the passed argument `increase_by` value on the main database' do
+ expect do
+ run_rake_task('gitlab:db:decomposition:rollback:bump_ci_sequences', '15')
+ end.to change {
+ last_value_of_sequence(ApplicationRecord.connection, ci_sequence_name)
+ }.by(16) # the +1 is because the sequence has is_called = true
+ end
+
+ it 'will still increase the value of sequences that have is_called = False' do
+ # see `is_called`: https://www.postgresql.org/docs/12/functions-sequence.html
+ # choosing a new arbitrary value for the sequence
+ new_value = last_value_of_sequence(ApplicationRecord.connection, ci_sequence_name) + 1000
+ ApplicationRecord.connection.select_value("select setval($1, $2, false)", nil, [ci_sequence_name, new_value])
+ expect do
+ run_rake_task('gitlab:db:decomposition:rollback:bump_ci_sequences', '15')
+ end.to change {
+ last_value_of_sequence(ApplicationRecord.connection, ci_sequence_name)
+ }.by(15)
+ end
+
+ it 'resets the INCREMENT value of the sequences back to 1 for the following calls to nextval()' do
+ run_rake_task('gitlab:db:decomposition:rollback:bump_ci_sequences', '15')
+ value_1 = ApplicationRecord.connection.select_value("select nextval($1)", nil, [ci_sequence_name])
+ value_2 = ApplicationRecord.connection.select_value("select nextval($1)", nil, [ci_sequence_name])
+ expect(value_2 - value_1).to eq(1)
+ end
+
+ it 'does not change the sequences on the gitlab_main tables' do
+ expect do
+ run_rake_task('gitlab:db:decomposition:rollback:bump_ci_sequences', '10')
+ end.to change {
+ last_value_of_sequence(ApplicationRecord.connection, main_sequence_name)
+ }.by(0)
+ .and change {
+ last_value_of_sequence(ApplicationRecord.connection, ci_sequence_name)
+ }.by(11) # the +1 is because the sequence has is_called = true
+ end
+ end
+
+ context 'when multiple databases' do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ it 'does not change ci sequences on the ci database' do
+ expect do
+ run_rake_task('gitlab:db:decomposition:rollback:bump_ci_sequences', '10')
+ end.to change {
+ last_value_of_sequence(Ci::ApplicationRecord.connection, ci_sequence_name)
+ }.by(0)
+ end
+ end
+end
+
+def last_value_of_sequence(connection, sequence_name)
+ connection.select_value("select last_value from #{sequence_name}")
+end
diff --git a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
new file mode 100644
index 00000000000..034c520887e
--- /dev/null
+++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_record_base do
+ before :all do
+ Rake.application.rake_require 'active_record/railties/databases'
+ Rake.application.rake_require 'tasks/seed_fu'
+ Rake.application.rake_require 'tasks/gitlab/db/validate_config'
+ Rake.application.rake_require 'tasks/gitlab/db/lock_writes'
+
+ # empty task as env is already loaded
+ Rake::Task.define_task :environment
+ end
+
+ let!(:project) { create(:project) }
+ let!(:ci_build) { create(:ci_build) }
+ let(:main_connection) { ApplicationRecord.connection }
+ let(:ci_connection) { Ci::ApplicationRecord.connection }
+
+ context 'single database' do
+ before do
+ skip_if_multiple_databases_are_setup
+ end
+
+ context 'when locking writes' do
+ it 'does not add any triggers to the main schema tables' do
+ expect do
+ run_rake_task('gitlab:db:lock_writes')
+ end.to change {
+ number_of_triggers(main_connection)
+ }.by(0)
+ end
+
+ it 'can still modify tables that belong to the two main schemas' do
+ run_rake_task('gitlab:db:lock_writes')
+ expect do
+ Project.last.touch
+ Ci::Build.last.touch
+ end.not_to raise_error
+ end
+ end
+ end
+
+ context 'multiple databases' do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ context 'when locking writes' do
+ it 'adds 3 triggers to the ci schema tables on the main database' do
+ expect do
+ run_rake_task('gitlab:db:lock_writes')
+ end.to change {
+ number_of_triggers_on(main_connection, Ci::Build.table_name)
+ }.by(3) # Triggers to block INSERT / UPDATE / DELETE
+ # Triggers on TRUNCATE are not added to the information_schema.triggers
+ # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
+ end
+
+ it 'adds 3 triggers to the main schema tables on the ci database' do
+ expect do
+ run_rake_task('gitlab:db:lock_writes')
+ end.to change {
+ number_of_triggers_on(ci_connection, Project.table_name)
+ }.by(3) # Triggers to block INSERT / UPDATE / DELETE
+ # Triggers on TRUNCATE are not added to the information_schema.triggers
+ # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
+ end
+
+ it 'still allows writes on the tables with the correct connections' do
+ Project.update_all(updated_at: Time.now)
+ Ci::Build.update_all(updated_at: Time.now)
+ end
+
+ it 'still allows writing to gitlab_shared schema on any connection' do
+ connections = [main_connection, ci_connection]
+ connections.each do |connection|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ LooseForeignKeys::DeletedRecord.create!(
+ fully_qualified_table_name: "public.projects",
+ primary_key_value: 1,
+ cleanup_attempts: 0
+ )
+ end
+ end
+ end
+
+ it 'prevents writes on the main tables on the ci database' do
+ run_rake_task('gitlab:db:lock_writes')
+ expect do
+ ci_connection.execute("delete from projects")
+ end.to raise_error(ActiveRecord::StatementInvalid, /Table: "projects" is write protected/)
+ end
+
+ it 'prevents writes on the ci tables on the main database' do
+ run_rake_task('gitlab:db:lock_writes')
+ expect do
+ main_connection.execute("delete from ci_builds")
+ end.to raise_error(ActiveRecord::StatementInvalid, /Table: "ci_builds" is write protected/)
+ end
+
+ it 'prevents truncating a ci table on the main database' do
+ run_rake_task('gitlab:db:lock_writes')
+ expect do
+ main_connection.execute("truncate ci_build_needs")
+ end.to raise_error(ActiveRecord::StatementInvalid, /Table: "ci_build_needs" is write protected/)
+ end
+
+ it 'retries if it receives a statement_timeout a few times' do
+ error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
+ call_count = 0
+ allow(main_connection).to receive(:execute) do |statement|
+ if statement.include?("CREATE TRIGGER")
+ call_count += 1
+ raise(ActiveRecord::QueryCanceled, error_message) if call_count.even?
+ end
+ end
+ run_rake_task('gitlab:db:lock_writes')
+ end
+
+ it 'raises the exception if the statement_timeout keeps occurring' do
+ error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
+ allow(main_connection).to receive(:execute) do |statement|
+ if statement.include?("CREATE TRIGGER")
+ raise(ActiveRecord::QueryCanceled, error_message)
+ end
+ end
+
+ expect do
+ run_rake_task('gitlab:db:lock_writes')
+ end.to raise_error(ActiveRecord::QueryCanceled)
+ end
+ end
+
+ context 'when unlocking writes' do
+ before do
+ run_rake_task('gitlab:db:lock_writes')
+ end
+
+ it 'removes the write protection triggers from the gitlab_main tables on the ci database' do
+ expect do
+ run_rake_task('gitlab:db:unlock_writes')
+ end.to change {
+ number_of_triggers_on(ci_connection, Project.table_name)
+ }.by(-3) # Triggers to block INSERT / UPDATE / DELETE
+ # Triggers on TRUNCATE are not added to the information_schema.triggers
+ # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
+
+ expect do
+ ci_connection.execute("delete from projects")
+ end.not_to raise_error
+ end
+
+ it 'removes the write protection triggers from the gitlab_ci tables on the main database' do
+ expect do
+ run_rake_task('gitlab:db:unlock_writes')
+ end.to change {
+ number_of_triggers_on(main_connection, Ci::Build.table_name)
+ }.by(-3)
+
+ expect do
+ main_connection.execute("delete from ci_builds")
+ end.not_to raise_error
+ end
+ end
+ end
+
+ def number_of_triggers(connection)
+ connection.select_value("SELECT count(*) FROM information_schema.triggers")
+ end
+
+ def number_of_triggers_on(connection, table_name)
+ connection
+ .select_value("SELECT count(*) FROM information_schema.triggers WHERE event_object_table=$1", nil, [table_name])
+ end
+end
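For context on the `by(3)` counts above, here is a hedged sketch of the kind of write-blocking trigger such a task could install (not necessarily the exact DDL the task runs): one statement-level trigger covering INSERT, UPDATE, DELETE and TRUNCATE. The first three events each get a row in `information_schema.triggers`, while the TRUNCATE event is only visible through `pg_trigger`, which is why the specs count 3 rather than 4.

```ruby
# Illustrative DDL only; the function and trigger names are assumptions.
main_connection.execute(<<~SQL)
  CREATE OR REPLACE FUNCTION prevent_cross_database_write()
  RETURNS TRIGGER AS $$
  BEGIN
    RAISE EXCEPTION 'Table: "%" is write protected', TG_TABLE_NAME;
  END
  $$ LANGUAGE plpgsql;

  CREATE TRIGGER ci_builds_write_protection
    BEFORE INSERT OR UPDATE OR DELETE OR TRUNCATE
    ON ci_builds
    FOR EACH STATEMENT
    EXECUTE FUNCTION prevent_cross_database_write();
SQL
```

A TRUNCATE trigger has to be statement-level, which is also why the sketch attaches everything FOR EACH STATEMENT.
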
diff --git a/spec/tasks/gitlab/db/validate_config_rake_spec.rb b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
index 0b2c844a91f..03d7504e8b1 100644
--- a/spec/tasks/gitlab/db/validate_config_rake_spec.rb
+++ b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
@@ -3,6 +3,10 @@
require 'rake_helper'
RSpec.describe 'gitlab:db:validate_config', :silence_stdout do
+ # We don't need to delete this data since it only modifies `ar_internal_metadata`,
+ # which would not be cleaned up by `DbCleaner` either
+ self.use_transactional_tests = false
+
before :all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
@@ -111,6 +115,26 @@ RSpec.describe 'gitlab:db:validate_config', :silence_stdout do
end
it_behaves_like 'validates successfully'
+
+ context 'when config is pointing to an incorrect server' do
+ let(:test_config) do
+ {
+ main: main_database_config.merge(port: 11235)
+ }
+ end
+
+ it_behaves_like 'validates successfully'
+ end
+
+ context 'when config is pointing to a non-existent database' do
+ let(:test_config) do
+ {
+ main: main_database_config.merge(database: 'non_existent_database')
+ }
+ end
+
+ it_behaves_like 'validates successfully'
+ end
end
context 'when main: uses database_tasks=false' do
@@ -181,6 +205,23 @@ RSpec.describe 'gitlab:db:validate_config', :silence_stdout do
it_behaves_like 'raises an error', /The 'ci' since it is using 'database_tasks: false' should share database with 'main:'/
end
end
+
+ context 'when one of the databases is in read-only mode' do
+ let(:test_config) do
+ {
+ main: main_database_config
+ }
+ end
+
+ let(:exception) { ActiveRecord::StatementInvalid.new("READONLY") }
+
+ before do
+ allow(exception).to receive(:cause).and_return(PG::ReadOnlySqlTransaction.new("cannot execute INSERT in a read-only transaction"))
+ allow(ActiveRecord::InternalMetadata).to receive(:upsert).at_least(:once).and_raise(exception)
+ end
+
+ it_behaves_like 'validates successfully'
+ end
end
%w[db:migrate db:schema:load db:schema:dump].each do |task|
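The new read-only example above only asserts that validation still passes when `ar_internal_metadata` cannot be written and the underlying cause is `PG::ReadOnlySqlTransaction`. A hedged sketch of that rescue pattern (the method name is made up; the real task may structure this differently):

```ruby
# Treat "cannot write in a read-only transaction" as non-fatal while validating,
# but let every other statement error propagate.
def ignore_read_only_errors
  yield
rescue ActiveRecord::StatementInvalid => e
  raise unless e.cause.is_a?(PG::ReadOnlySqlTransaction)

  nil # read-only replica: skip recording metadata and keep validating
end
```
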
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index e340d568269..d8199c09ca1 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -822,18 +822,20 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
let(:connection_pool) { instance_double(ActiveRecord::ConnectionAdapters::ConnectionPool ) }
let(:connection) { instance_double(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter) }
let(:configurations) { double(ActiveRecord::DatabaseConfigurations) }
- let(:configuration) { instance_double(ActiveRecord::DatabaseConfigurations::HashConfig) }
+ let(:configuration) { instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, env_name: 'test', name: 'main') }
let(:config_hash) { { username: 'foo' } }
- it 'migrate as nonsuperuser check with default username' do
+ before do
allow(Rake::Task['db:drop']).to receive(:invoke)
allow(Rake::Task['db:create']).to receive(:invoke)
allow(ActiveRecord::Base).to receive(:configurations).and_return(configurations)
allow(configurations).to receive(:configs_for).and_return([configuration])
allow(configuration).to receive(:configuration_hash).and_return(config_hash)
allow(ActiveRecord::Base).to receive(:establish_connection).and_return(connection_pool)
+ end
- expect(config_hash).to receive(:merge).with({ username: 'gitlab' })
+ it 'migrate as nonsuperuser check with default username' do
+ expect(config_hash).to receive(:merge).with({ username: 'gitlab' }).and_call_original
expect(Gitlab::Database).to receive(:check_for_non_superuser)
expect(Rake::Task['db:migrate']).to receive(:invoke)
@@ -841,14 +843,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
end
it 'migrate as nonsuperuser check with specified username' do
- allow(Rake::Task['db:drop']).to receive(:invoke)
- allow(Rake::Task['db:create']).to receive(:invoke)
- allow(ActiveRecord::Base).to receive(:configurations).and_return(configurations)
- allow(configurations).to receive(:configs_for).and_return([configuration])
- allow(configuration).to receive(:configuration_hash).and_return(config_hash)
- allow(ActiveRecord::Base).to receive(:establish_connection).and_return(connection_pool)
-
- expect(config_hash).to receive(:merge).with({ username: 'foo' })
+ expect(config_hash).to receive(:merge).with({ username: 'foo' }).and_call_original
expect(Gitlab::Database).to receive(:check_for_non_superuser)
expect(Rake::Task['db:migrate']).to receive(:invoke)
diff --git a/spec/tasks/gitlab/pages_rake_spec.rb b/spec/tasks/gitlab/pages_rake_spec.rb
index d4bfcafa7b4..9e3d5c3ccf0 100644
--- a/spec/tasks/gitlab/pages_rake_spec.rb
+++ b/spec/tasks/gitlab/pages_rake_spec.rb
@@ -7,86 +7,6 @@ RSpec.describe 'gitlab:pages', :silence_stdout do
Rake.application.rake_require 'tasks/gitlab/pages'
end
- describe 'migrate_legacy_storage task' do
- subject { run_rake_task('gitlab:pages:migrate_legacy_storage') }
-
- it 'calls migration service' do
- expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- ignore_invalid_entries: false,
- mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 10).and_call_original
- end
-
- subject
- end
-
- it 'uses PAGES_MIGRATION_THREADS environment variable' do
- stub_env('PAGES_MIGRATION_THREADS', '5')
-
- expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- ignore_invalid_entries: false,
- mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute_with_threads).with(threads: 5, batch_size: 10).and_call_original
- end
-
- subject
- end
-
- it 'uses PAGES_MIGRATION_BATCH_SIZE environment variable' do
- stub_env('PAGES_MIGRATION_BATCH_SIZE', '100')
-
- expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- ignore_invalid_entries: false,
- mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 100).and_call_original
- end
-
- subject
- end
-
- it 'uses PAGES_MIGRATION_IGNORE_INVALID_ENTRIES environment variable' do
- stub_env('PAGES_MIGRATION_IGNORE_INVALID_ENTRIES', 'true')
-
- expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- ignore_invalid_entries: true,
- mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 10).and_call_original
- end
-
- subject
- end
-
- it 'uses PAGES_MIGRATION_MARK_PROJECTS_AS_NOT_DEPLOYED environment variable' do
- stub_env('PAGES_MIGRATION_MARK_PROJECTS_AS_NOT_DEPLOYED', 'true')
-
- expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- ignore_invalid_entries: false,
- mark_projects_as_not_deployed: true) do |service|
- expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 10).and_call_original
- end
-
- subject
- end
- end
-
- describe 'clean_migrated_zip_storage task' do
- it 'removes only migrated deployments' do
- regular_deployment = create(:pages_deployment)
- migrated_deployment = create(:pages_deployment, :migrated)
-
- regular_deployment.project.update_pages_deployment!(regular_deployment)
- migrated_deployment.project.update_pages_deployment!(migrated_deployment)
-
- expect(PagesDeployment.all).to contain_exactly(regular_deployment, migrated_deployment)
-
- run_rake_task('gitlab:pages:clean_migrated_zip_storage')
-
- expect(PagesDeployment.all).to contain_exactly(regular_deployment)
- expect(PagesDeployment.find_by_id(regular_deployment.id)).not_to be_nil
- expect(PagesDeployment.find_by_id(migrated_deployment.id)).to be_nil
- end
- end
-
describe 'gitlab:pages:deployments:migrate_to_object_storage' do
subject { run_rake_task('gitlab:pages:deployments:migrate_to_object_storage') }
diff --git a/spec/tasks/rubocop_rake_spec.rb b/spec/tasks/rubocop_rake_spec.rb
index cf7e45aae28..a92d7dc2e52 100644
--- a/spec/tasks/rubocop_rake_spec.rb
+++ b/spec/tasks/rubocop_rake_spec.rb
@@ -8,6 +8,7 @@ require 'fileutils'
require_relative '../support/silence_stdout'
require_relative '../support/helpers/next_instance_of'
require_relative '../support/helpers/rake_helpers'
+require_relative '../../rubocop/formatter/todo_formatter'
require_relative '../../rubocop/todo_dir'
RSpec.describe 'rubocop rake tasks', :silence_stdout do
@@ -29,22 +30,22 @@ RSpec.describe 'rubocop rake tasks', :silence_stdout do
around do |example|
Dir.chdir(tmp_dir) do
- with_inflections do
- example.run
+ ::RuboCop::Formatter::TodoFormatter.with_base_directory(rubocop_todo_dir) do
+ with_inflections do
+ example.run
+ end
end
end
end
before do
- allow(RuboCop::TodoDir).to receive(:new).and_return(todo_dir)
-
# This Ruby file will trigger the following 3 offenses.
File.write('a.rb', <<~RUBY)
a+b
RUBY
- # Mimic GitLab's .rubocop_todo.yml avoids relying on RuboCop's
+ # Mimicking GitLab's .rubocop_todo.yml avoids relying on RuboCop's
# default.yml configuration.
File.write('.rubocop.yml', <<~YAML)
<% unless ENV['REVEAL_RUBOCOP_TODO'] == '1' %>
diff --git a/spec/tooling/danger/datateam_spec.rb b/spec/tooling/danger/datateam_spec.rb
index e6698dd8970..e4ab3a6f4b1 100644
--- a/spec/tooling/danger/datateam_spec.rb
+++ b/spec/tooling/danger/datateam_spec.rb
@@ -86,6 +86,20 @@ RSpec.describe Tooling::Danger::Datateam do
mr_labels: ['type::maintenance', 'Data Warehouse::Impacted'],
impacted: false,
impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ },
+ 'with metric status removed' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
+ changed_lines: ['+status: removed'],
+ mr_labels: ['type::maintenance'],
+ impacted: true,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ },
+ 'with metric status active' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
+ changed_lines: ['+status: active'],
+ mr_labels: ['type::maintenance'],
+ impacted: false,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
}
}
end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 78e9c8e9c62..f48ca5b8f8c 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -101,14 +101,15 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'Rakefile' | [:backend]
'FOO_VERSION' | [:backend]
- 'lib/scripts/bar.rb' | [:backend, :tooling]
- 'lib/scripts/bar.js' | [:frontend, :tooling]
+ 'scripts/glfm/bar.rb' | [:backend]
+ 'scripts/glfm/bar.js' | [:frontend]
+ 'scripts/lib/glfm/bar.rb' | [:backend]
+ 'scripts/lib/glfm/bar.js' | [:frontend]
'scripts/bar.rb' | [:backend, :tooling]
'scripts/bar.js' | [:frontend, :tooling]
- 'lib/scripts/subdir/bar.rb' | [:backend, :tooling]
- 'lib/scripts/subdir/bar.js' | [:frontend, :tooling]
'scripts/subdir/bar.rb' | [:backend, :tooling]
'scripts/subdir/bar.js' | [:frontend, :tooling]
+ 'scripts/foo' | [:tooling]
'Dangerfile' | [:tooling]
'danger/bundle_size/Dangerfile' | [:tooling]
@@ -118,7 +119,6 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'.gitlab-ci.yml' | [:tooling]
'.gitlab/ci/cng.gitlab-ci.yml' | [:tooling]
'.gitlab/ci/ee-specific-checks.gitlab-ci.yml' | [:tooling]
- 'scripts/foo' | [:tooling]
'tooling/danger/foo' | [:tooling]
'ee/tooling/danger/foo' | [:tooling]
'lefthook.yml' | [:tooling]
diff --git a/spec/tooling/lib/tooling/find_codeowners_spec.rb b/spec/tooling/lib/tooling/find_codeowners_spec.rb
index b29c5f35ec9..10c2a076847 100644
--- a/spec/tooling/lib/tooling/find_codeowners_spec.rb
+++ b/spec/tooling/lib/tooling/find_codeowners_spec.rb
@@ -31,13 +31,37 @@ RSpec.describe Tooling::FindCodeowners do
end
end.to output(<<~CODEOWNERS).to_stdout
[Section name]
- /dir0/dir1 @group
+ /dir0/dir1/ @group
/file @group
CODEOWNERS
end
end
describe '#load_definitions' do
+ before do
+ allow(subject).to receive(:load_config).and_return(
+ {
+ '[Authentication and Authorization]': {
+ '@gitlab-org/manage/authentication-and-authorization': {
+ allow: {
+ keywords: %w[password auth token],
+ patterns:
+ %w[
+ /{,ee/}app/**/*%{keyword}*{,/**/*}
+ /{,ee/}config/**/*%{keyword}*{,/**/*}
+ /{,ee/}lib/**/*%{keyword}*{,/**/*}
+ ]
+ },
+ deny: {
+ keywords: %w[*author.* *author_* *authored*],
+ patterns: ['%{keyword}']
+ }
+ }
+ }
+ }
+ )
+ end
+
it 'expands the allow and deny list with keywords and patterns' do
subject.load_definitions.each do |section, group_definitions|
group_definitions.each do |group, definitions|
@@ -54,56 +78,20 @@ RSpec.describe Tooling::FindCodeowners do
expect(auth).to eq(
allow: %w[
- /{,ee/}app/**/*password*{/**/*,}
- /{,ee/}config/**/*password*{/**/*,}
- /{,ee/}lib/**/*password*{/**/*,}
- /{,ee/}app/**/*auth*{/**/*,}
- /{,ee/}config/**/*auth*{/**/*,}
- /{,ee/}lib/**/*auth*{/**/*,}
- /{,ee/}app/**/*token*{/**/*,}
- /{,ee/}config/**/*token*{/**/*,}
- /{,ee/}lib/**/*token*{/**/*,}
+ /{,ee/}app/**/*password*{,/**/*}
+ /{,ee/}config/**/*password*{,/**/*}
+ /{,ee/}lib/**/*password*{,/**/*}
+ /{,ee/}app/**/*auth*{,/**/*}
+ /{,ee/}config/**/*auth*{,/**/*}
+ /{,ee/}lib/**/*auth*{,/**/*}
+ /{,ee/}app/**/*token*{,/**/*}
+ /{,ee/}config/**/*token*{,/**/*}
+ /{,ee/}lib/**/*token*{,/**/*}
],
deny: %w[
- **/*author.*{/**/*,}
- **/*author_*{/**/*,}
- **/*authored*{/**/*,}
- **/*authoring*{/**/*,}
- **/*.png*{/**/*,}
- **/*.svg*{/**/*,}
- **/*deploy_token*{/**/*,}
- **/*runner{,s}_token*{/**/*,}
- **/*job_token*{/**/*,}
- **/*autocomplete_tokens*{/**/*,}
- **/*dast_site_token*{/**/*,}
- **/*reset_prometheus_token*{/**/*,}
- **/*reset_registration_token*{/**/*,}
- **/*runners_registration_token*{/**/*,}
- **/*terraform_registry_token*{/**/*,}
- **/*tokenizer*{/**/*,}
- **/*filtered_search*{/**/*,}
- **/*/alert_management/*{/**/*,}
- **/*/analytics/*{/**/*,}
- **/*/bitbucket/*{/**/*,}
- **/*/clusters/*{/**/*,}
- **/*/clusters_list/*{/**/*,}
- **/*/dast/*{/**/*,}
- **/*/dast_profiles/*{/**/*,}
- **/*/dast_site_tokens/*{/**/*,}
- **/*/dast_site_validation/*{/**/*,}
- **/*/dependency_proxy/*{/**/*,}
- **/*/error_tracking/*{/**/*,}
- **/*/google_api/*{/**/*,}
- **/*/google_cloud/*{/**/*,}
- **/*/jira_connect/*{/**/*,}
- **/*/kubernetes/*{/**/*,}
- **/*/protected_environments/*{/**/*,}
- **/*/config/feature_flags/development/jira_connect_*{/**/*,}
- **/*/config/metrics/*{/**/*,}
- **/*/app/controllers/groups/dependency_proxy_auth_controller.rb*{/**/*,}
- **/*/app/finders/ci/auth_job_finder.rb*{/**/*,}
- **/*/ee/config/metrics/*{/**/*,}
- **/*/lib/gitlab/conan_token.rb*{/**/*,}
+ *author.*
+ *author_*
+ *authored*
]
)
end
@@ -159,12 +147,31 @@ RSpec.describe Tooling::FindCodeowners do
expected_flags =
::File::FNM_DOTMATCH | ::File::FNM_PATHNAME | ::File::FNM_EXTGLOB
- expect(File).to receive(:fnmatch?).with(pattern, path, expected_flags)
+ expect(File).to receive(:fnmatch?)
+ .with("/**/#{pattern}", path, expected_flags)
subject.path_matches?(pattern, path)
end
end
+ describe '#normalize_pattern' do
+ it 'returns /**/* if the input is *' do
+ expect(subject.normalize_pattern('*')).to eq('/**/*')
+ end
+
+ it 'prepends /** if the input does not start with /' do
+ expect(subject.normalize_pattern('app')).to eq('/**/app')
+ end
+
+ it 'returns the pattern if the input starts with /' do
+ expect(subject.normalize_pattern('/app')).to eq('/app')
+ end
+
+ it 'appends **/* if the input ends with /' do
+ expect(subject.normalize_pattern('/app/')).to eq('/app/**/*')
+ end
+ end
+
describe '#consolidate_paths' do
before do
allow(subject).to receive(:find_dir_maxdepth_1).and_return(<<~LINES)
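The `#normalize_pattern` examples added above fully determine the method's observable behaviour; a minimal Ruby sketch consistent with them (not necessarily the real implementation):

```ruby
# Anchor CODEOWNERS-style patterns the way the new examples expect:
#   '*'     -> '/**/*'
#   'app'   -> '/**/app'
#   '/app'  -> '/app'
#   '/app/' -> '/app/**/*'
def normalize_pattern(pattern)
  return '/**/*' if pattern == '*'

  pattern = "/**/#{pattern}" unless pattern.start_with?('/')
  pattern = "#{pattern}**/*" if pattern.end_with?('/')
  pattern
end
```
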
diff --git a/spec/tooling/lib/tooling/test_map_generator_spec.rb b/spec/tooling/lib/tooling/test_map_generator_spec.rb
index b52d78b01a3..1b369923d8d 100644
--- a/spec/tooling/lib/tooling/test_map_generator_spec.rb
+++ b/spec/tooling/lib/tooling/test_map_generator_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Tooling::TestMapGenerator do
:timestamp: 1602668405
:version:
---
- "./spec/factories_spec.rb[1]":
+ "./spec/models/factories_spec.rb[1]":
- lib/gitlab/current_settings.rb
- lib/feature.rb
- lib/gitlab/marginalia.rb
@@ -62,13 +62,13 @@ RSpec.describe Tooling::TestMapGenerator do
let(:expected_mapping) do
{
'lib/gitlab/current_settings.rb' => [
- 'spec/factories_spec.rb'
+ 'spec/models/factories_spec.rb'
],
'lib/feature.rb' => [
- 'spec/factories_spec.rb'
+ 'spec/models/factories_spec.rb'
],
'lib/gitlab/marginalia.rb' => [
- 'spec/factories_spec.rb'
+ 'spec/models/factories_spec.rb'
]
}
end
@@ -96,15 +96,15 @@ RSpec.describe Tooling::TestMapGenerator do
let(:expected_mapping) do
{
'lib/gitlab/current_settings.rb' => [
- 'spec/factories_spec.rb',
+ 'spec/models/factories_spec.rb',
'spec/models/project_spec.rb'
],
'lib/feature.rb' => [
- 'spec/factories_spec.rb',
+ 'spec/models/factories_spec.rb',
'spec/models/project_spec.rb'
],
'lib/gitlab/marginalia.rb' => [
- 'spec/factories_spec.rb',
+ 'spec/models/factories_spec.rb',
'spec/models/project_spec.rb'
]
}
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index 98034eb4b0a..10afcb18a73 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -1,8 +1,33 @@
# frozen_string_literal: true
+require 'spec_helper'
+
require_relative '../../../tooling/quality/test_level'
RSpec.describe Quality::TestLevel do
+ describe 'TEST_LEVEL_FOLDERS constant' do
+ it 'all directories it refers to exist', :aggregate_failures do
+ ee_only_directories = %w[
+ lib/ee/gitlab/background_migration
+ elastic
+ elastic_integration
+ replicators
+ ]
+
+ described_class::TEST_LEVEL_FOLDERS.values.flatten.each do |dir|
+ next if ee_only_directories.include?(dir) && !Gitlab.ee?
+
+ spec_directory = if ee_only_directories.include?(dir)
+ File.join('ee', 'spec', dir)
+ else
+ File.join('spec', dir)
+ end
+
+ expect(File.exist?(spec_directory)).to eq(true), "#{spec_directory} does not exist!"
+ end
+ end
+ end
+
describe '#pattern' do
context 'when level is all' do
it 'returns a pattern' do
@@ -21,7 +46,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,events,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling,component}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,events,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling,components}{,/**/}*_spec.rb")
end
end
@@ -89,56 +114,56 @@ RSpec.describe Quality::TestLevel do
context 'when level is frontend_fixture' do
it 'returns a regexp' do
expect(subject.regexp(:frontend_fixture))
- .to eq(%r{spec/(frontend/fixtures)})
+ .to eq(%r{spec/(frontend/fixtures)/})
end
end
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|events|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling|component)})
+ .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|events|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling|components)/})
end
end
context 'when level is migration' do
it 'returns a regexp' do
expect(subject.regexp(:migration))
- .to eq(%r{spec/(migrations|lib/gitlab/background_migration|lib/ee/gitlab/background_migration)})
+ .to eq(%r{spec/(migrations|lib/gitlab/background_migration|lib/ee/gitlab/background_migration)/})
end
end
context 'when level is background_migration' do
it 'returns a regexp' do
expect(subject.regexp(:background_migration))
- .to eq(%r{spec/(lib/gitlab/background_migration|lib/ee/gitlab/background_migration)})
+ .to eq(%r{spec/(lib/gitlab/background_migration|lib/ee/gitlab/background_migration)/})
end
end
context 'when level is integration' do
it 'returns a regexp' do
expect(subject.regexp(:integration))
- .to eq(%r{spec/(commands|controllers|mailers|requests)})
+ .to eq(%r{spec/(commands|controllers|mailers|requests)/})
end
end
context 'when level is system' do
it 'returns a regexp' do
expect(subject.regexp(:system))
- .to eq(%r{spec/(features)})
+ .to eq(%r{spec/(features)/})
end
end
context 'with a prefix' do
it 'returns a regexp' do
expect(described_class.new('ee/').regexp(:system))
- .to eq(%r{(ee/)spec/(features)})
+ .to eq(%r{(ee/)spec/(features)/})
end
end
context 'with several prefixes' do
it 'returns a regexp' do
expect(described_class.new(['', 'ee/', 'jh/']).regexp(:system))
- .to eq(%r{(|ee/|jh/)spec/(features)})
+ .to eq(%r{(|ee/|jh/)spec/(features)/})
end
end
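The only change to these regexps is the trailing `/`, which keeps a directory name from matching a longer sibling directory. An illustrative check (plain Ruby, not part of the spec; the `spec/features_helper` path is made up):

```ruby
old_pattern = %r{spec/(features)}
new_pattern = %r{spec/(features)/}

old_pattern.match?('spec/features_helper/foo_spec.rb') # => true  (false positive)
new_pattern.match?('spec/features_helper/foo_spec.rb') # => false
new_pattern.match?('spec/features/login_spec.rb')      # => true
```
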
diff --git a/spec/uploaders/gitlab_uploader_spec.rb b/spec/uploaders/gitlab_uploader_spec.rb
index 4e661e458ad..db70441aaf5 100644
--- a/spec/uploaders/gitlab_uploader_spec.rb
+++ b/spec/uploaders/gitlab_uploader_spec.rb
@@ -160,4 +160,10 @@ RSpec.describe GitlabUploader do
end
end
end
+
+ describe '.version' do
+ subject { uploader_class.version }
+
+ it { expect { subject }.to raise_error(RuntimeError, /not supported/) }
+ end
end
diff --git a/spec/uploaders/metric_image_uploader_spec.rb b/spec/uploaders/metric_image_uploader_spec.rb
new file mode 100644
index 00000000000..f714724480c
--- /dev/null
+++ b/spec/uploaders/metric_image_uploader_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MetricImageUploader do
+ describe '.workhorse_local_upload_path' do
+ it 'returns path that includes uploads dir' do
+ expect(described_class.workhorse_local_upload_path).to end_with('/uploads/tmp/uploads')
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb b/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb
new file mode 100644
index 00000000000..fbabc890a8b
--- /dev/null
+++ b/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'admin/application_settings/_repository_check.html.haml' do
+ let_it_be(:user) { create(:admin) }
+ let_it_be(:application_setting) { build(:application_setting) }
+
+ before do
+ assign(:application_setting, application_setting)
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ describe 'repository checks' do
+ it 'has the setting subsection' do
+ render
+
+ expect(rendered).to have_content('Repository checks')
+ end
+
+ it 'renders the correct setting subsection content' do
+ render
+
+ expect(rendered).to have_field('Enable repository checks')
+ expect(rendered).to have_link(
+ 'Clear all repository checks',
+ href: clear_repository_check_states_admin_application_settings_path
+ )
+ end
+ end
+
+ describe 'housekeeping' do
+ it 'has the setting subsection' do
+ render
+
+ expect(rendered).to have_content('Housekeeping')
+ end
+
+ it 'renders the correct setting subsection content' do
+ render
+
+ expect(rendered).to have_field('Enable automatic repository housekeeping')
+ expect(rendered).to have_field('Incremental repack period')
+ expect(rendered).to have_field('Full repack period')
+ expect(rendered).to have_field('Git GC period')
+ end
+ end
+
+ describe 'inactive project deletion' do
+ let_it_be(:application_setting) do
+ build(:application_setting,
+ delete_inactive_projects: true,
+ inactive_projects_delete_after_months: 2,
+ inactive_projects_min_size_mb: 250,
+ inactive_projects_send_warning_email_after_months: 1
+ )
+ end
+
+ it 'has the setting subsection' do
+ render
+
+ expect(rendered).to have_content('Inactive project deletion')
+ end
+
+ it 'renders the correct setting subsection content' do
+ render
+
+ expect(rendered).to have_selector('.js-inactive-project-deletion-form')
+ expect(rendered).to have_selector('[data-delete-inactive-projects="true"]')
+ expect(rendered).to have_selector('[data-inactive-projects-delete-after-months="2"]')
+ expect(rendered).to have_selector('[data-inactive-projects-min-size-mb="250"]')
+ expect(rendered).to have_selector('[data-inactive-projects-send-warning-email-after-months="1"]')
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/general.html.haml_spec.rb b/spec/views/admin/application_settings/general.html.haml_spec.rb
index 503e41eabc9..3614090d3cb 100644
--- a/spec/views/admin/application_settings/general.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/general.html.haml_spec.rb
@@ -68,4 +68,33 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
expect(rendered).not_to have_css('#js-add-license-toggle')
end
end
+
+ describe 'jira connect application key' do
+ it 'shows the jira connect application key section' do
+ render
+
+ expect(rendered).to have_css('#js-jira_connect-settings')
+ end
+
+ context 'when the jira_connect_oauth feature flag is disabled' do
+ before do
+ stub_feature_flags(jira_connect_oauth: false)
+ end
+
+ it 'does not show the jira connect application key section' do
+ render
+
+ expect(rendered).not_to have_css('#js-jira_connect-settings')
+ end
+ end
+ end
+
+ describe 'sign-up restrictions' do
+ it 'renders js-signup-form tag' do
+ render
+
+ expect(rendered).to match 'id="js-signup-form"'
+ expect(rendered).to match ' data-minimum-password-length='
+ end
+ end
end
diff --git a/spec/views/layouts/application.html.haml_spec.rb b/spec/views/layouts/application.html.haml_spec.rb
index 679d0b1ff60..0f359219718 100644
--- a/spec/views/layouts/application.html.haml_spec.rb
+++ b/spec/views/layouts/application.html.haml_spec.rb
@@ -14,6 +14,35 @@ RSpec.describe 'layouts/application' do
allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
end
+ describe "visual review toolbar" do
+ context "ENV['REVIEW_APPS_ENABLED'] is set to true" do
+ before do
+ stub_env(
+ 'REVIEW_APPS_ENABLED' => true,
+ 'REVIEW_APPS_MERGE_REQUEST_IID' => '123'
+ )
+ end
+
+ it 'renders the visual review toolbar' do
+ render
+
+ expect(rendered).to include('review-app-toolbar-script')
+ end
+ end
+
+ context "ENV['REVIEW_APPS_ENABLED'] is set to false" do
+ before do
+ stub_env('REVIEW_APPS_ENABLED', false)
+ end
+
+ it 'does not render the visual review toolbar' do
+ render
+
+ expect(rendered).not_to include('review-app-toolbar-script')
+ end
+ end
+ end
+
context 'body data elements for pageview context' do
let(:body_data) do
{
diff --git a/spec/views/projects/issues/_service_desk_info_content.html.haml_spec.rb b/spec/views/projects/issues/_service_desk_info_content.html.haml_spec.rb
index 1c6d729ddce..92ca0bb52d6 100644
--- a/spec/views/projects/issues/_service_desk_info_content.html.haml_spec.rb
+++ b/spec/views/projects/issues/_service_desk_info_content.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'projects/issues/_service_desk_info_content' do
+RSpec.describe 'projects/issues/service_desk/_service_desk_info_content' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:service_desk_address) { 'address@example.com' }
diff --git a/spec/views/projects/services/edit.html.haml_spec.rb b/spec/views/projects/settings/integrations/edit.html.haml_spec.rb
index 372ccf82a68..5f3c45166ef 100644
--- a/spec/views/projects/services/edit.html.haml_spec.rb
+++ b/spec/views/projects/settings/integrations/edit.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'projects/services/edit' do
+RSpec.describe 'projects/settings/integrations/edit' do
let(:integration) { create(:drone_ci_integration, project: project) }
let(:project) { create(:project) }
diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb
index ae59c1aa4b2..aff233b697f 100644
--- a/spec/views/projects/tags/index.html.haml_spec.rb
+++ b/spec/views/projects/tags/index.html.haml_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe 'projects/tags/index.html.haml' do
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:tags) { project.repository.tags }
let_it_be(:git_tag) { project.repository.tags.last }
let_it_be(:release) do
create(:release, project: project,
@@ -25,9 +24,31 @@ RSpec.describe 'projects/tags/index.html.haml' do
allow(view).to receive(:current_user).and_return(project.namespace.owner)
end
- it 'renders links to the Releases page for tags associated with a release' do
- render
- expect(rendered).to have_link(release.name, href: project_releases_path(project, anchor: release.tag))
+ context 'when tag is associated with a release' do
+ context 'when name does not contain a slash' do
+ it 'renders a link to the release page' do
+ render
+ expect(rendered).to have_link(release.name, href: project_release_path(project, release))
+ end
+ end
+
+ context 'when name contains a slash' do
+ let_it_be(:release) { create(:release, project: project, tag: 'test/v1') }
+
+ before_all do
+ project.repository.add_tag(project.owner, 'test/v1', project.default_branch_or_main)
+ project.repository.expire_tags_cache
+
+ project.releases.reload
+
+ assign(:tags, Kaminari.paginate_array(tags).page(0))
+ end
+
+ it 'renders a link to the release page with the slash escaped' do
+ render
+ expect(rendered).to have_link(release.name, href: project_release_path(project, release))
+ end
+ end
end
context 'when the most recent build for a tag has artifacts' do
@@ -104,4 +125,8 @@ RSpec.describe 'projects/tags/index.html.haml' do
)
end
end
+
+ def tags
+ project.repository.tags
+ end
end
diff --git a/spec/views/shared/projects/_inactive_project_deletion_alert.html.haml_spec.rb b/spec/views/shared/projects/_inactive_project_deletion_alert.html.haml_spec.rb
new file mode 100644
index 00000000000..117771d5f30
--- /dev/null
+++ b/spec/views/shared/projects/_inactive_project_deletion_alert.html.haml_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'shared/projects/_inactive_project_deletion_alert' do
+ let_it_be(:project) { create(:project) }
+
+ let(:text) { 'Due to inactivity, this project is scheduled to be deleted on 2022-04-01. Why is this scheduled?' }
+
+ shared_examples 'does not render' do
+ before do
+ render
+ end
+
+ it { expect(rendered).not_to have_content(text) }
+ end
+
+ before do
+ allow(view).to receive(:inactive_project_deletion_date).with(project).and_return('2022-04-01')
+ end
+
+ context 'without a project' do
+ before do
+ assign(:project, nil)
+ end
+
+ it_behaves_like 'does not render'
+ end
+
+ context 'with a project' do
+ before do
+ assign(:project, project)
+ allow(view).to receive(:show_inactive_project_deletion_banner?).and_return(inactive)
+ end
+
+ context 'when the project is active' do
+ let(:inactive) { false }
+
+ it_behaves_like 'does not render'
+ end
+
+ context 'when the project is inactive' do
+ let(:inactive) { true }
+
+ before do
+ render
+ end
+
+ it 'renders the alert' do
+ expect(rendered).to have_content(text)
+ end
+ end
+ end
+end
diff --git a/spec/workers/build_success_worker_spec.rb b/spec/workers/build_success_worker_spec.rb
index 0583d79ed46..3241c931dc5 100644
--- a/spec/workers/build_success_worker_spec.rb
+++ b/spec/workers/build_success_worker_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe BuildSuccessWorker do
context 'when build exists' do
context 'when the build will stop an environment' do
- let!(:build) { create(:ci_build, :stop_review_app, environment: environment.name, project: environment.project) }
+ let!(:build) { create(:ci_build, :stop_review_app, environment: environment.name, project: environment.project, status: :success) }
let(:environment) { create(:environment, state: :available) }
it 'stops the environment' do
@@ -18,6 +18,21 @@ RSpec.describe BuildSuccessWorker do
expect(environment.reload).to be_stopped
end
+
+ context 'when the build fails' do
+ before do
+ build.update!(status: :failed)
+ environment.update!(state: :available)
+ end
+
+ it 'does not stop the environment' do
+ expect(environment).to be_available
+
+ subject
+
+ expect(environment.reload).not_to be_stopped
+ end
+ end
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index 209ae8862b6..b5f20e9ff76 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -22,9 +22,10 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('FakePipeline', pipeline_class)
+ allow(entity).to receive(:pipeline_exists?).with('FakePipeline').and_return(true)
allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
allow(instance).to receive(:pipelines)
- .and_return([[0, pipeline_class]])
+ .and_return([{ stage: 0, pipeline: pipeline_class }])
end
end
@@ -101,18 +102,26 @@ RSpec.describe BulkImports::PipelineWorker do
pipeline_tracker = create(
:bulk_import_tracker,
entity: entity,
- pipeline_name: 'InexistentPipeline',
+ pipeline_name: 'FakePipeline',
status_event: 'enqueue'
)
+ allow(subject).to receive(:jid).and_return('jid')
+
+ expect_next_instance_of(pipeline_class) do |pipeline|
+ expect(pipeline)
+ .to receive(:run)
+ .and_raise(StandardError, 'Error!')
+ end
+
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:error)
.with(
hash_including(
- 'pipeline_name' => 'InexistentPipeline',
+ 'pipeline_name' => 'FakePipeline',
'entity_id' => entity.id,
- 'message' => "'InexistentPipeline' is not a valid BulkImport Pipeline"
+ 'message' => 'Error!'
)
)
end
@@ -120,7 +129,7 @@ RSpec.describe BulkImports::PipelineWorker do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
- instance_of(BulkImports::Error),
+ instance_of(StandardError),
entity_id: entity.id,
pipeline_name: pipeline_tracker.pipeline_name
)
@@ -129,7 +138,18 @@ RSpec.describe BulkImports::PipelineWorker do
.to receive(:perform_async)
.with(entity.id, pipeline_tracker.stage)
- allow(subject).to receive(:jid).and_return('jid')
+ expect(BulkImports::Failure)
+ .to receive(:create)
+ .with(
+ a_hash_including(
+ bulk_import_entity_id: entity.id,
+ pipeline_class: 'FakePipeline',
+ pipeline_step: 'pipeline_worker_run',
+ exception_class: 'StandardError',
+ exception_message: 'Error!',
+ correlation_id_value: anything
+ )
+ )
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
@@ -144,18 +164,19 @@ RSpec.describe BulkImports::PipelineWorker do
pipeline_tracker = create(
:bulk_import_tracker,
entity: entity,
- pipeline_name: 'Pipeline',
+ pipeline_name: 'FakePipeline',
status_event: 'enqueue'
)
entity.update!(status: -1)
+ expect(BulkImports::Failure).to receive(:create)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:error)
.with(
hash_including(
- 'pipeline_name' => 'Pipeline',
+ 'pipeline_name' => 'FakePipeline',
'entity_id' => entity.id,
'message' => 'Failed entity status'
)
@@ -168,56 +189,78 @@ RSpec.describe BulkImports::PipelineWorker do
end
end
- context 'when it is a network error' do
- it 'reenqueue on retriable network errors' do
- pipeline_tracker = create(
+ context 'when network error is raised' do
+ let(:pipeline_tracker) do
+ create(
:bulk_import_tracker,
entity: entity,
pipeline_name: 'FakePipeline',
status_event: 'enqueue'
)
+ end
- exception = BulkImports::NetworkError.new(
- response: double(code: 429, headers: {})
- )
+ let(:exception) do
+ BulkImports::NetworkError.new(response: instance_double(HTTParty::Response, code: 429, headers: {}))
+ end
+
+ before do
+ allow(subject).to receive(:jid).and_return('jid')
expect_next_instance_of(pipeline_class) do |pipeline|
expect(pipeline)
.to receive(:run)
.and_raise(exception)
end
+ end
- allow(subject).to receive(:jid).and_return('jid')
-
- expect_any_instance_of(BulkImports::Tracker) do |tracker|
- expect(tracker).to receive(:retry).and_call_original
- end
+ context 'when error is retriable' do
+ it 'reenqueues the worker' do
+ expect_any_instance_of(BulkImports::Tracker) do |tracker|
+ expect(tracker).to receive(:retry).and_call_original
+ end
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ hash_including(
+ 'pipeline_name' => 'FakePipeline',
+ 'entity_id' => entity.id
+ )
+ )
+ end
- expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger)
- .to receive(:info)
+ expect(described_class)
+ .to receive(:perform_in)
.with(
- hash_including(
- 'pipeline_name' => 'FakePipeline',
- 'entity_id' => entity.id
- )
+ 60.seconds,
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ pipeline_tracker.entity.id
)
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.enqueued?).to be_truthy
end
- expect(described_class)
- .to receive(:perform_in)
- .with(
- 60.seconds,
- pipeline_tracker.id,
- pipeline_tracker.stage,
- pipeline_tracker.entity.id
- )
+ context 'when error is not retriable' do
+ let(:exception) do
+ BulkImports::NetworkError.new(response: instance_double(HTTParty::Response, code: 503, headers: {}))
+ end
- subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ it 'marks tracker as failed and logs the error' do
+ expect(described_class).not_to receive(:perform_in)
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
- pipeline_tracker.reload
+ pipeline_tracker.reload
- expect(pipeline_tracker.enqueued?).to be_truthy
+ expect(pipeline_tracker.failed?).to eq(true)
+ end
+ end
end
end
end
@@ -253,13 +296,14 @@ RSpec.describe BulkImports::PipelineWorker do
allow_next_instance_of(BulkImports::Groups::Stage) do |instance|
allow(instance).to receive(:pipelines)
- .and_return([[0, file_extraction_pipeline]])
+ .and_return([{ stage: 0, pipeline: file_extraction_pipeline }])
end
end
it 'runs the pipeline successfully' do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
allow(status).to receive(:started?).and_return(false)
+ allow(status).to receive(:empty?).and_return(false)
allow(status).to receive(:failed?).and_return(false)
end
@@ -272,6 +316,28 @@ RSpec.describe BulkImports::PipelineWorker do
it 'reenqueues pipeline worker' do
allow_next_instance_of(BulkImports::ExportStatus) do |status|
allow(status).to receive(:started?).and_return(true)
+ allow(status).to receive(:empty?).and_return(false)
+ allow(status).to receive(:failed?).and_return(false)
+ end
+
+ expect(described_class)
+ .to receive(:perform_in)
+ .with(
+ described_class::FILE_EXTRACTION_PIPELINE_PERFORM_DELAY,
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ entity.id
+ )
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
+
+ context 'when export status is empty' do
+ it 'reenqueues pipeline worker' do
+ allow_next_instance_of(BulkImports::ExportStatus) do |status|
+ allow(status).to receive(:started?).and_return(false)
+ allow(status).to receive(:empty?).and_return(true)
allow(status).to receive(:failed?).and_return(false)
end
diff --git a/spec/workers/ci/archive_trace_worker_spec.rb b/spec/workers/ci/archive_trace_worker_spec.rb
index 889e0c92042..52723ff5823 100644
--- a/spec/workers/ci/archive_trace_worker_spec.rb
+++ b/spec/workers/ci/archive_trace_worker_spec.rb
@@ -16,6 +16,34 @@ RSpec.describe Ci::ArchiveTraceWorker do
subject
end
+
+ it 'preloads the job associations needed for archiving' do
+ allow_next_instance_of(Ci::ArchiveTraceService) do |instance|
+ allow(instance).to receive(:execute) do |job|
+ expect(job.association(:project)).to be_loaded
+ expect(job.association(:pending_state)).to be_loaded
+ end
+ end
+
+ subject
+ end
+
+ context 'when sticky_ci_archive_trace_worker is disabled' do
+ before do
+ stub_feature_flags(sticky_ci_archive_trace_worker: false)
+ end
+
+ it 'does not preload associations' do
+ allow_next_instance_of(Ci::ArchiveTraceService) do |instance|
+ allow(instance).to receive(:execute) do |job|
+ expect(job.association(:project)).not_to be_loaded
+ expect(job.association(:pending_state)).not_to be_loaded
+ end
+ end
+
+ subject
+ end
+ end
end
context 'when job is not found' do
diff --git a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
index be7f7ef5c8c..785cba24f9d 100644
--- a/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
+++ b/spec/workers/ci/resource_groups/assign_resource_from_resource_group_worker_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupWorker do
context 'when resource group exists' do
it 'executes AssignResourceFromResourceGroupService' do
- expect_next_instances_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, 2, resource_group.project, nil) do |service|
+ expect_next_instances_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService, 2, false, resource_group.project, nil) do |service|
expect(service).to receive(:execute).with(resource_group)
end
diff --git a/spec/workers/clusters/applications/activate_service_worker_spec.rb b/spec/workers/clusters/applications/activate_integration_worker_spec.rb
index d13ff76613c..ecb49be5a4b 100644
--- a/spec/workers/clusters/applications/activate_service_worker_spec.rb
+++ b/spec/workers/clusters/applications/activate_integration_worker_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
- context 'cluster exists' do
+RSpec.describe Clusters::Applications::ActivateIntegrationWorker, '#perform' do
+ context 'when cluster exists' do
describe 'prometheus integration' do
let(:integration_name) { 'prometheus' }
@@ -11,7 +11,7 @@ RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
create(:clusters_integrations_prometheus, cluster: cluster)
end
- context 'cluster type: group' do
+ context 'with cluster type: group' do
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
let(:cluster) { create(:cluster_for_group, groups: [group]) }
@@ -22,7 +22,7 @@ RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
end
end
- context 'cluster type: project' do
+ context 'with cluster type: project' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
@@ -32,7 +32,7 @@ RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
end
end
- context 'cluster type: instance' do
+ context 'with cluster type: instance' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, :instance) }
@@ -40,11 +40,20 @@ RSpec.describe Clusters::Applications::ActivateServiceWorker, '#perform' do
expect { described_class.new.perform(cluster.id, integration_name) }
.to change { project.reload.prometheus_integration&.active }.from(nil).to(true)
end
+
+ context 'when using the old worker class' do
+ let(:described_class) { Clusters::Applications::ActivateServiceWorker }
+
+ it 'ensures Prometheus integration is activated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { project.reload.prometheus_integration&.active }.from(nil).to(true)
+ end
+ end
end
end
end
- context 'cluster does not exist' do
+ context 'when cluster does not exist' do
it 'does not raise Record Not Found error' do
expect { described_class.new.perform(0, 'ignored in this context') }.not_to raise_error
end
diff --git a/spec/workers/clusters/applications/deactivate_service_worker_spec.rb b/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb
index 77788cfa893..3f0188eee23 100644
--- a/spec/workers/clusters/applications/deactivate_service_worker_spec.rb
+++ b/spec/workers/clusters/applications/deactivate_integration_worker_spec.rb
@@ -2,20 +2,22 @@
require 'spec_helper'
-RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
- context 'cluster exists' do
+RSpec.describe Clusters::Applications::DeactivateIntegrationWorker, '#perform' do
+ context 'when cluster exists' do
describe 'prometheus integration' do
let(:integration_name) { 'prometheus' }
let!(:integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
- context 'prometheus integration exists' do
- let!(:prometheus_integration) { create(:prometheus_integration, project: project, manual_configuration: false, active: true) }
+ context 'when prometheus integration exists' do
+ let!(:prometheus_integration) do
+ create(:prometheus_integration, project: project, manual_configuration: false, active: true)
+ end
before do
integration.delete # the prometheus integration's before_save synchronises the active state with the integration's existence.
end
- context 'cluster type: group' do
+ context 'with cluster type: group' do
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
let(:cluster) { create(:cluster_for_group, groups: [group]) }
@@ -26,7 +28,7 @@ RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
end
end
- context 'cluster type: project' do
+ context 'with cluster type: project' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
@@ -36,7 +38,7 @@ RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
end
end
- context 'cluster type: instance' do
+ context 'with cluster type: instance' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, :instance) }
@@ -44,11 +46,20 @@ RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
expect { described_class.new.perform(cluster.id, integration_name) }
.to change { prometheus_integration.reload.active }.from(true).to(false)
end
+
+ context 'when using the old worker class' do
+ let(:described_class) { Clusters::Applications::DeactivateServiceWorker }
+
+ it 'ensures Prometheus integration is deactivated' do
+ expect { described_class.new.perform(cluster.id, integration_name) }
+ .to change { prometheus_integration.reload.active }.from(true).to(false)
+ end
+ end
end
end
- context 'prometheus integration does not exist' do
- context 'cluster type: project' do
+ context 'when prometheus integration does not exist' do
+ context 'with cluster type: project' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
@@ -60,7 +71,7 @@ RSpec.describe Clusters::Applications::DeactivateServiceWorker, '#perform' do
end
end
- context 'cluster does not exist' do
+ context 'when cluster does not exist' do
it 'raises Record Not Found error' do
expect { described_class.new.perform(0, 'ignored in this context') }.to raise_error(ActiveRecord::RecordNotFound)
end
diff --git a/spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb b/spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb
index 0191a2898b2..d1dd1cd738b 100644
--- a/spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb
+++ b/spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Clusters::Applications::WaitForUninstallAppWorker, '#perform' do
subject { described_class.new.perform(app_name, app_id) }
- context 'app exists' do
+ context 'when app exists' do
let(:service) { instance_double(Clusters::Applications::CheckUninstallProgressService) }
it 'calls the check service' do
@@ -20,7 +20,7 @@ RSpec.describe Clusters::Applications::WaitForUninstallAppWorker, '#perform' do
end
end
- context 'app does not exist' do
+ context 'when app does not exist' do
let(:app_id) { 0 }
it 'does not call the check service' do
diff --git a/spec/workers/concerns/cronjob_queue_spec.rb b/spec/workers/concerns/cronjob_queue_spec.rb
index d1ad5c65ea3..0244535051f 100644
--- a/spec/workers/concerns/cronjob_queue_spec.rb
+++ b/spec/workers/concerns/cronjob_queue_spec.rb
@@ -11,11 +11,33 @@ RSpec.describe CronjobQueue do
include ApplicationWorker
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
+
+ def perform
+ AnotherWorker.perform_async('identifier')
+ end
+ end
+ end
+
+ let(:another_worker) do
+ Class.new do
+ def self.name
+ 'AnotherWorker'
+ end
+
+ include ApplicationWorker
+
+ # To keep track of the context that was active for certain arguments
+ cattr_accessor(:contexts) { {} }
+
+ def perform(identifier, *args)
+ self.class.contexts.merge!(identifier => Gitlab::ApplicationContext.current)
+ end
end
end
before do
stub_const("DummyWorker", worker)
+ stub_const("AnotherWorker", another_worker)
end
it 'sets the queue name of a worker' do
@@ -27,7 +49,7 @@ RSpec.describe CronjobQueue do
end
it 'automatically clears project, user and namespace from the context', :aggregate_failures do
- worker_context = worker.get_worker_context.to_lazy_hash.transform_values(&:call)
+ worker_context = worker.get_worker_context.to_lazy_hash.transform_values { |v| v.try(:call) }
expect(worker_context[:user]).to be_nil
expect(worker_context[:root_namespace]).to be_nil
@@ -42,6 +64,14 @@ RSpec.describe CronjobQueue do
expect(job).to include('meta.caller_id' => 'Cronjob')
end
+ it 'gets root_caller_id from the cronjob' do
+ Sidekiq::Testing.inline! do
+ worker.perform_async
+ end
+
+ expect(AnotherWorker.contexts['identifier']).to include('meta.root_caller_id' => 'Cronjob')
+ end
+
it 'does not set the caller_id if there was already one in the context' do
Gitlab::ApplicationContext.with_context(caller_id: 'already set') do
worker.perform_async
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
index f141a1ad7ad..eeccdbd0e2d 100644
--- a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
+RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_shared_state do
let(:job_tracker) do
described_class.new('namespace')
end
diff --git a/spec/workers/concerns/worker_attributes_spec.rb b/spec/workers/concerns/worker_attributes_spec.rb
index ad9d5eeccbe..5e8f68923fd 100644
--- a/spec/workers/concerns/worker_attributes_spec.rb
+++ b/spec/workers/concerns/worker_attributes_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe WorkerAttributes do
+ using RSpec::Parameterized::TableSyntax
+
let(:worker) do
Class.new do
def self.name
@@ -13,21 +15,64 @@ RSpec.describe WorkerAttributes do
end
end
- describe '.data_consistency' do
- context 'with valid data_consistency' do
- it 'returns correct data_consistency' do
- worker.data_consistency(:sticky)
-
- expect(worker.get_data_consistency).to eq(:sticky)
+ let(:child_worker) do
+ Class.new(worker) do
+ def self.name
+ "TestChildworker"
end
end
+ end
+
+ describe 'class attributes' do
+ # rubocop: disable Layout/LineLength
+ where(:getter, :setter, :default, :values, :expected) do
+ :get_feature_category | :feature_category | nil | [:foo] | :foo
+ :get_urgency | :urgency | :low | [:high] | :high
+ :get_data_consistency | :data_consistency | :always | [:sticky] | :sticky
+ :get_worker_resource_boundary | :worker_resource_boundary | :unknown | [:cpu] | :cpu
+ :get_weight | :weight | 1 | [3] | 3
+ :get_tags | :tags | [] | [:foo, :bar] | [:foo, :bar]
+ :get_deduplicate_strategy | :deduplicate | :until_executing | [:none] | :none
+ :get_deduplication_options | :deduplicate | {} | [:none, including_scheduled: true] | { including_scheduled: true }
+ :worker_has_external_dependencies? | :worker_has_external_dependencies! | false | [] | true
+ :idempotent? | :idempotent! | false | [] | true
+ :big_payload? | :big_payload! | false | [] | true
+ end
+ # rubocop: enable Layout/LineLength
+
+ with_them do
+ context 'when the attribute is set' do
+ before do
+ worker.public_send(setter, *values)
+ end
+
+ it 'returns the expected value' do
+ expect(worker.public_send(getter)).to eq(expected)
+ expect(child_worker.public_send(getter)).to eq(expected)
+ end
+ end
+
+ context 'when the attribute is not set' do
+ it 'returns the default value' do
+ expect(worker.public_send(getter)).to eq(default)
+ expect(child_worker.public_send(getter)).to eq(default)
+ end
+ end
+
+ context 'when the attribute is set in the child worker' do
+ before do
+ child_worker.public_send(setter, *values)
+ end
- context 'when data_consistency is not provided' do
- it 'defaults to :always' do
- expect(worker.get_data_consistency).to eq(:always)
+ it 'returns the default value for the parent, and the expected value for the child' do
+ expect(worker.public_send(getter)).to eq(default)
+ expect(child_worker.public_send(getter)).to eq(expected)
+ end
end
end
+ end
+ describe '.data_consistency' do
context 'with invalid data_consistency' do
it 'raise exception' do
expect { worker.data_consistency(:invalid) }
@@ -45,36 +90,12 @@ RSpec.describe WorkerAttributes do
it 'returns correct feature flag value' do
worker.data_consistency(:sticky, feature_flag: :test_feature_flag)
- expect(worker.get_data_consistency_feature_flag_enabled?).not_to be_truthy
+ expect(worker.get_data_consistency_feature_flag_enabled?).not_to be(true)
+ expect(child_worker.get_data_consistency_feature_flag_enabled?).not_to be(true)
end
end
end
- describe '.idempotent?' do
- subject(:idempotent?) { worker.idempotent? }
-
- context 'when the worker is idempotent' do
- before do
- worker.idempotent!
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'when the worker is not idempotent' do
- it { is_expected.to be_falsey }
- end
- end
-
- describe '.deduplicate' do
- it 'sets deduplication_strategy and deduplication_options' do
- worker.deduplicate(:until_executing, including_scheduled: true)
-
- expect(worker.send(:class_attributes)[:deduplication_strategy]).to eq(:until_executing)
- expect(worker.send(:class_attributes)[:deduplication_options]).to eq(including_scheduled: true)
- end
- end
-
describe '#deduplication_enabled?' do
subject(:deduplication_enabled?) { worker.deduplication_enabled? }
@@ -83,7 +104,10 @@ RSpec.describe WorkerAttributes do
worker.deduplicate(:until_executing)
end
- it { is_expected.to eq(true) }
+ it 'returns true' do
+ expect(worker.deduplication_enabled?).to be(true)
+ expect(child_worker.deduplication_enabled?).to be(true)
+ end
end
context 'when feature flag is set' do
@@ -99,7 +123,10 @@ RSpec.describe WorkerAttributes do
stub_feature_flags(my_feature_flag: true)
end
- it { is_expected.to eq(true) }
+ it 'returns true' do
+ expect(worker.deduplication_enabled?).to be(true)
+ expect(child_worker.deduplication_enabled?).to be(true)
+ end
end
context 'when the FF is disabled' do
@@ -107,7 +134,10 @@ RSpec.describe WorkerAttributes do
stub_feature_flags(my_feature_flag: false)
end
- it { is_expected.to eq(false) }
+ it 'returns false' do
+ expect(worker.deduplication_enabled?).to be(false)
+ expect(child_worker.deduplication_enabled?).to be(false)
+ end
end
end
end
diff --git a/spec/workers/container_registry/migration/enqueuer_worker_spec.rb b/spec/workers/container_registry/migration/enqueuer_worker_spec.rb
index a57a9e3b2e8..ab3bd8f75d4 100644
--- a/spec/workers/container_registry/migration/enqueuer_worker_spec.rb
+++ b/spec/workers/container_registry/migration/enqueuer_worker_spec.rb
@@ -32,660 +32,356 @@ RSpec.describe ContainerRegistry::Migration::EnqueuerWorker, :aggregate_failures
end
end
- context 'with container_registry_migration_phase2_enqueuer_loop disabled' do
+ context 'migrations are disabled' do
before do
- stub_feature_flags(container_registry_migration_phase2_enqueuer_loop: false)
+ allow(ContainerRegistry::Migration).to receive(:enabled?).and_return(false)
end
- shared_examples 're-enqueuing based on capacity' do |capacity_limit: 4|
- context 'below capacity' do
- before do
- allow(ContainerRegistry::Migration).to receive(:capacity).and_return(capacity_limit)
- end
-
- it 're-enqueues the worker' do
- expect(described_class).to receive(:perform_async)
- expect(described_class).to receive(:perform_in).with(7.seconds)
-
- subject
- end
-
- context 'enqueue_twice feature flag disabled' do
- before do
- stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
- end
-
- it 'only enqueues the worker once' do
- expect(described_class).to receive(:perform_async)
- expect(described_class).not_to receive(:perform_in)
-
- subject
- end
- end
- end
-
- context 'above capacity' do
- before do
- allow(ContainerRegistry::Migration).to receive(:capacity).and_return(-1)
- end
-
- it 'does not re-enqueue the worker' do
- expect(described_class).not_to receive(:perform_async)
- expect(described_class).not_to receive(:perform_in).with(7.seconds)
-
- subject
- end
- end
- end
-
- context 'with qualified repository' do
- before do
- allow_worker(on: :next_repository) do |repository|
- allow(repository).to receive(:migration_pre_import).and_return(:ok)
- end
- end
-
- shared_examples 'starting the next import' do
- it 'starts the pre-import for the next qualified repository' do
- expect_log_extra_metadata(
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'pre_importing'
- )
-
- expect { subject }.to make_queries_matching(/LIMIT 2/)
-
- expect(container_repository.reload).to be_pre_importing
- end
- end
-
- it_behaves_like 'starting the next import'
-
- context 'when the new pre-import maxes out the capacity' do
- before do
- # set capacity to 10
- stub_feature_flags(
- container_registry_migration_phase2_capacity_25: false
- )
-
- # Plus 2 created above gives 9 importing repositories
- create_list(:container_repository, 7, :importing)
- end
-
- it 'does not re-enqueue the worker' do
- expect(described_class).not_to receive(:perform_async)
- expect(described_class).not_to receive(:perform_in)
-
- subject
- end
- end
-
- it_behaves_like 're-enqueuing based on capacity'
-
- context 'max tag count is 0' do
- before do
- stub_application_setting(container_registry_import_max_tags_count: 0)
- # Add 8 tags to the next repository
- stub_container_registry_tags(
- repository: container_repository.path, tags: %w(a b c d e f g h), with_manifest: true
- )
- end
-
- it_behaves_like 'starting the next import'
- end
- end
-
- context 'migrations are disabled' do
+ it_behaves_like 'no action' do
before do
- allow(ContainerRegistry::Migration).to receive(:enabled?).and_return(false)
- end
-
- it_behaves_like 'no action' do
- before do
- expect_log_extra_metadata(migration_enabled: false)
- end
+ expect_log_extra_metadata(migration_enabled: false)
end
end
+ end
- context 'above capacity' do
+  context 'when no repository qualifies' do
+ include_examples 'an idempotent worker' do
before do
- create(:container_repository, :importing)
- create(:container_repository, :importing)
- allow(ContainerRegistry::Migration).to receive(:capacity).and_return(1)
- end
-
- it_behaves_like 'no action' do
- before do
- expect_log_extra_metadata(below_capacity: false, max_capacity_setting: 1)
- end
- end
-
- it 'does not re-enqueue the worker' do
- expect(ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_async)
- expect(ContainerRegistry::Migration::EnqueuerWorker).not_to receive(:perform_in)
-
- subject
- end
- end
-
- context 'too soon before previous completed import step' do
- where(:state, :timestamp) do
- :import_done | :migration_import_done_at
- :pre_import_done | :migration_pre_import_done_at
- :import_aborted | :migration_aborted_at
- :import_skipped | :migration_skipped_at
- end
-
- with_them do
- before do
- allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
- create(:container_repository, state, timestamp => 1.minute.ago)
- end
-
- it_behaves_like 'no action' do
- before do
- expect_log_extra_metadata(waiting_time_passed: false, current_waiting_time_setting: 45.minutes)
- end
- end
+ allow(ContainerRepository).to receive(:ready_for_import).and_return(ContainerRepository.none)
end
- context 'when last completed repository has nil timestamps' do
- before do
- allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
- create(:container_repository, migration_state: 'import_done')
- end
-
- it 'continues to try the next import' do
- expect { subject }.to change { container_repository.reload.migration_state }
- end
- end
- end
-
- context 'when an aborted import is available' do
- let_it_be(:aborted_repository) { create(:container_repository, :import_aborted) }
-
- context 'with a successful registry request' do
- before do
- allow_worker(on: :next_aborted_repository) do |repository|
- allow(repository).to receive(:migration_import).and_return(:ok)
- allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
- end
- end
-
- it 'retries the import for the aborted repository' do
- expect_log_extra_metadata(
- import_type: 'retry',
- container_repository_id: aborted_repository.id,
- container_repository_path: aborted_repository.path,
- container_repository_migration_state: 'importing'
- )
-
- subject
-
- expect(aborted_repository.reload).to be_importing
- expect(container_repository.reload).to be_default
- end
-
- it_behaves_like 're-enqueuing based on capacity'
- end
-
- context 'when an error occurs' do
- it 'does not abort that migration' do
- allow_worker(on: :next_aborted_repository) do |repository|
- allow(repository).to receive(:retry_aborted_migration).and_raise(StandardError)
- end
-
- expect_log_extra_metadata(
- import_type: 'retry',
- container_repository_id: aborted_repository.id,
- container_repository_path: aborted_repository.path,
- container_repository_migration_state: 'import_aborted'
- )
-
- subject
-
- expect(aborted_repository.reload).to be_import_aborted
- expect(container_repository.reload).to be_default
- end
- end
+ it_behaves_like 'no action'
end
+ end
- context 'when no repository qualifies' do
- include_examples 'an idempotent worker' do
- before do
- allow(ContainerRepository).to receive(:ready_for_import).and_return(ContainerRepository.none)
- end
+ context 'when multiple aborted imports are available' do
+ let_it_be(:aborted_repository1) { create(:container_repository, :import_aborted) }
+ let_it_be(:aborted_repository2) { create(:container_repository, :import_aborted) }
- it_behaves_like 'no action'
- end
+ before do
+ container_repository.update!(created_at: 30.seconds.ago)
end
- context 'over max tag count' do
+ context 'with successful registry requests' do
before do
- stub_application_setting(container_registry_import_max_tags_count: 2)
+ allow_worker(on: :next_aborted_repository) do |repository|
+ allow(repository).to receive(:migration_import).and_return(:ok)
+ allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
+ end
end
- it 'skips the repository' do
- expect_log_extra_metadata(
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'import_skipped',
- tags_count_too_high: true,
- max_tags_count_setting: 2
+ it 'retries the import for the aborted repository' do
+ expect_log_info(
+ [
+ {
+ import_type: 'retry',
+ container_repository_id: aborted_repository1.id,
+ container_repository_path: aborted_repository1.path,
+ container_repository_migration_state: 'importing'
+ },
+ {
+ import_type: 'retry',
+ container_repository_id: aborted_repository2.id,
+ container_repository_path: aborted_repository2.path,
+ container_repository_migration_state: 'importing'
+ }
+ ]
)
- subject
-
- expect(container_repository.reload).to be_import_skipped
- expect(container_repository.migration_skipped_reason).to eq('too_many_tags')
- expect(container_repository.migration_skipped_at).not_to be_nil
- end
+ expect(worker).to receive(:handle_next_migration).and_call_original
- context 're-enqueuing' do
- before do
- # skipping will also re-enqueue, so we isolate the capacity behavior here
- allow_worker(on: :next_repository) do |repository|
- allow(repository).to receive(:skip_import).and_return(true)
- end
- end
+ subject
- it_behaves_like 're-enqueuing based on capacity', capacity_limit: 3
+ expect(aborted_repository1.reload).to be_importing
+ expect(aborted_repository2.reload).to be_importing
end
end
context 'when an error occurs' do
- before do
- allow(ContainerRegistry::Migration).to receive(:max_tags_count).and_raise(StandardError)
- end
-
- it 'aborts the import' do
- expect_log_extra_metadata(
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'import_aborted'
- )
+ it 'does abort that migration' do
+ allow_worker(on: :next_aborted_repository) do |repository|
+ allow(repository).to receive(:retry_aborted_migration).and_raise(StandardError)
+ end
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(StandardError),
- next_repository_id: container_repository.id
+ expect_log_info(
+ [
+ {
+ import_type: 'retry',
+ container_repository_id: aborted_repository1.id,
+ container_repository_path: aborted_repository1.path,
+ container_repository_migration_state: 'import_aborted'
+ }
+ ]
)
subject
- expect(container_repository.reload).to be_import_aborted
+ expect(aborted_repository1.reload).to be_import_aborted
+ expect(aborted_repository2.reload).to be_import_aborted
end
end
+ end
- context 'with the exclusive lease taken' do
- let(:lease_key) { worker.send(:lease_key) }
+ context 'when multiple qualified repositories are available' do
+ let_it_be(:container_repository2) { create(:container_repository, created_at: 2.days.ago) }
- before do
- stub_exclusive_lease_taken(lease_key, timeout: 30.minutes)
+ before do
+ allow_worker(on: :next_repository) do |repository|
+ allow(repository).to receive(:migration_pre_import).and_return(:ok)
end
- it 'does not perform' do
- expect(worker).not_to receive(:runnable?)
- expect(worker).not_to receive(:re_enqueue_if_capacity)
-
- subject
- end
+ stub_container_registry_tags(
+ repository: container_repository2.path,
+ tags: %w(tag4 tag5 tag6),
+ with_manifest: true
+ )
end
- end
- context 'with container_registry_migration_phase2_enqueuer_loop enabled' do
- context 'migrations are disabled' do
- before do
- allow(ContainerRegistry::Migration).to receive(:enabled?).and_return(false)
- end
+ shared_examples 'starting all the next imports' do
+ it 'starts the pre-import for the next qualified repositories' do
+ expect_log_info(
+ [
+ {
+ import_type: 'next',
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ container_repository_migration_state: 'pre_importing'
+ },
+ {
+ import_type: 'next',
+ container_repository_id: container_repository2.id,
+ container_repository_path: container_repository2.path,
+ container_repository_migration_state: 'pre_importing'
+ }
+ ]
+ )
- it_behaves_like 'no action' do
- before do
- expect_log_extra_metadata(migration_enabled: false)
- end
- end
- end
+ expect(worker).to receive(:handle_next_migration).exactly(3).times.and_call_original
- context 'with no repository qualifies' do
- include_examples 'an idempotent worker' do
- before do
- allow(ContainerRepository).to receive(:ready_for_import).and_return(ContainerRepository.none)
- end
+ expect { subject }.to make_queries_matching(/LIMIT 2/)
- it_behaves_like 'no action'
+ expect(container_repository.reload).to be_pre_importing
+ expect(container_repository2.reload).to be_pre_importing
end
end
- context 'when multiple aborted imports are available' do
- let_it_be(:aborted_repository1) { create(:container_repository, :import_aborted) }
- let_it_be(:aborted_repository2) { create(:container_repository, :import_aborted) }
+ it_behaves_like 'starting all the next imports'
+ context 'when the new pre-import maxes out the capacity' do
before do
- container_repository.update!(created_at: 30.seconds.ago)
+ # set capacity to 10
+ stub_feature_flags(
+ container_registry_migration_phase2_capacity_25: false,
+ container_registry_migration_phase2_capacity_40: false
+ )
+
+ # Plus 2 created above gives 9 importing repositories
+ create_list(:container_repository, 7, :importing)
end
- context 'with successful registry requests' do
- before do
- allow_worker(on: :next_aborted_repository) do |repository|
- allow(repository).to receive(:migration_import).and_return(:ok)
- allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
- end
- end
+ it 'starts the pre-import only for one qualified repository' do
+ expect_log_info(
+ [
+ {
+ import_type: 'next',
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ container_repository_migration_state: 'pre_importing'
+ }
+ ]
+ )
- it 'retries the import for the aborted repository' do
- expect_log_info(
- [
- {
- import_type: 'retry',
- container_repository_id: aborted_repository1.id,
- container_repository_path: aborted_repository1.path,
- container_repository_migration_state: 'importing'
- },
- {
- import_type: 'retry',
- container_repository_id: aborted_repository2.id,
- container_repository_path: aborted_repository2.path,
- container_repository_migration_state: 'importing'
- }
- ]
- )
-
- expect(worker).to receive(:handle_next_migration).and_call_original
-
- subject
-
- expect(aborted_repository1.reload).to be_importing
- expect(aborted_repository2.reload).to be_importing
- end
- end
+ subject
- context 'when an error occurs' do
- it 'does abort that migration' do
- allow_worker(on: :next_aborted_repository) do |repository|
- allow(repository).to receive(:retry_aborted_migration).and_raise(StandardError)
- end
-
- expect_log_info(
- [
- {
- import_type: 'retry',
- container_repository_id: aborted_repository1.id,
- container_repository_path: aborted_repository1.path,
- container_repository_migration_state: 'import_aborted'
- }
- ]
- )
-
- subject
-
- expect(aborted_repository1.reload).to be_import_aborted
- expect(aborted_repository2.reload).to be_import_aborted
- end
+ expect(container_repository.reload).to be_pre_importing
+ expect(container_repository2.reload).to be_default
end
end
- context 'when multiple qualified repositories are available' do
- let_it_be(:container_repository2) { create(:container_repository, created_at: 2.days.ago) }
-
+ context 'max tag count is 0' do
before do
- allow_worker(on: :next_repository) do |repository|
- allow(repository).to receive(:migration_pre_import).and_return(:ok)
- end
-
+ stub_application_setting(container_registry_import_max_tags_count: 0)
+ # Add 8 tags to the next repository
stub_container_registry_tags(
- repository: container_repository2.path,
- tags: %w(tag4 tag5 tag6),
- with_manifest: true
+ repository: container_repository.path, tags: %w(a b c d e f g h), with_manifest: true
)
end
- shared_examples 'starting all the next imports' do
- it 'starts the pre-import for the next qualified repositories' do
- expect_log_info(
- [
- {
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'pre_importing'
- },
- {
- import_type: 'next',
- container_repository_id: container_repository2.id,
- container_repository_path: container_repository2.path,
- container_repository_migration_state: 'pre_importing'
- }
- ]
- )
-
- expect(worker).to receive(:handle_next_migration).exactly(3).times.and_call_original
-
- expect { subject }.to make_queries_matching(/LIMIT 2/)
-
- expect(container_repository.reload).to be_pre_importing
- expect(container_repository2.reload).to be_pre_importing
- end
- end
-
it_behaves_like 'starting all the next imports'
+ end
- context 'when the new pre-import maxes out the capacity' do
- before do
- # set capacity to 10
- stub_feature_flags(
- container_registry_migration_phase2_capacity_25: false
- )
+ context 'when the deadline is hit' do
+ it 'does not handle the second qualified repository' do
+ expect(worker).to receive(:loop_deadline).and_return(5.seconds.from_now, 2.seconds.ago)
+ expect(worker).to receive(:handle_next_migration).once.and_call_original
- # Plus 2 created above gives 9 importing repositories
- create_list(:container_repository, 7, :importing)
- end
+ subject
- it 'starts the pre-import only for one qualified repository' do
- expect_log_info(
- [
- {
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'pre_importing'
- }
- ]
- )
-
- subject
-
- expect(container_repository.reload).to be_pre_importing
- expect(container_repository2.reload).to be_default
- end
+ expect(container_repository.reload).to be_pre_importing
+ expect(container_repository2.reload).to be_default
end
+ end
+ end
- context 'max tag count is 0' do
- before do
- stub_application_setting(container_registry_import_max_tags_count: 0)
- # Add 8 tags to the next repository
- stub_container_registry_tags(
- repository: container_repository.path, tags: %w(a b c d e f g h), with_manifest: true
- )
- end
+ context 'when a mix of aborted imports and qualified repositories are available' do
+ let_it_be(:aborted_repository) { create(:container_repository, :import_aborted) }
- it_behaves_like 'starting all the next imports'
+ before do
+ allow_worker(on: :next_aborted_repository) do |repository|
+ allow(repository).to receive(:migration_import).and_return(:ok)
+ allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
end
- context 'when the deadline is hit' do
- it 'does not handle the second qualified repository' do
- expect(worker).to receive(:loop_deadline).and_return(5.seconds.from_now, 2.seconds.ago)
- expect(worker).to receive(:handle_next_migration).once.and_call_original
-
- subject
-
- expect(container_repository.reload).to be_pre_importing
- expect(container_repository2.reload).to be_default
- end
+ allow_worker(on: :next_repository) do |repository|
+ allow(repository).to receive(:migration_pre_import).and_return(:ok)
end
end
- context 'when a mix of aborted imports and qualified repositories are available' do
- let_it_be(:aborted_repository) { create(:container_repository, :import_aborted) }
-
- before do
- allow_worker(on: :next_aborted_repository) do |repository|
- allow(repository).to receive(:migration_import).and_return(:ok)
- allow(repository.gitlab_api_client).to receive(:import_status).and_return('import_failed')
- end
+    it 'retries the aborted repository and starts the migration on the qualified repository' do
+ expect_log_info(
+ [
+ {
+ import_type: 'retry',
+ container_repository_id: aborted_repository.id,
+ container_repository_path: aborted_repository.path,
+ container_repository_migration_state: 'importing'
+ },
+ {
+ import_type: 'next',
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ container_repository_migration_state: 'pre_importing'
+ }
+ ]
+ )
- allow_worker(on: :next_repository) do |repository|
- allow(repository).to receive(:migration_pre_import).and_return(:ok)
- end
- end
+ subject
- it 'retries the aborted repository and start the migration on the qualified repository' do
- expect_log_info(
- [
- {
- import_type: 'retry',
- container_repository_id: aborted_repository.id,
- container_repository_path: aborted_repository.path,
- container_repository_migration_state: 'importing'
- },
- {
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'pre_importing'
- }
- ]
- )
+ expect(aborted_repository.reload).to be_importing
+ expect(container_repository.reload).to be_pre_importing
+ end
+ end
- subject
+ context 'above capacity' do
+ before do
+ create(:container_repository, :importing)
+ create(:container_repository, :importing)
+ allow(ContainerRegistry::Migration).to receive(:capacity).and_return(1)
+ end
- expect(aborted_repository.reload).to be_importing
- expect(container_repository.reload).to be_pre_importing
+ it_behaves_like 'no action' do
+ before do
+ expect_log_extra_metadata(below_capacity: false, max_capacity_setting: 1)
end
end
+ end
- context 'above capacity' do
+ context 'too soon before previous completed import step' do
+ where(:state, :timestamp) do
+ :import_done | :migration_import_done_at
+ :pre_import_done | :migration_pre_import_done_at
+ :import_aborted | :migration_aborted_at
+ :import_skipped | :migration_skipped_at
+ end
+
+ with_them do
before do
- create(:container_repository, :importing)
- create(:container_repository, :importing)
- allow(ContainerRegistry::Migration).to receive(:capacity).and_return(1)
+ allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
+ create(:container_repository, state, timestamp => 1.minute.ago)
end
it_behaves_like 'no action' do
before do
- expect_log_extra_metadata(below_capacity: false, max_capacity_setting: 1)
+ expect_log_extra_metadata(waiting_time_passed: false, current_waiting_time_setting: 45.minutes)
end
end
end
- context 'too soon before previous completed import step' do
- where(:state, :timestamp) do
- :import_done | :migration_import_done_at
- :pre_import_done | :migration_pre_import_done_at
- :import_aborted | :migration_aborted_at
- :import_skipped | :migration_skipped_at
- end
-
- with_them do
- before do
- allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
- create(:container_repository, state, timestamp => 1.minute.ago)
- end
-
- it_behaves_like 'no action' do
- before do
- expect_log_extra_metadata(waiting_time_passed: false, current_waiting_time_setting: 45.minutes)
- end
- end
+ context 'when last completed repository has nil timestamps' do
+ before do
+ allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
+ create(:container_repository, migration_state: 'import_done')
end
- context 'when last completed repository has nil timestamps' do
- before do
- allow(ContainerRegistry::Migration).to receive(:enqueue_waiting_time).and_return(45.minutes)
- create(:container_repository, migration_state: 'import_done')
- end
-
- it 'continues to try the next import' do
- expect { subject }.to change { container_repository.reload.migration_state }
- end
+ it 'continues to try the next import' do
+ expect { subject }.to change { container_repository.reload.migration_state }
end
end
+ end
- context 'over max tag count' do
- before do
- stub_application_setting(container_registry_import_max_tags_count: 2)
- end
+ context 'over max tag count' do
+ before do
+ stub_application_setting(container_registry_import_max_tags_count: 2)
+ end
- it 'skips the repository' do
- expect_log_info(
- [
- {
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'import_skipped',
- container_repository_migration_skipped_reason: 'too_many_tags'
- }
- ]
- )
+ it 'skips the repository' do
+ expect_log_info(
+ [
+ {
+ import_type: 'next',
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ container_repository_migration_state: 'import_skipped',
+ container_repository_migration_skipped_reason: 'too_many_tags'
+ }
+ ]
+ )
- expect(worker).to receive(:handle_next_migration).twice.and_call_original
- # skipping the migration will re_enqueue the job
- expect(described_class).to receive(:enqueue_a_job)
+ expect(worker).to receive(:handle_next_migration).twice.and_call_original
+ # skipping the migration will re_enqueue the job
+ expect(described_class).to receive(:enqueue_a_job)
- subject
+ subject
- expect(container_repository.reload).to be_import_skipped
- expect(container_repository.migration_skipped_reason).to eq('too_many_tags')
- expect(container_repository.migration_skipped_at).not_to be_nil
- end
+ expect(container_repository.reload).to be_import_skipped
+ expect(container_repository.migration_skipped_reason).to eq('too_many_tags')
+ expect(container_repository.migration_skipped_at).not_to be_nil
end
+ end
- context 'when an error occurs' do
- before do
- allow(ContainerRegistry::Migration).to receive(:max_tags_count).and_raise(StandardError)
- end
+ context 'when an error occurs' do
+ before do
+ allow(ContainerRegistry::Migration).to receive(:max_tags_count).and_raise(StandardError)
+ end
- it 'aborts the import' do
- expect_log_info(
- [
- {
- import_type: 'next',
- container_repository_id: container_repository.id,
- container_repository_path: container_repository.path,
- container_repository_migration_state: 'import_aborted'
- }
- ]
- )
+ it 'aborts the import' do
+ expect_log_info(
+ [
+ {
+ import_type: 'next',
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ container_repository_migration_state: 'import_aborted'
+ }
+ ]
+ )
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- instance_of(StandardError),
- next_repository_id: container_repository.id
- )
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(StandardError),
+ next_repository_id: container_repository.id
+ )
- # aborting the migration will re_enqueue the job
- expect(described_class).to receive(:enqueue_a_job)
+ # aborting the migration will re_enqueue the job
+ expect(described_class).to receive(:enqueue_a_job)
- subject
+ subject
- expect(container_repository.reload).to be_import_aborted
- end
+ expect(container_repository.reload).to be_import_aborted
end
+ end
- context 'with the exclusive lease taken' do
- let(:lease_key) { worker.send(:lease_key) }
+ context 'with the exclusive lease taken' do
+ let(:lease_key) { worker.send(:lease_key) }
- before do
- stub_exclusive_lease_taken(lease_key, timeout: 30.minutes)
- end
+ before do
+ stub_exclusive_lease_taken(lease_key, timeout: 30.minutes)
+ end
- it 'does not perform' do
- expect(worker).not_to receive(:handle_aborted_migration)
- expect(worker).not_to receive(:handle_next_migration)
+ it 'does not perform' do
+ expect(worker).not_to receive(:handle_aborted_migration)
+ expect(worker).not_to receive(:handle_next_migration)
- subject
- end
+ subject
end
end
diff --git a/spec/workers/container_registry/migration/guard_worker_spec.rb b/spec/workers/container_registry/migration/guard_worker_spec.rb
index c52a3fc5d54..d2bcfef2f5b 100644
--- a/spec/workers/container_registry/migration/guard_worker_spec.rb
+++ b/spec/workers/container_registry/migration/guard_worker_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_ids, [stale_migration.id])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_paths, [stale_migration.path])
expect(ContainerRegistry::Migration).to receive(timeout).and_call_original
expect { subject }
@@ -44,19 +45,6 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
.and change { stale_migration.reload.migration_state }.to('import_aborted')
.and not_change { ongoing_migration.migration_state }
end
-
- context 'registry_migration_guard_thresholds feature flag disabled' do
- before do
- stub_feature_flags(registry_migration_guard_thresholds: false)
- end
-
- it 'falls back on the hardcoded value' do
- expect(ContainerRegistry::Migration).not_to receive(:pre_import_timeout)
-
- expect { subject }
- .to change { stale_migration.reload.migration_state }.to('import_aborted')
- end
- end
end
context 'migration is canceled' do
@@ -75,6 +63,7 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 1)
expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_ids, [stale_migration.id])
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_long_running_migration_paths, [stale_migration.path])
expect(ContainerRegistry::Migration).to receive(timeout).and_call_original
expect { subject }
@@ -83,19 +72,6 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
expect(stale_migration.reload.migration_state).to eq('import_skipped')
expect(stale_migration.reload.migration_skipped_reason).to eq('migration_canceled')
end
-
- context 'registry_migration_guard_thresholds feature flag disabled' do
- before do
- stub_feature_flags(registry_migration_guard_thresholds: false)
- end
-
- it 'falls back on the hardcoded value' do
- expect(ContainerRegistry::Migration).not_to receive(timeout)
-
- expect { subject }
- .to change { stale_migration.reload.migration_state }.to('import_skipped')
- end
- end
end
context 'when the retry limit has not been reached' do
@@ -132,16 +108,15 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
end
context 'with pre_importing stale migrations' do
- let(:ongoing_migration) { create(:container_repository, :pre_importing) }
- let(:stale_migration) { create(:container_repository, :pre_importing, migration_pre_import_started_at: 11.minutes.ago) }
+ let_it_be(:ongoing_migration) { create(:container_repository, :pre_importing) }
+ let_it_be(:stale_migration) { create(:container_repository, :pre_importing, migration_pre_import_started_at: 11.minutes.ago) }
+
let(:import_status) { 'test' }
before do
allow_next_instance_of(ContainerRegistry::GitlabApiClient) do |client|
allow(client).to receive(:import_status).and_return(import_status)
end
-
- stub_application_setting(container_registry_pre_import_timeout: 10.minutes.to_i)
end
it 'will abort the migration' do
@@ -161,7 +136,76 @@ RSpec.describe ContainerRegistry::Migration::GuardWorker, :aggregate_failures do
context 'the client returns pre_import_in_progress' do
let(:import_status) { 'pre_import_in_progress' }
- it_behaves_like 'handling long running migrations', timeout: :pre_import_timeout
+ shared_examples 'not aborting the stale migration' do
+ it 'will not abort the migration' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:stale_migrations_count, 1)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:aborted_stale_migrations_count, 0)
+
+ expect { subject }
+ .to not_change(pre_importing_migrations, :count)
+ .and not_change(pre_import_done_migrations, :count)
+ .and not_change(importing_migrations, :count)
+ .and not_change(import_done_migrations, :count)
+ .and not_change(import_aborted_migrations, :count)
+ .and not_change { stale_migration.reload.migration_state }
+ .and not_change { ongoing_migration.migration_state }
+ end
+ end
+
+ context 'not long running' do
+ before do
+ stub_application_setting(container_registry_pre_import_timeout: 12.minutes.to_i)
+ end
+
+ it_behaves_like 'not aborting the stale migration'
+ end
+
+ context 'long running' do
+ before do
+ stub_application_setting(container_registry_pre_import_timeout: 9.minutes.to_i)
+ end
+
+ context 'with registry_migration_guard_dynamic_pre_import_timeout enabled' do
+ before do
+ stub_application_setting(container_registry_pre_import_tags_rate: 1)
+ end
+
+ context 'below the dynamic threshold' do
+ before do
+ allow_next_found_instance_of(ContainerRepository) do |repository|
+ allow(repository).to receive(:tags_count).and_return(11.minutes.to_i + 100)
+ end
+ end
+
+ it_behaves_like 'not aborting the stale migration'
+ end
+
+ context 'above the dynamic threshold' do
+ let(:tags) do
+ Array.new(11.minutes.to_i - 100) { |i| "tag#{i}" }
+ end
+
+ before do
+ # We can't allow_next_found_instance_of because the shared example
+ # 'handling long running migrations' is already using that.
+ # Instead, here we're going to stub the ContainerRegistry::Client instance.
+ allow_next_instance_of(ContainerRegistry::Client) do |client|
+ allow(client).to receive(:repository_tags).and_return({ 'tags' => tags })
+ end
+ end
+
+ it_behaves_like 'handling long running migrations', timeout: :pre_import_timeout
+ end
+ end
+
+ context 'with registry_migration_guard_dynamic_pre_import_timeout disabled' do
+ before do
+ stub_feature_flags(registry_migration_guard_dynamic_pre_import_timeout: false)
+ end
+
+ it_behaves_like 'handling long running migrations', timeout: :pre_import_timeout
+ end
+ end
end
end
diff --git a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
index f3cf5450048..2b4a42060d9 100644
--- a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Database::BatchedBackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state do
- it_behaves_like 'it runs batched background migration jobs', 'ci', feature_flag: :execute_batched_migrations_on_schedule_ci_database
+ it_behaves_like 'it runs batched background migration jobs', :ci
end
diff --git a/spec/workers/database/batched_background_migration_worker_spec.rb b/spec/workers/database/batched_background_migration_worker_spec.rb
index 7f0883def3c..a6c7db60abe 100644
--- a/spec/workers/database/batched_background_migration_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration_worker_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Database::BatchedBackgroundMigrationWorker do
- it_behaves_like 'it runs batched background migration jobs', :main, feature_flag: :execute_batched_migrations_on_schedule
+ it_behaves_like 'it runs batched background migration jobs', :main
end
diff --git a/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb b/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb
index e5024c568cb..1c083d1d8a3 100644
--- a/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb
+++ b/spec/workers/database/ci_namespace_mirrors_consistency_check_worker_spec.rb
@@ -6,29 +6,11 @@ RSpec.describe Database::CiNamespaceMirrorsConsistencyCheckWorker do
let(:worker) { described_class.new }
describe '#perform' do
- context 'feature flag is disabled' do
- before do
- stub_feature_flags(ci_namespace_mirrors_consistency_check: false)
- end
-
- it 'does not perform the consistency check on namespaces' do
- expect(Database::ConsistencyCheckService).not_to receive(:new)
- expect(worker).not_to receive(:log_extra_metadata_on_done)
- worker.perform
- end
- end
-
- context 'feature flag is enabled' do
- before do
- stub_feature_flags(ci_namespace_mirrors_consistency_check: true)
- end
-
- it 'executes the consistency check on namespaces' do
- expect(Database::ConsistencyCheckService).to receive(:new).and_call_original
- expected_result = { batches: 0, matches: 0, mismatches: 0, mismatches_details: [] }
- expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
- worker.perform
- end
+ it 'executes the consistency check on namespaces' do
+ expect(Database::ConsistencyCheckService).to receive(:new).and_call_original
+ expected_result = { batches: 0, matches: 0, mismatches: 0, mismatches_details: [] }
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
+ worker.perform
end
context 'logs should contain the detailed mismatches' do
@@ -37,7 +19,6 @@ RSpec.describe Database::CiNamespaceMirrorsConsistencyCheckWorker do
before do
redis_shared_state_cleanup!
- stub_feature_flags(ci_namespace_mirrors_consistency_check: true)
create_list(:namespace, 10) # This will also create Ci::NameSpaceMirror objects
missing_namespace.delete
diff --git a/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb b/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb
index f8e950d8917..8c839410ccd 100644
--- a/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb
+++ b/spec/workers/database/ci_project_mirrors_consistency_check_worker_spec.rb
@@ -6,29 +6,11 @@ RSpec.describe Database::CiProjectMirrorsConsistencyCheckWorker do
let(:worker) { described_class.new }
describe '#perform' do
- context 'feature flag is disabled' do
- before do
- stub_feature_flags(ci_project_mirrors_consistency_check: false)
- end
-
- it 'does not perform the consistency check on projects' do
- expect(Database::ConsistencyCheckService).not_to receive(:new)
- expect(worker).not_to receive(:log_extra_metadata_on_done)
- worker.perform
- end
- end
-
- context 'feature flag is enabled' do
- before do
- stub_feature_flags(ci_project_mirrors_consistency_check: true)
- end
-
- it 'executes the consistency check on projects' do
- expect(Database::ConsistencyCheckService).to receive(:new).and_call_original
- expected_result = { batches: 0, matches: 0, mismatches: 0, mismatches_details: [] }
- expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
- worker.perform
- end
+ it 'executes the consistency check on projects' do
+ expect(Database::ConsistencyCheckService).to receive(:new).and_call_original
+ expected_result = { batches: 0, matches: 0, mismatches: 0, mismatches_details: [] }
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:results, expected_result)
+ worker.perform
end
context 'logs should contain the detailed mismatches' do
@@ -37,7 +19,6 @@ RSpec.describe Database::CiProjectMirrorsConsistencyCheckWorker do
before do
redis_shared_state_cleanup!
- stub_feature_flags(ci_project_mirrors_consistency_check: true)
create_list(:project, 10) # This will also create Ci::ProjectMirror objects
missing_project.delete
diff --git a/spec/workers/delete_container_repository_worker_spec.rb b/spec/workers/delete_container_repository_worker_spec.rb
index ec040eab2d4..a011457444a 100644
--- a/spec/workers/delete_container_repository_worker_spec.rb
+++ b/spec/workers/delete_container_repository_worker_spec.rb
@@ -3,31 +3,119 @@
require 'spec_helper'
RSpec.describe DeleteContainerRepositoryWorker do
- let(:registry) { create(:container_repository) }
- let(:project) { registry.project }
- let(:user) { project.first_owner }
+ let_it_be(:repository) { create(:container_repository) }
- subject { described_class.new }
+ let(:project) { repository.project }
+ let(:user) { project.first_owner }
+ let(:worker) { described_class.new }
describe '#perform' do
+ let(:user_id) { user.id }
+ let(:repository_id) { repository.id }
+
+ subject(:perform) { worker.perform(user_id, repository_id) }
+
it 'executes the destroy service' do
- service = instance_double(Projects::ContainerRepository::DestroyService)
- expect(service).to receive(:execute)
- expect(Projects::ContainerRepository::DestroyService).to receive(:new).with(project, user).and_return(service)
+ expect_destroy_service_execution
+
+ perform
+ end
+
+ context 'with an invalid user id' do
+ let(:user_id) { -1 }
+
+ it { expect { perform }.not_to raise_error }
+ end
- subject.perform(user.id, registry.id)
+ context 'with an invalid repository id' do
+ let(:repository_id) { -1 }
+
+ it { expect { perform }.not_to raise_error }
end
- it 'does not raise error when user could not be found' do
- expect do
- subject.perform(-1, registry.id)
- end.not_to raise_error
+ context 'with a repository being migrated', :freeze_time do
+ before do
+ stub_application_setting(
+ container_registry_pre_import_tags_rate: 0.5,
+ container_registry_import_timeout: 10.minutes.to_i
+ )
+ end
+
+ shared_examples 'destroying the repository' do
+ it 'does destroy the repository' do
+ expect_next_found_instance_of(ContainerRepository) do |container_repository|
+ expect(container_repository).not_to receive(:tags_count)
+ end
+ expect(described_class).not_to receive(:perform_in)
+ expect_destroy_service_execution
+
+ perform
+ end
+ end
+
+ shared_examples 'not re enqueuing job if feature flag is disabled' do
+ before do
+ stub_feature_flags(container_registry_migration_phase2_delete_container_repository_worker_support: false)
+ end
+
+ it_behaves_like 'destroying the repository'
+ end
+
+ context 'with migration state set to pre importing' do
+ let_it_be(:repository) { create(:container_repository, :pre_importing) }
+
+ let(:tags_count) { 60 }
+ let(:delay) { (tags_count * 0.5).seconds + 10.minutes + described_class::FIXED_DELAY }
+
+      it 'does not destroy the repository and re-enqueues the job' do
+ expect_next_found_instance_of(ContainerRepository) do |container_repository|
+ expect(container_repository).to receive(:tags_count).and_return(tags_count)
+ end
+ expect(described_class).to receive(:perform_in).with(delay.from_now)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:delete_postponed, delay)
+ expect(::Projects::ContainerRepository::DestroyService).not_to receive(:new)
+
+ perform
+ end
+
+ it_behaves_like 'not re enqueuing job if feature flag is disabled'
+ end
+
+ %i[pre_import_done importing import_aborted].each do |migration_state|
+ context "with migration state set to #{migration_state}" do
+ let_it_be(:repository) { create(:container_repository, migration_state) }
+
+ let(:delay) { 10.minutes + described_class::FIXED_DELAY }
+
+        it 'does not destroy the repository and re-enqueues the job' do
+ expect_next_found_instance_of(ContainerRepository) do |container_repository|
+ expect(container_repository).not_to receive(:tags_count)
+ end
+ expect(described_class).to receive(:perform_in).with(delay.from_now)
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:delete_postponed, delay)
+ expect(::Projects::ContainerRepository::DestroyService).not_to receive(:new)
+
+ perform
+ end
+
+ it_behaves_like 'not re enqueuing job if feature flag is disabled'
+ end
+ end
+
+ %i[default import_done import_skipped].each do |migration_state|
+ context "with migration state set to #{migration_state}" do
+ let_it_be(:repository) { create(:container_repository, migration_state) }
+
+ it_behaves_like 'destroying the repository'
+ it_behaves_like 'not re enqueuing job if feature flag is disabled'
+ end
+ end
end
- it 'does not raise error when registry could not be found' do
- expect do
- subject.perform(user.id, -1)
- end.not_to raise_error
+ def expect_destroy_service_execution
+ service = instance_double(Projects::ContainerRepository::DestroyService)
+ expect(service).to receive(:execute)
+ expect(Projects::ContainerRepository::DestroyService).to receive(:new).with(project, user).and_return(service)
end
end
end
diff --git a/spec/workers/deployments/hooks_worker_spec.rb b/spec/workers/deployments/hooks_worker_spec.rb
index a9240b45360..7c5f288fa57 100644
--- a/spec/workers/deployments/hooks_worker_spec.rb
+++ b/spec/workers/deployments/hooks_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Deployments::HooksWorker do
describe '#perform' do
before do
- allow(ProjectServiceWorker).to receive(:perform_async)
+ allow(Integrations::ExecuteWorker).to receive(:perform_async)
end
it 'logs deployment and project IDs as metadata' do
@@ -25,7 +25,7 @@ RSpec.describe Deployments::HooksWorker do
project = deployment.project
service = create(:integrations_slack, project: project, deployment_events: true)
- expect(ProjectServiceWorker).to receive(:perform_async).with(service.id, an_instance_of(Hash))
+ expect(Integrations::ExecuteWorker).to receive(:perform_async).with(service.id, an_instance_of(Hash))
worker.perform(deployment_id: deployment.id, status_changed_at: Time.current)
end
@@ -35,13 +35,13 @@ RSpec.describe Deployments::HooksWorker do
project = deployment.project
create(:integrations_slack, project: project, deployment_events: true, active: false)
- expect(ProjectServiceWorker).not_to receive(:perform_async)
+ expect(Integrations::ExecuteWorker).not_to receive(:perform_async)
worker.perform(deployment_id: deployment.id, status_changed_at: Time.current)
end
it 'does not execute if a deployment does not exist' do
- expect(ProjectServiceWorker).not_to receive(:perform_async)
+ expect(Integrations::ExecuteWorker).not_to receive(:perform_async)
worker.perform(deployment_id: non_existing_record_id, status_changed_at: Time.current)
end
diff --git a/spec/workers/environments/auto_stop_worker_spec.rb b/spec/workers/environments/auto_stop_worker_spec.rb
index 1983cfa18ea..cb162b5a01c 100644
--- a/spec/workers/environments/auto_stop_worker_spec.rb
+++ b/spec/workers/environments/auto_stop_worker_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Environments::AutoStopWorker do
it 'stops the environment' do
expect { subject }
.to change { Environment.find_by_name('review/feature').state }
- .from('available').to('stopped')
+ .from('available').to('stopping')
end
it 'executes the stop action' do
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 0c83a692ca8..a9e886de52a 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -180,7 +180,9 @@ RSpec.describe 'Every Sidekiq worker' do
'ClusterWaitForAppInstallationWorker' => 3,
'ClusterWaitForAppUpdateWorker' => 3,
'ClusterWaitForIngressIpAddressWorker' => 3,
+ 'Clusters::Applications::ActivateIntegrationWorker' => 3,
'Clusters::Applications::ActivateServiceWorker' => 3,
+ 'Clusters::Applications::DeactivateIntegrationWorker' => 3,
'Clusters::Applications::DeactivateServiceWorker' => 3,
'Clusters::Applications::UninstallWorker' => 3,
'Clusters::Applications::WaitForUninstallAppWorker' => 3,
@@ -192,7 +194,6 @@ RSpec.describe 'Every Sidekiq worker' do
'CreateGithubWebhookWorker' => 3,
'CreateNoteDiffFileWorker' => 3,
'CreatePipelineWorker' => 3,
- 'DastSiteValidationWorker' => 3,
'DeleteContainerRepositoryWorker' => 3,
'DeleteDiffFilesWorker' => 3,
'DeleteMergedBranchesWorker' => 3,
@@ -227,8 +228,6 @@ RSpec.describe 'Every Sidekiq worker' do
'Epics::UpdateEpicsDatesWorker' => 3,
'ErrorTrackingIssueLinkWorker' => 3,
'Experiments::RecordConversionEventWorker' => 3,
- 'ExpireJobCacheWorker' => 3,
- 'ExpirePipelineCacheWorker' => 3,
'ExportCsvWorker' => 3,
'ExternalServiceReactiveCachingWorker' => 3,
'FileHookWorker' => false,
@@ -308,11 +307,11 @@ RSpec.describe 'Every Sidekiq worker' do
'IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob' => 3,
'IncidentManagement::OncallRotations::PersistShiftsJob' => 3,
'IncidentManagement::PagerDuty::ProcessIncidentWorker' => 3,
+ 'Integrations::ExecuteWorker' => 3,
+ 'Integrations::IrkerWorker' => 3,
'InvalidGpgSignatureUpdateWorker' => 3,
'IrkerWorker' => 3,
'IssuableExportCsvWorker' => 3,
- 'IssuePlacementWorker' => 3,
- 'IssueRebalancingWorker' => 3,
'Issues::PlacementWorker' => 3,
'Issues::RebalancingWorker' => 3,
'IterationsUpdateStatusWorker' => 3,
@@ -323,6 +322,7 @@ RSpec.describe 'Every Sidekiq worker' do
'JiraConnect::SyncMergeRequestWorker' => 3,
'JiraConnect::SyncProjectWorker' => 3,
'LdapGroupSyncWorker' => 3,
+ 'Licenses::ResetSubmitLicenseUsageDataBannerWorker' => 13,
'MailScheduler::IssueDueWorker' => 3,
'MailScheduler::NotificationServiceWorker' => 3,
'MembersDestroyer::UnassignIssuablesWorker' => 3,
@@ -340,7 +340,6 @@ RSpec.describe 'Every Sidekiq worker' do
'Metrics::Dashboard::PruneOldAnnotationsWorker' => 3,
'Metrics::Dashboard::SyncDashboardsWorker' => 3,
'MigrateExternalDiffsWorker' => 3,
- 'NamespacelessProjectDestroyWorker' => 3,
'Namespaces::OnboardingIssueCreatedWorker' => 3,
'Namespaces::OnboardingPipelineCreatedWorker' => 3,
'Namespaces::OnboardingProgressWorker' => 3,
@@ -378,7 +377,6 @@ RSpec.describe 'Every Sidekiq worker' do
'PostReceive' => 3,
'ProcessCommitWorker' => 3,
'ProjectCacheWorker' => 3,
- 'ProjectDailyStatisticsWorker' => 3,
'ProjectDestroyWorker' => 3,
'ProjectExportWorker' => false,
'ProjectImportScheduleWorker' => 1,
@@ -392,7 +390,6 @@ RSpec.describe 'Every Sidekiq worker' do
'Projects::ScheduleBulkRepositoryShardMovesWorker' => 3,
'Projects::UpdateRepositoryStorageWorker' => 3,
'Projects::RefreshBuildArtifactsSizeStatisticsWorker' => 0,
- 'Prometheus::CreateDefaultAlertsWorker' => 3,
'PropagateIntegrationGroupWorker' => 3,
'PropagateIntegrationInheritDescendantWorker' => 3,
'PropagateIntegrationInheritWorker' => 3,
@@ -410,9 +407,7 @@ RSpec.describe 'Every Sidekiq worker' do
'RepositoryCleanupWorker' => 3,
'RepositoryForkWorker' => 5,
'RepositoryImportWorker' => false,
- 'RepositoryRemoveRemoteWorker' => 3,
'RepositoryUpdateMirrorWorker' => false,
- 'RepositoryPushAuditEventWorker' => 3,
'RepositoryUpdateRemoteMirrorWorker' => 3,
'RequirementsManagement::ImportRequirementsCsvWorker' => 3,
'RequirementsManagement::ProcessRequirementsReportsWorker' => 3,
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
deleted file mode 100644
index e9af39ed2df..00000000000
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ExpireJobCacheWorker do
- let_it_be(:pipeline) { create(:ci_empty_pipeline) }
-
- let(:project) { pipeline.project }
-
- describe '#perform' do
- context 'with a job in the pipeline' do
- let_it_be(:job) { create(:ci_build, pipeline: pipeline) }
-
- let(:job_args) { job.id }
-
- it_behaves_like 'an idempotent worker'
-
- it_behaves_like 'worker with data consistency',
- described_class,
- data_consistency: :delayed
- end
-
- context 'when there is no job in the pipeline' do
- it 'does not change the etag store' do
- expect(Gitlab::EtagCaching::Store).not_to receive(:new)
-
- perform_multiple(non_existing_record_id)
- end
- end
- end
-end
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
deleted file mode 100644
index f4c4df2e752..00000000000
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ExpirePipelineCacheWorker do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
-
- subject { described_class.new }
-
- describe '#perform' do
- it 'executes the service' do
- expect_next_instance_of(Ci::ExpirePipelineCacheService) do |instance|
- expect(instance).to receive(:execute).with(pipeline).and_call_original
- end
-
- subject.perform(pipeline.id)
- end
-
- it "doesn't do anything if the pipeline not exist" do
- expect_any_instance_of(Ci::ExpirePipelineCacheService).not_to receive(:execute)
- expect_any_instance_of(Gitlab::EtagCaching::Store).not_to receive(:touch)
-
- subject.perform(617748)
- end
-
- skip "with https://gitlab.com/gitlab-org/gitlab/-/issues/325291 resolved" do
- it_behaves_like 'an idempotent worker' do
- let(:job_args) { [pipeline.id] }
- end
- end
-
- it_behaves_like 'worker with data consistency',
- described_class,
- data_consistency: :delayed
- end
-end
diff --git a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
index 10702c17cb5..2b08a592164 100644
--- a/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_issues_worker_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::Stage::ImportIssuesWorker do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, import_type: 'jira') }
diff --git a/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
index 52c516b9ff9..d15f2caba19 100644
--- a/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/import_labels_worker_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::Stage::ImportLabelsWorker do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, import_type: 'jira') }
diff --git a/spec/workers/gitlab_service_ping_worker_spec.rb b/spec/workers/gitlab_service_ping_worker_spec.rb
index abccc0dc967..057639dcf1d 100644
--- a/spec/workers/gitlab_service_ping_worker_spec.rb
+++ b/spec/workers/gitlab_service_ping_worker_spec.rb
@@ -3,8 +3,14 @@
require 'spec_helper'
RSpec.describe GitlabServicePingWorker, :clean_gitlab_redis_shared_state do
+ let(:payload) { { recorded_at: Time.current.rfc3339 } }
+
before do
allow_next_instance_of(ServicePing::SubmitService) { |service| allow(service).to receive(:execute) }
+ allow_next_instance_of(ServicePing::BuildPayload) do |service|
+ allow(service).to receive(:execute).and_return(payload)
+ end
+
allow(subject).to receive(:sleep)
end
@@ -15,10 +21,54 @@ RSpec.describe GitlabServicePingWorker, :clean_gitlab_redis_shared_state do
subject.perform
end
- it 'delegates to ServicePing::SubmitService' do
- expect_next_instance_of(ServicePing::SubmitService) { |service| expect(service).to receive(:execute) }
+ context 'with prerecord_service_ping_data feature enabled' do
+ it 'delegates to ServicePing::SubmitService' do
+ stub_feature_flags(prerecord_service_ping_data: true)
- subject.perform
+ expect_next_instance_of(ServicePing::SubmitService, payload: payload) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject.perform
+ end
+ end
+
+ context 'with prerecord_service_ping_data feature disabled' do
+ it 'does not prerecord ServicePing, and calls SubmitService', :aggregate_failures do
+ stub_feature_flags(prerecord_service_ping_data: false)
+
+ expect(ServicePing::BuildPayload).not_to receive(:new)
+ expect_next_instance_of(ServicePing::SubmitService, payload: nil) do |service|
+ expect(service).to receive(:execute)
+ end
+ expect { subject.perform }.not_to change { RawUsageData.count }
+ end
+ end
+
+ context 'payload computation' do
+ it 'creates RawUsageData entry when there is NO entry with the same recorded_at timestamp' do
+ expect { subject.perform }.to change { RawUsageData.count }.by(1)
+ end
+
+ it 'updates RawUsageData entry when there is an entry with the same recorded_at timestamp' do
+ record = create(:raw_usage_data, payload: { some_metric: 123 }, recorded_at: payload[:recorded_at])
+
+ expect { subject.perform }.to change { record.reload.payload }
+ .from("some_metric" => 123).to(payload.stringify_keys)
+ end
+
+ it 'reports errors and continues execution' do
+ error = StandardError.new('some error')
+ allow(::ServicePing::BuildPayload).to receive(:new).and_raise(error)
+
+ expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(error)
+ expect_next_instance_of(::ServicePing::SubmitService, payload: nil) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject.perform
+ end
end
it "obtains a #{described_class::LEASE_TIMEOUT} second exclusive lease" do
diff --git a/spec/workers/project_service_worker_spec.rb b/spec/workers/integrations/execute_worker_spec.rb
index 55ec07ff79c..19600f35c8f 100644
--- a/spec/workers/project_service_worker_spec.rb
+++ b/spec/workers/integrations/execute_worker_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe ProjectServiceWorker, '#perform' do
+RSpec.describe Integrations::ExecuteWorker, '#perform' do
let_it_be(:integration) { create(:jira_integration) }
let(:worker) { described_class.new }
@@ -36,4 +36,26 @@ RSpec.describe ProjectServiceWorker, '#perform' do
end.not_to raise_error
end
end
+
+ context 'when using the old worker class' do
+ let(:described_class) { ProjectServiceWorker }
+
+ it 'uses the correct worker attributes', :aggregate_failures do
+ expect(described_class.sidekiq_options).to include('retry' => 3, 'dead' => false)
+ expect(described_class.get_data_consistency).to eq(:always)
+ expect(described_class.get_feature_category).to eq(:integrations)
+ expect(described_class.get_urgency).to eq(:low)
+ expect(described_class.worker_has_external_dependencies?).to be(true)
+ end
+
+ it 'executes integration with given data' do
+ data = { test: 'test' }
+
+ expect_next_found_instance_of(integration.class) do |integration|
+ expect(integration).to receive(:execute).with(data)
+ end
+
+ worker.perform(integration.id, data)
+ end
+ end
end
diff --git a/spec/workers/irker_worker_spec.rb b/spec/workers/integrations/irker_worker_spec.rb
index c3d40ad2783..27dc08212ea 100644
--- a/spec/workers/irker_worker_spec.rb
+++ b/spec/workers/integrations/irker_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe IrkerWorker, '#perform' do
+RSpec.describe Integrations::IrkerWorker, '#perform' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:push_data) { HashWithIndifferentAccess.new(Gitlab::DataBuilder::Push.build_sample(project, user)) }
@@ -25,7 +25,7 @@ RSpec.describe IrkerWorker, '#perform' do
]
end
- let(:tcp_socket) { double('socket') }
+ let(:tcp_socket) { instance_double(TCPSocket) }
subject(:worker) { described_class.new }
@@ -35,7 +35,7 @@ RSpec.describe IrkerWorker, '#perform' do
allow(tcp_socket).to receive(:close).and_return(true)
end
- context 'local requests are not allowed' do
+ context 'when local requests are not allowed' do
before do
allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false)
end
@@ -43,7 +43,7 @@ RSpec.describe IrkerWorker, '#perform' do
it { expect(worker.perform(*arguments)).to be_falsey }
end
- context 'connection fails' do
+ context 'when connection fails' do
before do
allow(TCPSocket).to receive(:new).and_raise(Errno::ECONNREFUSED.new('test'))
end
@@ -51,7 +51,7 @@ RSpec.describe IrkerWorker, '#perform' do
it { expect(subject.perform(*arguments)).to be_falsey }
end
- context 'connection successful' do
+ context 'when connection successful' do
before do
allow(Gitlab::CurrentSettings)
.to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(true)
@@ -59,7 +59,7 @@ RSpec.describe IrkerWorker, '#perform' do
it { expect(subject.perform(*arguments)).to be_truthy }
- context 'new branch' do
+ context 'with new branch' do
it 'sends a correct message with branches url' do
branches_url = Gitlab::Routing.url_helpers
.project_branches_url(project)
@@ -74,7 +74,7 @@ RSpec.describe IrkerWorker, '#perform' do
end
end
- context 'deleted branch' do
+ context 'with deleted branch' do
it 'sends a correct message' do
push_data['after'] = '0000000000000000000000000000000000000000'
@@ -86,7 +86,7 @@ RSpec.describe IrkerWorker, '#perform' do
end
end
- context 'new commits to existing branch' do
+ context 'with new commits to existing branch' do
it 'sends a correct message with a compare url' do
compare_url = Gitlab::Routing.url_helpers
.project_compare_url(project,
@@ -101,6 +101,12 @@ RSpec.describe IrkerWorker, '#perform' do
subject.perform(*arguments)
end
end
+
+ context 'when using the old worker class' do
+ let(:described_class) { ::IrkerWorker }
+
+ it { expect(subject.perform(*arguments)).to be_truthy }
+ end
end
def wrap_message(text)
diff --git a/spec/workers/issue_placement_worker_spec.rb b/spec/workers/issue_placement_worker_spec.rb
deleted file mode 100644
index 9b5121d98e8..00000000000
--- a/spec/workers/issue_placement_worker_spec.rb
+++ /dev/null
@@ -1,151 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe IssuePlacementWorker do
- describe '#perform' do
- let_it_be(:time) { Time.now.utc }
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
- let_it_be(:author) { create(:user) }
- let_it_be(:common_attrs) { { author: author, project: project } }
- let_it_be(:unplaced) { common_attrs.merge(relative_position: nil) }
- let_it_be_with_reload(:issue) { create(:issue, **unplaced, created_at: time) }
- let_it_be_with_reload(:issue_a) { create(:issue, **unplaced, created_at: time - 1.minute) }
- let_it_be_with_reload(:issue_b) { create(:issue, **unplaced, created_at: time - 2.minutes) }
- let_it_be_with_reload(:issue_c) { create(:issue, **unplaced, created_at: time + 1.minute) }
- let_it_be_with_reload(:issue_d) { create(:issue, **unplaced, created_at: time + 2.minutes) }
- let_it_be_with_reload(:issue_e) { create(:issue, **common_attrs, relative_position: 10, created_at: time + 1.minute) }
- let_it_be_with_reload(:issue_f) { create(:issue, **unplaced, created_at: time + 1.minute) }
-
- let_it_be(:irrelevant) { create(:issue, relative_position: nil, created_at: time) }
-
- shared_examples 'running the issue placement worker' do
- let(:issue_id) { issue.id }
- let(:project_id) { project.id }
-
- it 'places all issues created at most 5 minutes before this one at the end, most recent last' do
- expect { run_worker }.not_to change { irrelevant.reset.relative_position }
-
- expect(project.issues.order_by_relative_position)
- .to eq([issue_e, issue_b, issue_a, issue, issue_c, issue_f, issue_d])
- expect(project.issues.where(relative_position: nil)).not_to exist
- end
-
- it 'schedules rebalancing if needed' do
- issue_a.update!(relative_position: RelativePositioning::MAX_POSITION)
-
- expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.group.id)
-
- run_worker
- end
-
- context 'there are more than QUERY_LIMIT unplaced issues' do
- before_all do
- # Ensure there are more than N issues in this set
- n = described_class::QUERY_LIMIT
- create_list(:issue, n - 5, **unplaced)
- end
-
- it 'limits the sweep to QUERY_LIMIT records, and reschedules placement' do
- expect(Issue).to receive(:move_nulls_to_end)
- .with(have_attributes(count: described_class::QUERY_LIMIT))
- .and_call_original
-
- expect(Issues::PlacementWorker).to receive(:perform_async).with(nil, project.id)
-
- run_worker
-
- expect(project.issues.where(relative_position: nil)).to exist
- end
-
- it 'is eventually correct' do
- prefix = project.issues.where.not(relative_position: nil).order(:relative_position).to_a
- moved = project.issues.where.not(id: prefix.map(&:id))
-
- run_worker
-
- expect(project.issues.where(relative_position: nil)).to exist
-
- run_worker
-
- expect(project.issues.where(relative_position: nil)).not_to exist
- expect(project.issues.order(:relative_position)).to eq(prefix + moved.order(:created_at, :id))
- end
- end
-
- context 'we are passed bad IDs' do
- let(:issue_id) { non_existing_record_id }
- let(:project_id) { non_existing_record_id }
-
- def max_positions_by_project
- Issue
- .group(:project_id)
- .pluck(:project_id, Issue.arel_table[:relative_position].maximum.as('max_relative_position'))
- .to_h
- end
-
- it 'does not move any issues to the end' do
- expect { run_worker }.not_to change { max_positions_by_project }
- end
-
- context 'the project_id refers to an empty project' do
- let!(:project_id) { create(:project).id }
-
- it 'does not move any issues to the end' do
- expect { run_worker }.not_to change { max_positions_by_project }
- end
- end
- end
-
- it 'anticipates the failure to place the issues, and schedules rebalancing' do
- allow(Issue).to receive(:move_nulls_to_end) { raise RelativePositioning::NoSpaceLeft }
-
- expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.group.id)
- expect(Gitlab::ErrorTracking)
- .to receive(:log_exception)
- .with(RelativePositioning::NoSpaceLeft, worker_arguments)
-
- run_worker
- end
- end
-
- context 'passing an issue ID' do
- def run_worker
- described_class.new.perform(issue_id)
- end
-
- let(:worker_arguments) { { issue_id: issue_id, project_id: nil } }
-
- it_behaves_like 'running the issue placement worker'
-
- context 'when block_issue_repositioning is enabled' do
- let(:issue_id) { issue.id }
- let(:project_id) { project.id }
-
- before do
- stub_feature_flags(block_issue_repositioning: group)
- end
-
- it 'does not run repositioning tasks' do
- expect { run_worker }.not_to change { issue.reset.relative_position }
- end
- end
- end
-
- context 'passing a project ID' do
- def run_worker
- described_class.new.perform(nil, project_id)
- end
-
- let(:worker_arguments) { { issue_id: nil, project_id: project_id } }
-
- it_behaves_like 'running the issue placement worker'
- end
- end
-
- it 'has the `until_executed` deduplicate strategy' do
- expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
- expect(described_class.get_deduplication_options).to include({ including_scheduled: true })
- end
-end
diff --git a/spec/workers/issue_rebalancing_worker_spec.rb b/spec/workers/issue_rebalancing_worker_spec.rb
deleted file mode 100644
index cfb19af05b3..00000000000
--- a/spec/workers/issue_rebalancing_worker_spec.rb
+++ /dev/null
@@ -1,104 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe IssueRebalancingWorker, :clean_gitlab_redis_shared_state do
- describe '#perform' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
- let_it_be(:issue) { create(:issue, project: project) }
-
- shared_examples 'running the worker' do
- it 'runs an instance of Issues::RelativePositionRebalancingService' do
- service = double(execute: nil)
- service_param = arguments.second.present? ? kind_of(Project.id_in([project]).class) : kind_of(group&.all_projects.class)
-
- expect(Issues::RelativePositionRebalancingService).to receive(:new).with(service_param).and_return(service)
-
- described_class.new.perform(*arguments)
- end
-
- it 'anticipates there being too many concurrent rebalances' do
- service = double
- service_param = arguments.second.present? ? kind_of(Project.id_in([project]).class) : kind_of(group&.all_projects.class)
-
- allow(service).to receive(:execute).and_raise(Issues::RelativePositionRebalancingService::TooManyConcurrentRebalances)
- expect(Issues::RelativePositionRebalancingService).to receive(:new).with(service_param).and_return(service)
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(Issues::RelativePositionRebalancingService::TooManyConcurrentRebalances, include(project_id: arguments.second, root_namespace_id: arguments.third))
-
- described_class.new.perform(*arguments)
- end
-
- it 'takes no action if the value is nil' do
- expect(Issues::RelativePositionRebalancingService).not_to receive(:new)
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- described_class.new.perform # all arguments are nil
- end
-
- it 'does not schedule a new rebalance if it finished under 1h ago' do
- container_type = arguments.second.present? ? ::Gitlab::Issues::Rebalancing::State::PROJECT : ::Gitlab::Issues::Rebalancing::State::NAMESPACE
- container_id = arguments.second || arguments.third
-
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(::Gitlab::Issues::Rebalancing::State.send(:recently_finished_key, container_type, container_id), true)
- end
-
- expect(Issues::RelativePositionRebalancingService).not_to receive(:new)
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- described_class.new.perform(*arguments)
- end
- end
-
- shared_examples 'safely handles non-existent ids' do
- it 'anticipates the inability to find the issue' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(ArgumentError, include(project_id: arguments.second, root_namespace_id: arguments.third))
- expect(Issues::RelativePositionRebalancingService).not_to receive(:new)
-
- described_class.new.perform(*arguments)
- end
- end
-
- context 'without root_namespace param' do
- it_behaves_like 'running the worker' do
- let(:arguments) { [-1, project.id] }
- end
-
- it_behaves_like 'safely handles non-existent ids' do
- let(:arguments) { [nil, -1] }
- end
-
- include_examples 'an idempotent worker' do
- let(:job_args) { [-1, project.id] }
- end
-
- include_examples 'an idempotent worker' do
- let(:job_args) { [nil, -1] }
- end
- end
-
- context 'with root_namespace param' do
- it_behaves_like 'running the worker' do
- let(:arguments) { [nil, nil, group.id] }
- end
-
- it_behaves_like 'safely handles non-existent ids' do
- let(:arguments) { [nil, nil, -1] }
- end
-
- include_examples 'an idempotent worker' do
- let(:job_args) { [nil, nil, group.id] }
- end
-
- include_examples 'an idempotent worker' do
- let(:job_args) { [nil, nil, -1] }
- end
- end
- end
-
- it 'has the `until_executed` deduplicate strategy' do
- expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
- expect(described_class.get_deduplication_options).to include({ including_scheduled: true })
- end
-end
diff --git a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
index 1814abfac1d..632e4fb3071 100644
--- a/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
+++ b/spec/workers/loose_foreign_keys/cleanup_worker_spec.rb
@@ -157,10 +157,10 @@ RSpec.describe LooseForeignKeys::CleanupWorker do
describe 'multi-database support' do
where(:current_minute, :configured_base_models, :expected_connection_model) do
- 2 | { main: 'ApplicationRecord', ci: 'Ci::ApplicationRecord' } | 'ApplicationRecord'
- 3 | { main: 'ApplicationRecord', ci: 'Ci::ApplicationRecord' } | 'Ci::ApplicationRecord'
- 2 | { main: 'ApplicationRecord' } | 'ApplicationRecord'
- 3 | { main: 'ApplicationRecord' } | 'ApplicationRecord'
+ 2 | { main: 'ActiveRecord::Base', ci: 'Ci::ApplicationRecord' } | 'ActiveRecord::Base'
+ 3 | { main: 'ActiveRecord::Base', ci: 'Ci::ApplicationRecord' } | 'Ci::ApplicationRecord'
+ 2 | { main: 'ActiveRecord::Base' } | 'ActiveRecord::Base'
+ 3 | { main: 'ActiveRecord::Base' } | 'ActiveRecord::Base'
end
with_them do
diff --git a/spec/workers/merge_requests/create_pipeline_worker_spec.rb b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
index 06d44c45706..441d7652219 100644
--- a/spec/workers/merge_requests/create_pipeline_worker_spec.rb
+++ b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
@@ -3,24 +3,50 @@
require 'spec_helper'
RSpec.describe MergeRequests::CreatePipelineWorker do
- subject(:worker) { described_class.new }
-
describe '#perform' do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:merge_request) { create(:merge_request) }
+ let(:worker) { described_class.new }
+
+ subject { worker.perform(project.id, user.id, merge_request.id) }
context 'when the objects exist' do
it 'calls the merge request create pipeline service and calls update head pipeline' do
aggregate_failures do
- expect_next_instance_of(MergeRequests::CreatePipelineService, project: project, current_user: user) do |service|
+ expect_next_instance_of(MergeRequests::CreatePipelineService,
+ project: project,
+ current_user: user,
+ params: { push_options: nil }) do |service|
expect(service).to receive(:execute).with(merge_request)
end
expect(MergeRequest).to receive(:find_by_id).with(merge_request.id).and_return(merge_request)
expect(merge_request).to receive(:update_head_pipeline)
- subject.perform(project.id, user.id, merge_request.id)
+ subject
+ end
+ end
+
+ context 'when push options are passed as Hash to the worker' do
+ let(:extra_params) { { 'push_options' => { 'ci' => { 'skip' => true } } } }
+
+ subject { worker.perform(project.id, user.id, merge_request.id, extra_params) }
+
+ it 'calls the merge request create pipeline service and calls update head pipeline' do
+ aggregate_failures do
+ expect_next_instance_of(MergeRequests::CreatePipelineService,
+ project: project,
+ current_user: user,
+ params: { push_options: { ci: { skip: true } } }) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ expect(MergeRequest).to receive(:find_by_id).with(merge_request.id).and_return(merge_request)
+ expect(merge_request).to receive(:update_head_pipeline)
+
+ subject
+ end
end
end
end
@@ -29,8 +55,7 @@ RSpec.describe MergeRequests::CreatePipelineWorker do
it 'does not call the create pipeline service' do
expect(MergeRequests::CreatePipelineService).not_to receive(:new)
- expect { subject.perform(project.id, user.id, merge_request.id) }
- .not_to raise_exception
+ expect { subject }.not_to raise_exception
end
end
diff --git a/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb b/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb
index 5e0b07067df..3574b8296a4 100644
--- a/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb
+++ b/spec/workers/merge_requests/update_head_pipeline_worker_spec.rb
@@ -24,18 +24,6 @@ RSpec.describe MergeRequests::UpdateHeadPipelineWorker do
create(:merge_request, source_branch: 'feature', target_branch: "v1.1.0", source_project: project)
end
- context 'when related merge request is already merged' do
- let!(:merged_merge_request) do
- create(:merge_request, source_branch: 'master', target_branch: "branch_2", source_project: project, state: 'merged')
- end
-
- it 'does not schedule update head pipeline job' do
- expect(UpdateHeadPipelineForMergeRequestWorker).not_to receive(:perform_async).with(merged_merge_request.id)
-
- subject
- end
- end
-
context 'when the head pipeline sha equals merge request sha' do
let(:ref) { 'feature' }
@@ -52,6 +40,22 @@ RSpec.describe MergeRequests::UpdateHeadPipelineWorker do
expect(merge_request_1.reload.head_pipeline).to eq(pipeline)
expect(merge_request_2.reload.head_pipeline).to eq(pipeline)
end
+
+ context 'when the merge request is not open' do
+ before do
+ merge_request_1.close!
+ end
+
+ it 'only updates the open merge requests' do
+ merge_request_1
+ merge_request_2
+
+ subject
+
+ expect(merge_request_1.reload.head_pipeline).not_to eq(pipeline)
+ expect(merge_request_2.reload.head_pipeline).to eq(pipeline)
+ end
+ end
end
context 'when the head pipeline sha does not equal merge request sha' do
diff --git a/spec/workers/namespaceless_project_destroy_worker_spec.rb b/spec/workers/namespaceless_project_destroy_worker_spec.rb
deleted file mode 100644
index 93e8415f3bb..00000000000
--- a/spec/workers/namespaceless_project_destroy_worker_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe NamespacelessProjectDestroyWorker do
- include ProjectForksHelper
-
- subject { described_class.new }
-
- before do
- # Stub after_save callbacks that will fail when Project has no namespace
- allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
- end
-
- describe '#perform' do
- context 'project has namespace' do
- it 'does not do anything' do
- project = create(:project)
-
- subject.perform(project.id)
-
- expect(Project.unscoped.all).to include(project)
- end
- end
-
- context 'project has no namespace' do
- let!(:project) { create(:project) }
-
- before do
- allow_any_instance_of(Project).to receive(:namespace).and_return(nil)
- end
-
- context 'project not a fork of another project' do
- it "truncates the project's team" do
- expect_any_instance_of(ProjectTeam).to receive(:truncate)
-
- subject.perform(project.id)
- end
-
- it 'deletes the project' do
- subject.perform(project.id)
-
- expect(Project.unscoped.all).not_to include(project)
- end
-
- it 'does not call unlink_fork' do
- is_expected.not_to receive(:unlink_fork)
-
- subject.perform(project.id)
- end
- end
-
- context 'project forked from another' do
- let!(:parent_project) { create(:project) }
- let(:project) do
- namespaceless_project = fork_project(parent_project)
- namespaceless_project.save!
- namespaceless_project
- end
-
- it 'closes open merge requests' do
- merge_request = create(:merge_request, source_project: project, target_project: parent_project)
-
- subject.perform(project.id)
-
- expect(merge_request.reload).to be_closed
- end
-
- it 'destroys fork network members' do
- subject.perform(project.id)
-
- expect(parent_project.forked_to_members).to be_empty
- end
- end
- end
- end
-end
diff --git a/spec/workers/pages_transfer_worker_spec.rb b/spec/workers/pages_transfer_worker_spec.rb
deleted file mode 100644
index 7d17461bc5a..00000000000
--- a/spec/workers/pages_transfer_worker_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PagesTransferWorker do
- describe '#perform' do
- Gitlab::PagesTransfer::METHODS.each do |meth|
- context "when method is #{meth}" do
- let(:args) { [1, 2, 3] }
-
- it 'calls the service with the given arguments' do
- expect_next_instance_of(Gitlab::PagesTransfer) do |service|
- expect(service).to receive(meth).with(*args).and_return(true)
- end
-
- subject.perform(meth, args)
- end
-
- it 'raises an error when the service returns false' do
- expect_next_instance_of(Gitlab::PagesTransfer) do |service|
- expect(service).to receive(meth).with(*args).and_return(false)
- end
-
- expect { subject.perform(meth, args) }
- .to raise_error(described_class::TransferFailedError)
- end
- end
- end
-
- describe 'when method is not allowed' do
- it 'does nothing' do
- expect(Gitlab::PagesTransfer).not_to receive(:new)
-
- subject.perform('object_id', [])
- end
- end
- end
-end
diff --git a/spec/workers/pipeline_hooks_worker_spec.rb b/spec/workers/pipeline_hooks_worker_spec.rb
index 13a86c3d4fe..5d28b1e129a 100644
--- a/spec/workers/pipeline_hooks_worker_spec.rb
+++ b/spec/workers/pipeline_hooks_worker_spec.rb
@@ -25,6 +25,16 @@ RSpec.describe PipelineHooksWorker do
.not_to raise_error
end
end
+
+ context 'when the user is blocked' do
+ let(:pipeline) { create(:ci_pipeline, user: create(:user, :blocked)) }
+
+ it 'returns early without executing' do
+ expect(Ci::Pipelines::HookService).not_to receive(:new)
+
+ described_class.new.perform(pipeline.id)
+ end
+ end
end
it_behaves_like 'worker with data consistency',
diff --git a/spec/workers/pipeline_notification_worker_spec.rb b/spec/workers/pipeline_notification_worker_spec.rb
index 583c4bf1c0c..672debd0501 100644
--- a/spec/workers/pipeline_notification_worker_spec.rb
+++ b/spec/workers/pipeline_notification_worker_spec.rb
@@ -21,6 +21,20 @@ RSpec.describe PipelineNotificationWorker, :mailer do
subject.perform(non_existing_record_id)
end
+ context 'when the user is blocked' do
+ before do
+ expect_next_found_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:user) { build(:user, :blocked) }
+ end
+ end
+
+ it 'does nothing' do
+ expect(NotificationService).not_to receive(:new)
+
+ subject.perform(pipeline.id)
+ end
+ end
+
it_behaves_like 'worker with data consistency',
described_class,
data_consistency: :delayed
diff --git a/spec/workers/project_daily_statistics_worker_spec.rb b/spec/workers/project_daily_statistics_worker_spec.rb
deleted file mode 100644
index fa9d938acca..00000000000
--- a/spec/workers/project_daily_statistics_worker_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe ProjectDailyStatisticsWorker, '#perform' do
- let(:worker) { described_class.new }
- let(:project) { create(:project) }
-
- describe '#perform' do
- context 'with a non-existing project' do
- it 'does nothing' do
- expect(Projects::FetchStatisticsIncrementService).not_to receive(:new)
-
- worker.perform(-1)
- end
- end
-
- context 'with an existing project without a repository' do
- it 'does nothing' do
- expect(Projects::FetchStatisticsIncrementService).not_to receive(:new)
-
- worker.perform(project.id)
- end
- end
-
- it 'calls daily_statistics_service with the given project' do
- project = create(:project, :repository)
-
- expect_next_instance_of(Projects::FetchStatisticsIncrementService, project) do |service|
- expect(service).to receive(:execute)
- end
-
- worker.perform(project.id)
- end
- end
-end
diff --git a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
index 0e7b4ea504c..ec10c66968d 100644
--- a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
+++ b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
@@ -5,6 +5,34 @@ require 'spec_helper'
RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
include ProjectHelpers
+ shared_examples 'worker is running for more than 4 minutes' do
+ before do
+ subject.instance_variable_set(:@start_time, ::Gitlab::Metrics::System.monotonic_time - 5.minutes)
+ end
+
+ it 'stores the last processed inactive project_id in redis cache' do
+ Gitlab::Redis::Cache.with do |redis|
+ expect { worker.perform }
+ .to change { redis.get('last_processed_inactive_project_id') }.to(inactive_large_project.id.to_s)
+ end
+ end
+ end
+
+ shared_examples 'worker finishes processing in less than 4 minutes' do
+ before do
+ Gitlab::Redis::Cache.with do |redis|
+ redis.set('last_processed_inactive_project_id', inactive_large_project.id)
+ end
+ end
+
+ it 'clears the last processed inactive project_id from redis cache' do
+ Gitlab::Redis::Cache.with do |redis|
+ expect { worker.perform }
+ .to change { redis.get('last_processed_inactive_project_id') }.to(nil)
+ end
+ end
+ end
+
describe "#perform" do
subject(:worker) { described_class.new }
@@ -44,7 +72,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
end
it 'does not invoke Projects::InactiveProjectsDeletionNotificationWorker' do
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -68,7 +96,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
end
it 'does not invoke Projects::InactiveProjectsDeletionNotificationWorker' do
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -79,11 +107,12 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
expect(inactive_large_project.reload.pending_delete).to eq(false)
end
+
+ it_behaves_like 'worker is running for more than 4 minutes'
+ it_behaves_like 'worker finishes processing in less than 4 minutes'
end
context 'when feature flag is enabled', :clean_gitlab_redis_shared_state, :sidekiq_inline do
- let_it_be(:delay) { anything }
-
before do
stub_feature_flags(inactive_projects_deletion: true)
end
@@ -93,8 +122,8 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
expect(redis).to receive(:hset).with('inactive_projects_deletion_warning_email_notified',
"project:#{inactive_large_project.id}", Date.current)
end
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).to receive(:perform_in).with(
- delay, inactive_large_project.id, deletion_date).and_call_original
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).to receive(:perform_async).with(
+ inactive_large_project.id, deletion_date).and_call_original
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -106,7 +135,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
Date.current.to_s)
end
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -118,7 +147,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
15.months.ago.to_date.to_s)
end
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).to receive(:new).with(inactive_large_project, admin_user, {})
.at_least(:once).and_call_original
@@ -131,6 +160,9 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
"project:#{inactive_large_project.id}")).to be_nil
end
end
+
+ it_behaves_like 'worker is running for more than 4 minutes'
+ it_behaves_like 'worker finishes processing in less than 4 minutes'
end
it_behaves_like 'an idempotent worker'
diff --git a/spec/workers/prometheus/create_default_alerts_worker_spec.rb b/spec/workers/prometheus/create_default_alerts_worker_spec.rb
deleted file mode 100644
index d935bb20a29..00000000000
--- a/spec/workers/prometheus/create_default_alerts_worker_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Prometheus::CreateDefaultAlertsWorker do
- let_it_be(:project) { create(:project) }
-
- subject { described_class.new.perform(project.id) }
-
- it 'does nothing' do
- expect { subject }.not_to change { PrometheusAlert.count }
- end
-end
diff --git a/spec/workers/repository_remove_remote_worker_spec.rb b/spec/workers/repository_remove_remote_worker_spec.rb
deleted file mode 100644
index 11081ec9b37..00000000000
--- a/spec/workers/repository_remove_remote_worker_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe RepositoryRemoveRemoteWorker do
- include ExclusiveLeaseHelpers
- include GitHelpers
-
- describe '#perform' do
- let!(:project) { create(:project, :repository) }
- let(:remote_name) { 'joe'}
- let(:lease_key) { "remove_remote_#{project.id}_#{remote_name}" }
- let(:lease_timeout) { RepositoryRemoveRemoteWorker::LEASE_TIMEOUT }
-
- it 'returns nil when project does not exist' do
- expect(subject.perform(-1, 'remote_name')).to be_nil
- end
-
- context 'when project exists' do
- before do
- allow(Project)
- .to receive(:find_by)
- .with(id: project.id)
- .and_return(project)
- end
-
- it 'does nothing when it cannot obtain the lease' do
- stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
-
- expect(project.repository)
- .not_to receive(:remove_remote)
- expect(subject)
- .not_to receive(:log_error)
-
- subject.perform(project.id, remote_name)
- end
-
- it 'does nothing when it obtains a lease' do
- stub_exclusive_lease(lease_key, timeout: lease_timeout)
-
- expect(project.repository)
- .not_to receive(:remove_remote)
-
- subject.perform(project.id, remote_name)
- end
- end
- end
-end
diff --git a/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb b/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
index ef515e43474..49730d9ab8c 100644
--- a/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
+++ b/spec/workers/schedule_merge_request_cleanup_refs_worker_spec.rb
@@ -25,6 +25,12 @@ RSpec.describe ScheduleMergeRequestCleanupRefsWorker do
end
end
+ it 'retries stuck cleanup schedules' do
+ expect(MergeRequest::CleanupSchedule).to receive(:stuck_retry!)
+
+ worker.perform
+ end
+
include_examples 'an idempotent worker' do
it 'schedules MergeRequestCleanupRefsWorker to be performed with capacity' do
expect(MergeRequestCleanupRefsWorker).to receive(:perform_with_capacity).twice
diff --git a/spec/workers/terraform/states/destroy_worker_spec.rb b/spec/workers/terraform/states/destroy_worker_spec.rb
new file mode 100644
index 00000000000..02e79373279
--- /dev/null
+++ b/spec/workers/terraform/states/destroy_worker_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Terraform::States::DestroyWorker do
+ let(:state) { create(:terraform_state) }
+
+ describe '#perform' do
+ let(:state_id) { state.id }
+ let(:deletion_service) { instance_double(Terraform::States::DestroyService, execute: true) }
+
+ subject { described_class.new.perform(state_id) }
+
+ it 'calls the deletion service' do
+ expect(deletion_service).to receive(:execute).once
+ expect(Terraform::States::DestroyService).to receive(:new)
+ .with(state).and_return(deletion_service)
+
+ subject
+ end
+
+ context 'when the state no longer exists' do
+ let(:state_id) { -1 }
+
+ it 'completes without error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/workers/update_merge_requests_worker_spec.rb b/spec/workers/update_merge_requests_worker_spec.rb
index bd0dc2f9ef4..64fcc2bd388 100644
--- a/spec/workers/update_merge_requests_worker_spec.rb
+++ b/spec/workers/update_merge_requests_worker_spec.rb
@@ -3,28 +3,47 @@
require 'spec_helper'
RSpec.describe UpdateMergeRequestsWorker do
- include RepoHelpers
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:oldrev) { "123456" }
+ let_it_be(:newrev) { "789012" }
+ let_it_be(:ref) { "refs/heads/test" }
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
-
- subject { described_class.new }
+ let(:worker) { described_class.new }
describe '#perform' do
- let(:oldrev) { "123456" }
- let(:newrev) { "789012" }
- let(:ref) { "refs/heads/test" }
-
- def perform
- subject.perform(project.id, user.id, oldrev, newrev, ref)
- end
+ subject { worker.perform(project.id, user.id, oldrev, newrev, ref) }
it 'executes MergeRequests::RefreshService with expected values' do
- expect_next_instance_of(MergeRequests::RefreshService, project: project, current_user: user) do |refresh_service|
- expect(refresh_service).to receive(:execute).with(oldrev, newrev, ref)
+ expect_next_instance_of(MergeRequests::RefreshService,
+ project: project,
+ current_user: user,
+ params: { push_options: nil }) do |service|
+ expect(service)
+ .to receive(:execute)
+ .with(oldrev, newrev, ref)
end
- perform
+ subject
+ end
+
+ context 'when push options are passed as Hash' do
+ let(:extra_params) { { 'push_options' => { 'ci' => { 'skip' => true } } } }
+
+ subject { worker.perform(project.id, user.id, oldrev, newrev, ref, extra_params) }
+
+ it 'executes MergeRequests::RefreshService with expected values' do
+ expect_next_instance_of(MergeRequests::RefreshService,
+ project: project,
+ current_user: user,
+ params: { push_options: { ci: { skip: true } } }) do |service|
+ expect(service)
+ .to receive(:execute)
+ .with(oldrev, newrev, ref)
+ end
+
+ subject
+ end
end
end
end
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
index 20cd55e19eb..297301c45e2 100644
--- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
describe '#perform' do
let_it_be(:dormant) { create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date) }
- let_it_be(:inactive) { create(:user, last_activity_on: nil) }
+ let_it_be(:inactive) { create(:user, last_activity_on: nil, created_at: User::MINIMUM_DAYS_CREATED.days.ago.to_date) }
+ let_it_be(:inactive_recently_created) { create(:user, last_activity_on: nil, created_at: (User::MINIMUM_DAYS_CREATED - 1).days.ago.to_date) }
subject(:worker) { described_class.new }
@@ -71,6 +72,12 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
expect(human_user.reload.state).to eq('blocked')
expect(service_user.reload.state).to eq('blocked')
end
+
+ it 'does not deactivate recently created users' do
+ worker.perform
+
+ expect(inactive_recently_created.reload.state).to eq('active')
+ end
end
context 'when automatic deactivation of dormant users is disabled' do
diff --git a/spec/workers/web_hooks/destroy_worker_spec.rb b/spec/workers/web_hooks/destroy_worker_spec.rb
index fd26c8591ee..8e75610a031 100644
--- a/spec/workers/web_hooks/destroy_worker_spec.rb
+++ b/spec/workers/web_hooks/destroy_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe WebHooks::DestroyWorker do
+ include AfterNextHelpers
+
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -20,23 +22,26 @@ RSpec.describe WebHooks::DestroyWorker do
let!(:other_log) { create(:web_hook_log, web_hook: other_hook) }
it "deletes the Web hook and logs", :aggregate_failures do
+ expect(WebHooks::LogDestroyWorker).to receive(:perform_async)
+
expect { subject.perform(user.id, hook.id) }
- .to change { WebHookLog.count }.from(2).to(1)
- .and change { WebHook.count }.from(2).to(1)
+ .to change { WebHook.count }.from(2).to(1)
expect(WebHook.find(other_hook.id)).to be_present
expect(WebHookLog.find(other_log.id)).to be_present
end
it "raises and tracks an error if destroy failed" do
- allow_next_instance_of(::WebHooks::DestroyService) do |instance|
- expect(instance).to receive(:sync_destroy).with(anything).and_return({ status: :error, message: "failed" })
- end
+ expect_next(::WebHooks::DestroyService)
+ .to receive(:sync_destroy).with(anything)
+ .and_return(ServiceResponse.error(message: "failed"))
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_exception)
+ .with(an_instance_of(described_class::DestroyError), { web_hook_id: hook.id })
+ .and_call_original
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(an_instance_of(::WebHooks::DestroyService::DestroyError), web_hook_id: hook.id)
- .and_call_original
- expect { subject.perform(user.id, hook.id) }.to raise_error(::WebHooks::DestroyService::DestroyError)
+ expect { subject.perform(user.id, hook.id) }.to raise_error(described_class::DestroyError)
end
context 'with unknown hook' do
diff --git a/spec/workers/web_hooks/log_destroy_worker_spec.rb b/spec/workers/web_hooks/log_destroy_worker_spec.rb
new file mode 100644
index 00000000000..0c107c05360
--- /dev/null
+++ b/spec/workers/web_hooks/log_destroy_worker_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::LogDestroyWorker do
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.new }
+
+ describe "#perform" do
+ let!(:hook) { create(:project_hook, project: project) }
+ let!(:other_hook) { create(:project_hook, project: project) }
+ let!(:log) { create(:web_hook_log, web_hook: hook) }
+ let!(:other_log) { create(:web_hook_log, web_hook: other_hook) }
+
+ context 'with a Web hook' do
+ it "deletes the relevant logs", :aggregate_failures do
+ hook.destroy! # The worker does not depend on the hook still existing
+
+ expect { subject.perform({ 'hook_id' => hook.id }) }
+ .to change { WebHookLog.count }.by(-1)
+
+ expect(WebHook.find(other_hook.id)).to be_present
+ expect(WebHookLog.find(other_log.id)).to be_present
+ end
+
+ it 'is idempotent' do
+ subject.perform({ 'hook_id' => hook.id })
+ subject.perform({ 'hook_id' => hook.id })
+
+ expect(hook.web_hook_logs).to be_none
+ end
+
+ it "raises and tracks an error if destroy failed" do
+ expect_next(::WebHooks::LogDestroyService)
+ .to receive(:execute).and_return(ServiceResponse.error(message: "failed"))
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_exception)
+ .with(an_instance_of(described_class::DestroyError), { web_hook_id: hook.id })
+ .and_call_original
+
+ expect { subject.perform({ 'hook_id' => hook.id }) }
+ .to raise_error(described_class::DestroyError)
+ end
+
+ context 'with extra arguments' do
+ it 'does not raise an error' do
+ expect { subject.perform({ 'hook_id' => hook.id, 'extra' => true }) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(1)
+ end
+ end
+ end
+
+ context 'with no arguments' do
+ it 'does not raise an error' do
+ expect { subject.perform }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(2)
+ end
+ end
+
+ context 'with empty arguments' do
+ it 'does not raise an error' do
+ expect { subject.perform({}) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(2)
+ end
+ end
+
+ context 'with unknown hook' do
+ it 'does not raise an error' do
+ expect { subject.perform({ 'hook_id' => non_existing_record_id }) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(2)
+ end
+ end
+ end
+end