summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2022-10-20 09:40:42 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2022-10-20 09:40:42 +0000
commitee664acb356f8123f4f6b00b73c1e1cf0866c7fb (patch)
treef8479f94a28f66654c6a4f6fb99bad6b4e86a40e /spec
parent62f7d5c5b69180e82ae8196b7b429eeffc8e7b4f (diff)
downloadgitlab-ce-ee664acb356f8123f4f6b00b73c1e1cf0866c7fb.tar.gz
Add latest changes from gitlab-org/gitlab@15-5-stable-eev15.5.0-rc42
Diffstat (limited to 'spec')
-rw-r--r--spec/bin/diagnostic_reports_uploader_spec.rb86
-rw-r--r--spec/commands/diagnostic_reports/uploader_smoke_spec.rb83
-rw-r--r--spec/components/pajamas/alert_component_spec.rb49
-rw-r--r--spec/components/pajamas/progress_component_spec.rb36
-rw-r--r--spec/components/previews/pajamas/alert_component_preview.rb5
-rw-r--r--spec/components/previews/pajamas/progress_component_preview.rb16
-rw-r--r--spec/config/object_store_settings_spec.rb21
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb7
-rw-r--r--spec/controllers/admin/cohorts_controller_spec.rb2
-rw-r--r--spec/controllers/admin/dev_ops_report_controller_spec.rb2
-rw-r--r--spec/controllers/admin/groups_controller_spec.rb60
-rw-r--r--spec/controllers/admin/usage_trends_controller_spec.rb2
-rw-r--r--spec/controllers/autocomplete_controller_spec.rb2
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb596
-rw-r--r--spec/controllers/boards/lists_controller_spec.rb333
-rw-r--r--spec/controllers/concerns/boards_responses_spec.rb23
-rw-r--r--spec/controllers/concerns/product_analytics_tracking_spec.rb44
-rw-r--r--spec/controllers/concerns/send_file_upload_spec.rb7
-rw-r--r--spec/controllers/dashboard_controller_spec.rb31
-rw-r--r--spec/controllers/groups/boards_controller_spec.rb84
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb71
-rw-r--r--spec/controllers/groups_controller_spec.rb22
-rw-r--r--spec/controllers/health_check_controller_spec.rb13
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb23
-rw-r--r--spec/controllers/import/github_controller_spec.rb103
-rw-r--r--spec/controllers/profiles/personal_access_tokens_controller_spec.rb20
-rw-r--r--spec/controllers/profiles/preferences_controller_spec.rb3
-rw-r--r--spec/controllers/profiles/two_factor_auths_controller_spec.rb28
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/autocomplete_sources_controller_spec.rb136
-rw-r--r--spec/controllers/projects/boards_controller_spec.rb150
-rw-r--r--spec/controllers/projects/compare_controller_spec.rb94
-rw-r--r--spec/controllers/projects/cycle_analytics_controller_spec.rb2
-rw-r--r--spec/controllers/projects/deploy_keys_controller_spec.rb31
-rw-r--r--spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb2
-rw-r--r--spec/controllers/projects/graphs_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb56
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb23
-rw-r--r--spec/controllers/projects/milestones_controller_spec.rb20
-rw-r--r--spec/controllers/projects/pages_domains_controller_spec.rb56
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb2
-rw-r--r--spec/controllers/projects/prometheus/metrics_controller_spec.rb2
-rw-r--r--spec/controllers/projects/protected_branches_controller_spec.rb46
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb25
-rw-r--r--spec/controllers/projects_controller_spec.rb30
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb5
-rw-r--r--spec/controllers/search_controller_spec.rb2
-rw-r--r--spec/controllers/sessions_controller_spec.rb18
-rw-r--r--spec/controllers/uploads_controller_spec.rb2
-rw-r--r--spec/factories/bulk_import/entities.rb1
-rw-r--r--spec/factories/ci/build_metadata.rb4
-rw-r--r--spec/factories/ci/builds.rb26
-rw-r--r--spec/factories/ci/job_artifacts.rb22
-rw-r--r--spec/factories/ci/pipeline_artifacts.rb5
-rw-r--r--spec/factories/ci/pipeline_metadata.rb10
-rw-r--r--spec/factories/ci/pipelines.rb8
-rw-r--r--spec/factories/ci/reports/sbom/components.rb2
-rw-r--r--spec/factories/ci/reports/sbom/reports.rb18
-rw-r--r--spec/factories/ci/reports/sbom/sources.rb5
-rw-r--r--spec/factories/customer_relations/contacts.rb4
-rw-r--r--spec/factories/events.rb14
-rw-r--r--spec/factories/git_wiki_commit_details.rb15
-rw-r--r--spec/factories/go_module_commits.rb4
-rw-r--r--spec/factories/incident_management/timeline_event_tag_links.rb8
-rw-r--r--spec/factories/incident_management/timeline_event_tags.rb8
-rw-r--r--spec/factories/ml/candidate_metrics.rb12
-rw-r--r--spec/factories/ml/candidate_params.rb10
-rw-r--r--spec/factories/ml/candidates.rb7
-rw-r--r--spec/factories/ml/experiments.rb5
-rw-r--r--spec/factories/notes.rb2
-rw-r--r--spec/factories/packages/packages.rb2
-rw-r--r--spec/factories/packages/rpm/rpm_repository_files.rb33
-rw-r--r--spec/factories/projects.rb67
-rw-r--r--spec/factories/users/namespace_user_callouts.rb10
-rw-r--r--spec/factories/users/phone_number_validations.rb10
-rw-r--r--spec/fast_spec_helper.rb2
-rw-r--r--spec/features/admin/admin_broadcast_messages_spec.rb1
-rw-r--r--spec/features/admin/admin_groups_spec.rb25
-rw-r--r--spec/features/admin/admin_runners_spec.rb64
-rw-r--r--spec/features/admin/admin_settings_spec.rb44
-rw-r--r--spec/features/admin/admin_users_impersonation_tokens_spec.rb14
-rw-r--r--spec/features/admin/users/users_spec.rb8
-rw-r--r--spec/features/boards/boards_spec.rb1
-rw-r--r--spec/features/dashboard/datetime_on_tooltips_spec.rb4
-rw-r--r--spec/features/dashboard/issues_filter_spec.rb2
-rw-r--r--spec/features/dashboard/projects_spec.rb3
-rw-r--r--spec/features/dashboard/user_filters_projects_spec.rb7
-rw-r--r--spec/features/discussion_comments/issue_spec.rb1
-rw-r--r--spec/features/discussion_comments/merge_request_spec.rb1
-rw-r--r--spec/features/expand_collapse_diffs_spec.rb5
-rw-r--r--spec/features/global_search_spec.rb1
-rw-r--r--spec/features/groups/empty_states_spec.rb4
-rw-r--r--spec/features/groups/group_runners_spec.rb46
-rw-r--r--spec/features/groups/labels/sort_labels_spec.rb4
-rw-r--r--spec/features/groups/merge_requests_spec.rb10
-rw-r--r--spec/features/groups/milestones_sorting_spec.rb4
-rw-r--r--spec/features/groups/new_group_page_spec.rb32
-rw-r--r--spec/features/groups/settings/repository_spec.rb34
-rw-r--r--spec/features/help_dropdown_spec.rb4
-rw-r--r--spec/features/ide/user_commits_changes_spec.rb35
-rw-r--r--spec/features/incidents/incident_timeline_events_spec.rb1
-rw-r--r--spec/features/issuables/markdown_references/jira_spec.rb6
-rw-r--r--spec/features/issues/incident_issue_spec.rb28
-rw-r--r--spec/features/issues/related_issues_spec.rb116
-rw-r--r--spec/features/issues/resource_label_events_spec.rb9
-rw-r--r--spec/features/issues/service_desk_spec.rb1
-rw-r--r--spec/features/issues/user_interacts_with_awards_spec.rb2
-rw-r--r--spec/features/issues/user_scrolls_to_deeplinked_note_spec.rb1
-rw-r--r--spec/features/markdown/gitlab_flavored_markdown_spec.rb9
-rw-r--r--spec/features/markdown/metrics_spec.rb2
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb18
-rw-r--r--spec/features/merge_request/close_reopen_report_toggle_spec.rb10
-rw-r--r--spec/features/merge_request/merge_request_discussion_lock_spec.rb4
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb1
-rw-r--r--spec/features/merge_request/user_comments_on_merge_request_spec.rb1
-rw-r--r--spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb4
-rw-r--r--spec/features/merge_request/user_jumps_to_discussion_spec.rb29
-rw-r--r--spec/features/merge_request/user_manages_subscription_spec.rb6
-rw-r--r--spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb4
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb15
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb32
-rw-r--r--spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb34
-rw-r--r--spec/features/milestone_spec.rb12
-rw-r--r--spec/features/milestones/user_creates_milestone_spec.rb99
-rw-r--r--spec/features/oauth_registration_spec.rb175
-rw-r--r--spec/features/profiles/password_spec.rb23
-rw-r--r--spec/features/profiles/two_factor_auths_spec.rb8
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb50
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb2
-rw-r--r--spec/features/projects/blobs/edit_spec.rb39
-rw-r--r--spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb63
-rw-r--r--spec/features/projects/branches_spec.rb8
-rw-r--r--spec/features/projects/ci/editor_spec.rb13
-rw-r--r--spec/features/projects/ci/lint_spec.rb9
-rw-r--r--spec/features/projects/environments/environment_metrics_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb4
-rw-r--r--spec/features/projects/fork_spec.rb53
-rw-r--r--spec/features/projects/infrastructure_registry_spec.rb2
-rw-r--r--spec/features/projects/labels/sort_labels_spec.rb4
-rw-r--r--spec/features/projects/milestones/milestones_sorting_spec.rb4
-rw-r--r--spec/features/projects/packages_spec.rb2
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb4
-rw-r--r--spec/features/projects/pipelines/legacy_pipeline_spec.rb2
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb2
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb4
-rw-r--r--spec/features/projects/releases/user_creates_release_spec.rb3
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/user_changes_default_branch_spec.rb13
-rw-r--r--spec/features/projects/settings/user_transfers_a_project_spec.rb2
-rw-r--r--spec/features/projects/settings/webhooks_settings_spec.rb8
-rw-r--r--spec/features/projects/show/user_interacts_with_stars_spec.rb12
-rw-r--r--spec/features/projects/show/user_sees_collaboration_links_spec.rb6
-rw-r--r--spec/features/projects/user_sorts_projects_spec.rb20
-rw-r--r--spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb33
-rw-r--r--spec/features/protected_branches_spec.rb2
-rw-r--r--spec/features/runners_spec.rb10
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb2
-rw-r--r--spec/features/search/user_searches_for_comments_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_commits_spec.rb10
-rw-r--r--spec/features/search/user_searches_for_issues_spec.rb3
-rw-r--r--spec/features/search/user_searches_for_merge_requests_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_milestones_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_projects_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_users_spec.rb1
-rw-r--r--spec/features/search/user_searches_for_wiki_pages_spec.rb5
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb1
-rw-r--r--spec/features/snippets/search_snippets_spec.rb4
-rw-r--r--spec/features/tags/developer_creates_tag_spec.rb25
-rw-r--r--spec/features/unsubscribe_links_spec.rb13
-rw-r--r--spec/features/uploads/user_uploads_avatar_to_profile_spec.rb2
-rw-r--r--spec/features/user_opens_link_to_comment_spec.rb2
-rw-r--r--spec/features/users/signup_spec.rb1
-rw-r--r--spec/features/work_items/work_item_spec.rb33
-rw-r--r--spec/finders/ci/runners_finder_spec.rb37
-rw-r--r--spec/finders/clusters/agent_authorizations_finder_spec.rb32
-rw-r--r--spec/finders/groups/accepting_group_transfers_finder_spec.rb13
-rw-r--r--spec/finders/merge_requests_finder_spec.rb53
-rw-r--r--spec/finders/packages/nuget/package_finder_spec.rb10
-rw-r--r--spec/finders/personal_access_tokens_finder_spec.rb44
-rw-r--r--spec/fixtures/api/schemas/board.json10
-rw-r--r--spec/fixtures/api/schemas/boards.json4
-rw-r--r--spec/fixtures/api/schemas/current-board.json16
-rw-r--r--spec/fixtures/api/schemas/ml/get_experiment.json27
-rw-r--r--spec/fixtures/api/schemas/ml/list_experiments.json39
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/user/admin.json2
-rw-r--r--spec/fixtures/ci_secure_files/sample.cerbin0 -> 1479 bytes
-rw-r--r--spec/fixtures/ci_secure_files/sample.mobileprovisionbin0 -> 12278 bytes
-rw-r--r--spec/fixtures/ci_secure_files/sample.p12bin0 -> 3352 bytes
-rw-r--r--spec/fixtures/git-cheat-sheet.pdf3
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json6
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/tree/project/ci_pipelines.ndjson2
-rw-r--r--spec/fixtures/markdown/markdown_golden_master_examples.yml16
-rw-r--r--spec/fixtures/packages/rpm/hello-0.0.1-1.fc29.src.rpmbin0 -> 6435 bytes
-rw-r--r--spec/fixtures/packages/rpm/payload.json47
-rw-r--r--spec/fixtures/packages/rpm/repodata/repomd.xml27
-rw-r--r--spec/fixtures/packages/rubygems/package.gemspec2
-rw-r--r--spec/fixtures/sample.pdf13
-rw-r--r--spec/fixtures/security_reports/deprecated/gl-sast-report.json964
-rw-r--r--spec/fixtures/security_reports/feature-branch/gl-sast-report.json4
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report-names.json59
-rw-r--r--spec/fixtures/security_reports/master/gl-common-scanning-report.json170
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-report-minimal.json4
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json7
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json134
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-report.json4
-rw-r--r--spec/fixtures/security_reports/master/gl-secret-detection-report.json3
-rw-r--r--spec/frontend/__helpers__/class_spec_helper.js10
-rw-r--r--spec/frontend/__helpers__/class_spec_helper_spec.js26
-rw-r--r--spec/frontend/__helpers__/dom_shims/index.js1
-rw-r--r--spec/frontend/__helpers__/dom_shims/text_encoder.js4
-rw-r--r--spec/frontend/__helpers__/graphql_transformer.js8
-rw-r--r--spec/frontend/__helpers__/shared_test_setup.js15
-rw-r--r--spec/frontend/__helpers__/stub_component.js2
-rw-r--r--spec/frontend/__helpers__/vue_mount_component_helper.js34
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper_spec.js3
-rw-r--r--spec/frontend/__mocks__/monaco-editor/index.js2
-rw-r--r--spec/frontend/__mocks__/monaco-yaml/index.js4
-rw-r--r--spec/frontend/access_tokens/components/access_token_table_app_spec.js147
-rw-r--r--spec/frontend/access_tokens/components/new_access_token_app_spec.js19
-rw-r--r--spec/frontend/access_tokens/index_spec.js73
-rw-r--r--spec/frontend/admin/broadcast_messages/components/base_spec.js112
-rw-r--r--spec/frontend/admin/broadcast_messages/components/messages_table_spec.js51
-rw-r--r--spec/frontend/admin/broadcast_messages/mock_data.js17
-rw-r--r--spec/frontend/admin/deploy_keys/components/table_spec.js4
-rw-r--r--spec/frontend/admin/users/components/users_table_spec.js4
-rw-r--r--spec/frontend/alert_management/components/alert_management_table_spec.js13
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_form_spec.js59
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js26
-rw-r--r--spec/frontend/api/projects_api_spec.js23
-rw-r--r--spec/frontend/awards_handler_spec.js4
-rw-r--r--spec/frontend/badges/components/badge_form_spec.js194
-rw-r--r--spec/frontend/badges/components/badge_list_row_spec.js137
-rw-r--r--spec/frontend/badges/components/badge_list_spec.js125
-rw-r--r--spec/frontend/badges/components/badge_spec.js150
-rw-r--r--spec/frontend/batch_comments/components/drafts_count_spec.js30
-rw-r--r--spec/frontend/batch_comments/components/preview_item_spec.js36
-rw-r--r--spec/frontend/batch_comments/components/publish_button_spec.js30
-rw-r--r--spec/frontend/behaviors/bind_in_out_spec.js9
-rw-r--r--spec/frontend/blame/blame_redirect_spec.js70
-rw-r--r--spec/frontend/blob/3d_viewer/mesh_object_spec.js2
-rw-r--r--spec/frontend/blob/blob_blame_link_spec.js6
-rw-r--r--spec/frontend/blob/components/blob_content_spec.js4
-rw-r--r--spec/frontend/blob/components/table_contents_spec.js5
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js13
-rw-r--r--spec/frontend/boards/components/board_filtered_search_spec.js8
-rw-r--r--spec/frontend/boards/stores/actions_spec.js254
-rw-r--r--spec/frontend/captcha/init_recaptcha_script_spec.js3
-rw-r--r--spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js16
-rw-r--r--spec/frontend/ci_variable_list/components/ci_group_variables_spec.js16
-rw-r--r--spec/frontend/ci_variable_list/components/ci_project_variables_spec.js20
-rw-r--r--spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js14
-rw-r--r--spec/frontend/ci_variable_list/mocks.js6
-rw-r--r--spec/frontend/ci_variable_list/store/actions_spec.js12
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js4
-rw-r--r--spec/frontend/code_navigation/utils/index_spec.js8
-rw-r--r--spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js6
-rw-r--r--spec/frontend/commit/commit_pipeline_status_component_spec.js4
-rw-r--r--spec/frontend/commit/components/commit_box_pipeline_status_spec.js8
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js31
-rw-r--r--spec/frontend/content_editor/components/editor_state_observer_spec.js11
-rw-r--r--spec/frontend/content_editor/components/suggestions_dropdown_spec.js286
-rw-r--r--spec/frontend/content_editor/components/wrappers/label_spec.js36
-rw-r--r--spec/frontend/content_editor/extensions/heading_spec.js54
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec_helper.js12
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js18
-rw-r--r--spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js2
-rw-r--r--spec/frontend/contributors/component/contributors_spec.js5
-rw-r--r--spec/frontend/contributors/store/actions_spec.js4
-rw-r--r--spec/frontend/crm/contacts_root_spec.js14
-rw-r--r--spec/frontend/crm/organizations_root_spec.js2
-rw-r--r--spec/frontend/cycle_analytics/value_stream_metrics_spec.js4
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js11
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js2
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js3
-rw-r--r--spec/frontend/deploy_freeze/helpers.js9
-rw-r--r--spec/frontend/deploy_freeze/store/actions_spec.js3
-rw-r--r--spec/frontend/deploy_freeze/store/mutations_spec.js13
-rw-r--r--spec/frontend/deploy_tokens/components/new_deploy_token_spec.js103
-rw-r--r--spec/frontend/design_management/components/delete_button_spec.js6
-rw-r--r--spec/frontend/design_management/components/design_notes/design_reply_form_spec.js69
-rw-r--r--spec/frontend/design_management/components/design_overlay_spec.js8
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js10
-rw-r--r--spec/frontend/design_management/pages/index_spec.js4
-rw-r--r--spec/frontend/design_management/utils/cache_update_spec.js8
-rw-r--r--spec/frontend/diffs/components/app_spec.js24
-rw-r--r--spec/frontend/diffs/components/commit_item_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_content_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_row_utils_spec.js6
-rw-r--r--spec/frontend/diffs/components/diff_view_spec.js16
-rw-r--r--spec/frontend/diffs/components/file_row_stats_spec.js20
-rw-r--r--spec/frontend/diffs/mock_data/diff_code_quality.js6
-rw-r--r--spec/frontend/diffs/store/actions_spec.js37
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js47
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/empty.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/invalid_variable.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/leading_slash.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/no_slash.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/tailing_slash.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/empty.yml5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/invalid_variable.yml5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/leading_slash.yml5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/no_slash.yml5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/tailing_slash.yml5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/empty.yml2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/invalid_variable.yml2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/leading_slash.yml2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/no_slash.yml2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/tailing_slash.yml2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/empty.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/invalid_variable.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/leading_slash.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/no_slash.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/tailing_slash.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/variables.yml5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/project_path.yml101
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/variables.yml8
-rw-r--r--spec/frontend/editor/source_editor_ci_schema_ext_spec.js12
-rw-r--r--spec/frontend/editor/source_editor_instance_spec.js2
-rw-r--r--spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js4
-rw-r--r--spec/frontend/environments/delete_environment_modal_spec.js6
-rw-r--r--spec/frontend/environments/edit_environment_spec.js4
-rw-r--r--spec/frontend/environments/empty_state_spec.js52
-rw-r--r--spec/frontend/environments/enable_review_app_modal_spec.js23
-rw-r--r--spec/frontend/environments/environment_external_url_spec.js31
-rw-r--r--spec/frontend/environments/environment_folder_spec.js22
-rw-r--r--spec/frontend/environments/environments_app_spec.js53
-rw-r--r--spec/frontend/environments/environments_detail_header_spec.js21
-rw-r--r--spec/frontend/environments/graphql/resolvers_spec.js26
-rw-r--r--spec/frontend/environments/new_environment_spec.js4
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js8
-rw-r--r--spec/frontend/error_tracking/store/actions_spec.js6
-rw-r--r--spec/frontend/error_tracking/store/details/actions_spec.js6
-rw-r--r--spec/frontend/error_tracking/store/list/actions_spec.js4
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_helper_spec.js4
-rw-r--r--spec/frontend/fixtures/namespaces.rb47
-rw-r--r--spec/frontend/fixtures/pipeline_schedules.rb72
-rw-r--r--spec/frontend/flash_spec.js7
-rw-r--r--spec/frontend/grafana_integration/components/grafana_integration_spec.js6
-rw-r--r--spec/frontend/groups/components/app_spec.js88
-rw-r--r--spec/frontend/groups/components/group_item_spec.js11
-rw-r--r--spec/frontend/groups/components/groups_spec.js9
-rw-r--r--spec/frontend/groups/components/new_top_level_group_alert_spec.js75
-rw-r--r--spec/frontend/groups/components/overview_tabs_spec.js162
-rw-r--r--spec/frontend/groups/components/transfer_group_form_spec.js2
-rw-r--r--spec/frontend/groups/store/groups_store_spec.js10
-rw-r--r--spec/frontend/header_search/components/app_spec.js26
-rw-r--r--spec/frontend/ide/components/commit_sidebar/actions_spec.js65
-rw-r--r--spec/frontend/ide/components/commit_sidebar/list_item_spec.js125
-rw-r--r--spec/frontend/ide/components/commit_sidebar/message_field_spec.js128
-rw-r--r--spec/frontend/ide/components/commit_sidebar/radio_group_spec.js135
-rw-r--r--spec/frontend/ide/components/file_row_extra_spec.js140
-rw-r--r--spec/frontend/ide/components/file_templates/bar_spec.js71
-rw-r--r--spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap60
-rw-r--r--spec/frontend/ide/components/jobs/detail/description_spec.js35
-rw-r--r--spec/frontend/ide/components/jobs/detail_spec.js162
-rw-r--r--spec/frontend/ide/components/jobs/item_spec.js30
-rw-r--r--spec/frontend/ide/components/jobs/stage_spec.js9
-rw-r--r--spec/frontend/ide/components/new_dropdown/button_spec.js65
-rw-r--r--spec/frontend/ide/components/new_dropdown/modal_spec.js10
-rw-r--r--spec/frontend/ide/components/new_dropdown/upload_spec.js71
-rw-r--r--spec/frontend/ide/components/shared/tokened_input_spec.js135
-rw-r--r--spec/frontend/ide/components/terminal/terminal_spec.js2
-rw-r--r--spec/frontend/ide/init_gitlab_web_ide_spec.js6
-rw-r--r--spec/frontend/ide/stores/actions/merge_request_spec.js8
-rw-r--r--spec/frontend/ide/stores/actions/project_spec.js6
-rw-r--r--spec/frontend/ide/stores/actions_spec.js16
-rw-r--r--spec/frontend/ide/stores/modules/commit/mutations_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js6
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js4
-rw-r--r--spec/frontend/ide/stores/mutations/tree_spec.js4
-rw-r--r--spec/frontend/ide/stores/mutations_spec.js8
-rw-r--r--spec/frontend/ide/utils_spec.js4
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js16
-rw-r--r--spec/frontend/import_entities/import_groups/services/status_poller_spec.js4
-rw-r--r--spec/frontend/import_entities/import_projects/components/advanced_settings_spec.js60
-rw-r--r--spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js22
-rw-r--r--spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js22
-rw-r--r--spec/frontend/import_entities/import_projects/store/actions_spec.js30
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js45
-rw-r--r--spec/frontend/issuable/bulk_update_sidebar/components/status_dropdown_spec.js (renamed from spec/frontend/issuable/bulk_update_sidebar/components/status_select_spec.js)14
-rw-r--r--spec/frontend/issuable/bulk_update_sidebar/components/subscriptions_dropdown_spec.js76
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_root_spec.js8
-rw-r--r--spec/frontend/issues/show/components/edited_spec.js16
-rw-r--r--spec/frontend/issues/show/components/fields/description_spec.js59
-rw-r--r--spec/frontend/issues/show/components/form_spec.js76
-rw-r--r--spec/frontend/issues/show/components/incidents/incident_tabs_spec.js19
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js24
-rw-r--r--spec/frontend/jira_connect/branches/components/new_branch_form_spec.js2
-rw-r--r--spec/frontend/jira_connect/subscriptions/pkce_spec.js4
-rw-r--r--spec/frontend/jobs/components/table/job_table_app_spec.js4
-rw-r--r--spec/frontend/labels/components/promote_label_modal_spec.js116
-rw-r--r--spec/frontend/lib/dompurify_spec.js4
-rw-r--r--spec/frontend/lib/utils/autosave_spec.js57
-rw-r--r--spec/frontend/lib/utils/datetime/date_format_utility_spec.js19
-rw-r--r--spec/frontend/lib/utils/text_markdown_spec.js51
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js12
-rw-r--r--spec/frontend/listbox/index_spec.js30
-rw-r--r--spec/frontend/members/components/filter_sort/sort_dropdown_spec.js2
-rw-r--r--spec/frontend/members/components/table/member_action_buttons_spec.js2
-rw-r--r--spec/frontend/members/components/table/member_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/table/members_table_cell_spec.js2
-rw-r--r--spec/frontend/members/utils_spec.js12
-rw-r--r--spec/frontend/merge_conflicts/store/actions_spec.js4
-rw-r--r--spec/frontend/merge_request_spec.js10
-rw-r--r--spec/frontend/milestones/components/promote_milestone_modal_spec.js4
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js8
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js4
-rw-r--r--spec/frontend/monitoring/requests/index_spec.js2
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js20
-rw-r--r--spec/frontend/monitoring/utils_spec.js14
-rw-r--r--spec/frontend/nav/components/top_nav_app_spec.js28
-rw-r--r--spec/frontend/notebook/cells/output/index_spec.js1
-rw-r--r--spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap8
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js29
-rw-r--r--spec/frontend/notes/components/diff_discussion_header_spec.js2
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js2
-rw-r--r--spec/frontend/notes/components/note_header_spec.js99
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js2
-rw-r--r--spec/frontend/notes/components/notes_activity_header_spec.js67
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js31
-rw-r--r--spec/frontend/notes/mixins/discussion_navigation_spec.js141
-rw-r--r--spec/frontend/notes/mock_data.js17
-rw-r--r--spec/frontend/notes/utils/get_notes_filter_data_spec.js44
-rw-r--r--spec/frontend/operation_settings/components/metrics_settings_spec.js6
-rw-r--r--spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_row_spec.js6
-rw-r--r--spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_spec.js4
-rw-r--r--spec/frontend/packages_and_registries/harbor_registry/pages/tags_spec.js4
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js18
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js8
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js10
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap24
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js22
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/functional/delete_package_spec.js18
-rw-r--r--spec/frontend/packages_and_registries/package_registry/mock_data.js1
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/details_spec.js42
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js32
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js37
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/mock_data.js9
-rw-r--r--spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js38
-rw-r--r--spec/frontend/pages/import/fogbugz/new_user_map/components/user_select_spec.js81
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js4
-rw-r--r--spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js4
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js3
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js291
-rw-r--r--spec/frontend/pdf/page_spec.js27
-rw-r--r--spec/frontend/performance_bar/components/request_warning_spec.js29
-rw-r--r--spec/frontend/persistent_user_callout_spec.js6
-rw-r--r--spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js2
-rw-r--r--spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js30
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js21
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_home_spec.js10
-rw-r--r--spec/frontend/pipeline_new/components/pipeline_new_form_spec.js242
-rw-r--r--spec/frontend/pipeline_new/mock_data.js59
-rw-r--r--spec/frontend/pipeline_schedules/components/pipeline_schedules_form_spec.js25
-rw-r--r--spec/frontend/pipeline_schedules/components/pipeline_schedules_spec.js161
-rw-r--r--spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js49
-rw-r--r--spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js42
-rw-r--r--spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_next_run_spec.js43
-rw-r--r--spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_owner_spec.js40
-rw-r--r--spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js41
-rw-r--r--spec/frontend/pipeline_schedules/components/table/pipeline_schedules_table_spec.js39
-rw-r--r--spec/frontend/pipeline_schedules/mock_data.js35
-rw-r--r--spec/frontend/pipeline_wizard/components/commit_spec.js4
-rw-r--r--spec/frontend/pipeline_wizard/components/editor_spec.js20
-rw-r--r--spec/frontend/pipeline_wizard/components/widgets/list_spec.js21
-rw-r--r--spec/frontend/pipeline_wizard/components/wrapper_spec.js29
-rw-r--r--spec/frontend/pipeline_wizard/mock/yaml.js3
-rw-r--r--spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js6
-rw-r--r--spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js4
-rw-r--r--spec/frontend/pipelines/components/jobs/jobs_app_spec.js6
-rw-r--r--spec/frontend/pipelines/pipeline_multi_actions_spec.js19
-rw-r--r--spec/frontend/pipelines/pipelines_actions_spec.js4
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js16
-rw-r--r--spec/frontend/pipelines/test_reports/stores/actions_spec.js4
-rw-r--r--spec/frontend/pipelines/test_reports/stores/mutations_spec.js4
-rw-r--r--spec/frontend/profile/account/components/update_username_spec.js6
-rw-r--r--spec/frontend/profile/preferences/components/profile_preferences_spec.js16
-rw-r--r--spec/frontend/projects/commit/store/actions_spec.js4
-rw-r--r--spec/frontend/projects/commits/store/actions_spec.js6
-rw-r--r--spec/frontend/projects/compare/components/app_spec.js34
-rw-r--r--spec/frontend/projects/compare/components/mock_data.js1
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js4
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_spec.js6
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/edit/branch_dropdown_spec.js (renamed from spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js)2
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/edit/index_spec.js (renamed from spec/frontend/projects/settings/branch_rules/rule_edit_spec.js)6
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/edit/protections/index_spec.js (renamed from spec/frontend/projects/settings/branch_rules/components/protections/index_spec.js)8
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/edit/protections/merge_protections_spec.js (renamed from spec/frontend/projects/settings/branch_rules/components/protections/merge_protections_spec.js)4
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/edit/protections/push_protections_spec.js (renamed from spec/frontend/projects/settings/branch_rules/components/protections/push_protections_spec.js)4
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/view/index_spec.js113
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/view/mock_data.js141
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/view/protection_row_spec.js71
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js68
-rw-r--r--spec/frontend/projects/settings/components/default_branch_selector_spec.js46
-rw-r--r--spec/frontend/projects/settings/components/transfer_project_form_spec.js273
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/app_spec.js18
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js30
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/mock_data.js13
-rw-r--r--spec/frontend/protected_branches/protected_branch_edit_spec.js6
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js28
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap12
-rw-r--r--spec/frontend/releases/components/app_index_spec.js6
-rw-r--r--spec/frontend/releases/components/app_show_spec.js6
-rw-r--r--spec/frontend/releases/components/evidence_block_spec.js6
-rw-r--r--spec/frontend/releases/components/tag_field_new_spec.js46
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js43
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js4
-rw-r--r--spec/frontend/releases/stores/modules/detail/mutations_spec.js11
-rw-r--r--spec/frontend/releases/util_spec.js16
-rw-r--r--spec/frontend/reports/accessibility_report/components/accessibility_issue_body_spec.js112
-rw-r--r--spec/frontend/reports/accessibility_report/grouped_accessibility_reports_app_spec.js125
-rw-r--r--spec/frontend/reports/accessibility_report/mock_data.js53
-rw-r--r--spec/frontend/reports/accessibility_report/store/actions_spec.js115
-rw-r--r--spec/frontend/reports/accessibility_report/store/getters_spec.js149
-rw-r--r--spec/frontend/reports/accessibility_report/store/mutations_spec.js64
-rw-r--r--spec/frontend/reports/components/report_section_spec.js22
-rw-r--r--spec/frontend/repository/commits_service_spec.js6
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap2
-rw-r--r--spec/frontend/repository/components/blob_controls_spec.js12
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js2
-rw-r--r--spec/frontend/repository/components/new_directory_modal_spec.js4
-rw-r--r--spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap18
-rw-r--r--spec/frontend/repository/components/table/index_spec.js2
-rw-r--r--spec/frontend/repository/components/table/parent_row_spec.js4
-rw-r--r--spec/frontend/repository/components/table/row_spec.js16
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js4
-rw-r--r--spec/frontend/repository/components/upload_blob_modal_spec.js16
-rw-r--r--spec/frontend/repository/pages/blob_spec.js2
-rw-r--r--spec/frontend/repository/pages/index_spec.js2
-rw-r--r--spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js4
-rw-r--r--spec/frontend/runner/admin_runners/admin_runners_app_spec.js75
-rw-r--r--spec/frontend/runner/components/cells/link_cell_spec.js2
-rw-r--r--spec/frontend/runner/components/cells/runner_actions_cell_spec.js4
-rw-r--r--spec/frontend/runner/components/cells/runner_owner_cell_spec.js111
-rw-r--r--spec/frontend/runner/components/cells/runner_stacked_summary_cell_spec.js6
-rw-r--r--spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js99
-rw-r--r--spec/frontend/runner/components/runner_delete_button_spec.js31
-rw-r--r--spec/frontend/runner/components/runner_details_spec.js28
-rw-r--r--spec/frontend/runner/components/runner_filtered_search_bar_spec.js33
-rw-r--r--spec/frontend/runner/components/runner_list_empty_state_spec.js57
-rw-r--r--spec/frontend/runner/components/runner_list_spec.js56
-rw-r--r--spec/frontend/runner/components/runner_membership_toggle_spec.js57
-rw-r--r--spec/frontend/runner/components/runner_stacked_layout_banner_spec.js2
-rw-r--r--spec/frontend/runner/components/runner_type_tabs_spec.js20
-rw-r--r--spec/frontend/runner/components/runner_update_form_spec.js2
-rw-r--r--spec/frontend/runner/components/search_tokens/tag_token_spec.js43
-rw-r--r--spec/frontend/runner/graphql/local_state_spec.js51
-rw-r--r--spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js5
-rw-r--r--spec/frontend/runner/group_runners/group_runners_app_spec.js69
-rw-r--r--spec/frontend/runner/mock_data.js96
-rw-r--r--spec/frontend/search/sidebar/components/app_spec.js41
-rw-r--r--spec/frontend/search/sidebar/components/confidentiality_filter_spec.js2
-rw-r--r--spec/frontend/search/sidebar/components/radio_filter_spec.js2
-rw-r--r--spec/frontend/search/sidebar/components/status_filter_spec.js2
-rw-r--r--spec/frontend/search/sort/components/app_spec.js6
-rw-r--r--spec/frontend/search/store/actions_spec.js6
-rw-r--r--spec/frontend/search/topbar/components/app_spec.js6
-rw-r--r--spec/frontend/search/topbar/components/group_filter_spec.js2
-rw-r--r--spec/frontend/search/topbar/components/project_filter_spec.js2
-rw-r--r--spec/frontend/search_settings/components/search_settings_spec.js46
-rw-r--r--spec/frontend/security_configuration/components/app_spec.js2
-rw-r--r--spec/frontend/security_configuration/components/training_provider_list_spec.js4
-rw-r--r--spec/frontend/security_configuration/components/upgrade_banner_spec.js2
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_form_spec.js4
-rw-r--r--spec/frontend/set_status_modal/set_status_form_spec.js2
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js8
-rw-r--r--spec/frontend/sidebar/assignee_title_spec.js4
-rw-r--r--spec/frontend/sidebar/assignees_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js6
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js36
-rw-r--r--spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js2
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js6
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js4
-rw-r--r--spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js2
-rw-r--r--spec/frontend/sidebar/components/crm_contacts_spec.js4
-rw-r--r--spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js10
-rw-r--r--spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js2
-rw-r--r--spec/frontend/sidebar/components/severity/severity_spec.js2
-rw-r--r--spec/frontend/sidebar/components/severity/sidebar_severity_spec.js6
-rw-r--r--spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js14
-rw-r--r--spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js4
-rw-r--r--spec/frontend/sidebar/components/time_tracking/report_spec.js10
-rw-r--r--spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js16
-rw-r--r--spec/frontend/sidebar/issuable_assignees_spec.js2
-rw-r--r--spec/frontend/sidebar/lock/edit_form_buttons_spec.js6
-rw-r--r--spec/frontend/sidebar/lock/issuable_lock_form_spec.js2
-rw-r--r--spec/frontend/sidebar/mock_data.js1
-rw-r--r--spec/frontend/sidebar/participants_spec.js6
-rw-r--r--spec/frontend/sidebar/reviewer_title_spec.js4
-rw-r--r--spec/frontend/sidebar/reviewers_spec.js2
-rw-r--r--spec/frontend/sidebar/sidebar_assignees_spec.js8
-rw-r--r--spec/frontend/sidebar/sidebar_mediator_spec.js18
-rw-r--r--spec/frontend/sidebar/sidebar_move_issue_spec.js4
-rw-r--r--spec/frontend/sidebar/subscriptions_spec.js2
-rw-r--r--spec/frontend/sidebar/todo_spec.js10
-rw-r--r--spec/frontend/smart_interval_spec.js16
-rw-r--r--spec/frontend/snippet/collapsible_input_spec.js4
-rw-r--r--spec/frontend/snippets/components/edit_spec.js14
-rw-r--r--spec/frontend/snippets/components/embed_dropdown_spec.js2
-rw-r--r--spec/frontend/snippets/components/snippet_blob_edit_spec.js10
-rw-r--r--spec/frontend/snippets/components/snippet_blob_view_spec.js89
-rw-r--r--spec/frontend/snippets/components/snippet_header_spec.js14
-rw-r--r--spec/frontend/snippets/components/snippet_title_spec.js8
-rw-r--r--spec/frontend/snippets/components/snippet_visibility_edit_spec.js8
-rw-r--r--spec/frontend/terms/components/app_spec.js2
-rw-r--r--spec/frontend/terraform/components/states_table_spec.js6
-rw-r--r--spec/frontend/terraform/components/terraform_list_spec.js14
-rw-r--r--spec/frontend/toggles/index_spec.js4
-rw-r--r--spec/frontend/token_access/token_access_spec.js16
-rw-r--r--spec/frontend/tooltips/components/tooltips_spec.js18
-rw-r--r--spec/frontend/user_lists/components/edit_user_list_spec.js8
-rw-r--r--spec/frontend/user_lists/components/new_user_list_spec.js2
-rw-r--r--spec/frontend/user_lists/components/user_list_spec.js6
-rw-r--r--spec/frontend/user_lists/components/user_lists_table_spec.js4
-rw-r--r--spec/frontend/user_popovers_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js14
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js6
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js12
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap10
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js6
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js222
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js21
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js77
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/terraform/mr_widget_terraform_container_spec.js175
-rw-r--r--spec/frontend/vue_merge_request_widget/components/terraform/terraform_plan_spec.js93
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap35
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/dynamic_content_spec.js52
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/widget_content_row_spec.js65
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/widget_content_section_spec.js39
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js7
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js20
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js20
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js16
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js94
-rw-r--r--spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js2
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js96
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/ci_badge_link_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js51
-rw-r--r--spec/frontend/vue_shared/components/file_finder/item_spec.js118
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/gitlab_version_check_spec.js33
-rw-r--r--spec/frontend/vue_shared/components/gl_countdown_spec.js36
-rw-r--r--spec/frontend/vue_shared/components/group_select/utils_spec.js24
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js289
-rw-r--r--spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/metric_images/store/actions_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/modal_copy_button_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/namespace_select/namespace_select_deprecated_spec.js (renamed from spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js)18
-rw-r--r--spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap23
-rw-r--r--spec/frontend/vue_shared/components/notes/placeholder_note_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js26
-rw-r--r--spec/frontend/vue_shared/components/panel_resizer_spec.js81
-rw-r--r--spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap8
-rw-r--r--spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js44
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/wrap_bidi_chars_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js21
-rw-r--r--spec/frontend/vue_shared/components/stacked_progress_bar_spec.js124
-rw-r--r--spec/frontend/vue_shared/components/timezone_dropdown/helpers.js6
-rw-r--r--spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js (renamed from spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js)47
-rw-r--r--spec/frontend/vue_shared/components/url_sync_spec.js62
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js134
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js127
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js143
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js103
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js103
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js119
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js25
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js70
-rw-r--r--spec/frontend/vue_shared/directives/safe_html_spec.js116
-rw-r--r--spec/frontend/vue_shared/issuable/__snapshots__/issuable_blocked_icon_spec.js.snap (renamed from spec/frontend/boards/components/__snapshots__/board_blocked_icon_spec.js.snap)12
-rw-r--r--spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js (renamed from spec/frontend/boards/components/board_blocked_icon_spec.js)13
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js8
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js6
-rw-r--r--spec/frontend/webhooks/components/form_url_app_spec.js142
-rw-r--r--spec/frontend/webhooks/components/form_url_mask_item_spec.js100
-rw-r--r--spec/frontend/whats_new/components/app_spec.js6
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js8
-rw-r--r--spec/frontend/work_items/components/work_item_description_spec.js31
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js100
-rw-r--r--spec/frontend/work_items/components/work_item_due_date_spec.js4
-rw-r--r--spec/frontend/work_items/components/work_item_labels_spec.js98
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js2
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js4
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_spec.js31
-rw-r--r--spec/frontend/work_items/components/work_item_milestone_spec.js247
-rw-r--r--spec/frontend/work_items/components/work_item_type_icon_spec.js2
-rw-r--r--spec/frontend/work_items/mock_data.js273
-rw-r--r--spec/frontend/work_items/router_spec.js10
-rw-r--r--spec/frontend/work_items_hierarchy/components/app_spec.js4
-rw-r--r--spec/frontend/work_items_hierarchy/components/hierarchy_spec.js2
-rw-r--r--spec/frontend_integration/content_editor/content_editor_integration_spec.js21
-rw-r--r--spec/frontend_integration/diffs/diffs_interopability_spec.js1
-rw-r--r--spec/graphql/gitlab_schema_spec.rb103
-rw-r--r--spec/graphql/graphql_triggers_spec.rb42
-rw-r--r--spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb2
-rw-r--r--spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb2
-rw-r--r--spec/graphql/mutations/ci/runner/update_spec.rb10
-rw-r--r--spec/graphql/mutations/incident_management/timeline_event/create_spec.rb2
-rw-r--r--spec/graphql/mutations/incident_management/timeline_event/update_spec.rb2
-rw-r--r--spec/graphql/mutations/namespace/package_settings/update_spec.rb37
-rw-r--r--spec/graphql/mutations/work_items/update_widgets_spec.rb58
-rw-r--r--spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb52
-rw-r--r--spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb6
-rw-r--r--spec/graphql/resolvers/project_pipeline_schedules_resolver_spec.rb40
-rw-r--r--spec/graphql/resolvers/users/participants_resolver_spec.rb83
-rw-r--r--spec/graphql/types/ci/job_token_scope_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb13
-rw-r--r--spec/graphql/types/ci/pipeline_schedule_status_enum_spec.rb11
-rw-r--r--spec/graphql/types/ci/pipeline_schedule_type_spec.rb30
-rw-r--r--spec/graphql/types/environment_type_spec.rb4
-rw-r--r--spec/graphql/types/namespace/package_settings_type_spec.rb20
-rw-r--r--spec/graphql/types/packages/tag_type_spec.rb2
-rw-r--r--spec/graphql/types/permission_types/ci/pipeline_schedule_type_spec.rb7
-rw-r--r--spec/graphql/types/project_type_spec.rb16
-rw-r--r--spec/graphql/types/projects/branch_rule_type_spec.rb (renamed from spec/graphql/types/branch_rule_type_spec.rb)3
-rw-r--r--spec/graphql/types/subscription_type_spec.rb2
-rw-r--r--spec/graphql/types/work_items/widgets/labels_update_input_type_spec.rb9
-rw-r--r--spec/helpers/application_helper_spec.rb19
-rw-r--r--spec/helpers/application_settings_helper_spec.rb48
-rw-r--r--spec/helpers/boards_helper_spec.rb14
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb105
-rw-r--r--spec/helpers/commits_helper_spec.rb9
-rw-r--r--spec/helpers/events_helper_spec.rb39
-rw-r--r--spec/helpers/form_helper_spec.rb68
-rw-r--r--spec/helpers/groups_helper_spec.rb31
-rw-r--r--spec/helpers/hooks_helper_spec.rb7
-rw-r--r--spec/helpers/ide_helper_spec.rb130
-rw-r--r--spec/helpers/invite_members_helper_spec.rb4
-rw-r--r--spec/helpers/issuables_description_templates_helper_spec.rb12
-rw-r--r--spec/helpers/issues_helper_spec.rb4
-rw-r--r--spec/helpers/listbox_helper_spec.rb32
-rw-r--r--spec/helpers/markup_helper_spec.rb66
-rw-r--r--spec/helpers/milestones_helper_spec.rb41
-rw-r--r--spec/helpers/nav_helper_spec.rb2
-rw-r--r--spec/helpers/projects_helper_spec.rb22
-rw-r--r--spec/helpers/recaptcha_helper_spec.rb69
-rw-r--r--spec/helpers/releases_helper_spec.rb6
-rw-r--r--spec/helpers/search_helper_spec.rb293
-rw-r--r--spec/helpers/sessions_helper_spec.rb6
-rw-r--r--spec/helpers/todos_helper_spec.rb18
-rw-r--r--spec/helpers/users_helper_spec.rb11
-rw-r--r--spec/helpers/wiki_helper_spec.rb23
-rw-r--r--spec/initializers/100_patch_omniauth_oauth2_spec.rb2
-rw-r--r--spec/initializers/attr_encrypted_no_db_connection_spec.rb40
-rw-r--r--spec/initializers/attr_encrypted_thread_safe_spec.rb28
-rw-r--r--spec/initializers/diagnostic_reports_spec.rb28
-rw-r--r--spec/initializers/memory_watchdog_spec.rb76
-rw-r--r--spec/initializers/sawyer_patch_spec.rb24
-rw-r--r--spec/initializers/sidekiq_spec.rb57
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_failure_spec.rb19
-rw-r--r--spec/lib/api/entities/ml/mlflow/run_spec.rb50
-rw-r--r--spec/lib/api/helpers/merge_requests_helpers_spec.rb4
-rw-r--r--spec/lib/api/helpers/open_api_spec.rb21
-rw-r--r--spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb66
-rw-r--r--spec/lib/api/helpers/packages_helpers_spec.rb20
-rw-r--r--spec/lib/api/helpers_spec.rb110
-rw-r--r--spec/lib/banzai/filter/pathological_markdown_filter_spec.rb27
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb22
-rw-r--r--spec/lib/banzai/filter/truncate_visible_filter_spec.rb128
-rw-r--r--spec/lib/banzai/filter/wiki_link_filter_spec.rb8
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb16
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb2
-rw-r--r--spec/lib/bitbucket/connection_spec.rb10
-rw-r--r--spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb5
-rw-r--r--spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb4
-rw-r--r--spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb31
-rw-r--r--spec/lib/bulk_imports/features_spec.rb43
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb78
-rw-r--r--spec/lib/bulk_imports/network_error_spec.rb16
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb10
-rw-r--r--spec/lib/bulk_imports/pipeline_spec.rb33
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb5
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb5
-rw-r--r--spec/lib/container_registry/client_spec.rb32
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb11
-rw-r--r--spec/lib/csv_builders/stream_spec.rb20
-rw-r--r--spec/lib/expand_variables_spec.rb4
-rw-r--r--spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb18
-rw-r--r--spec/lib/gitlab/anonymous_session_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb44
-rw-r--r--spec/lib/gitlab/auth_spec.rb6
-rw-r--r--spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb30
-rw-r--r--spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb30
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_details_spec.rb55
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb25
-rw-r--r--spec/lib/gitlab/background_migration/delete_orphaned_operational_vulnerabilities_spec.rb111
-rw-r--r--spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb141
-rw-r--r--spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb9
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb15
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb70
-rw-r--r--spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb70
-rw-r--r--spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb62
-rw-r--r--spec/lib/gitlab/bare_repository_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bare_repository_import/repository_spec.rb2
-rw-r--r--spec/lib/gitlab/batch_pop_queueing_spec.rb147
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb59
-rw-r--r--spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/ansi2json_spec.rb389
-rw-r--r--spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb36
-rw-r--r--spec/lib/gitlab/ci/config/entry/legacy_variables_spec.rb173
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/config/entry/variable_spec.rb52
-rw-r--r--spec/lib/gitlab/ci/config/entry/variables_spec.rb56
-rw-r--r--spec/lib/gitlab/ci/config/entry/workflow_spec.rb48
-rw-r--r--spec/lib/gitlab/ci/config/external/file/artifact_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/config/external/file/project_spec.rb17
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb27
-rw-r--r--spec/lib/gitlab/ci/jwt_v2_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/lint_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/parsers/security/common_spec.rb106
-rw-r--r--spec/lib/gitlab/ci/parsers/security/sast_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb51
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb97
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb43
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/pipeline/duration_spec.rb224
-rw-r--r--spec/lib/gitlab/ci/pipeline/logger_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb33
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_reports_spec.rb32
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/source_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/reports/security/report_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/secure_files/cer_spec.rb69
-rw-r--r--spec/lib/gitlab/ci/secure_files/mobile_provision_spec.rb149
-rw-r--r--spec/lib/gitlab/ci/secure_files/p12_spec.rb81
-rw-r--r--spec/lib/gitlab/ci/secure_files/x509_name_spec.rb30
-rw-r--r--spec/lib/gitlab/ci/trace_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/variables/builder/group_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/variables/builder/project_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/variables/builder/release_spec.rb69
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb44
-rw-r--r--spec/lib/gitlab/ci/variables/collection/sort_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/variables/collection_spec.rb37
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/result_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb330
-rw-r--r--spec/lib/gitlab/config/entry/validators_spec.rb2
-rw-r--r--spec/lib/gitlab/config_checker/external_database_checker_spec.rb99
-rw-r--r--spec/lib/gitlab/conflict/file_spec.rb27
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb1
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb44
-rw-r--r--spec/lib/gitlab/database/each_database_spec.rb53
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb30
-rw-r--r--spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb19
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb42
-rw-r--r--spec/lib/gitlab/database/load_balancing/sticking_spec.rb10
-rw-r--r--spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb67
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb184
-rw-r--r--spec/lib/gitlab/database/migrations/base_background_runner_spec.rb4
-rw-r--r--spec/lib/gitlab/database/migrations/runner_spec.rb195
-rw-r--r--spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb202
-rw-r--r--spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb19
-rw-r--r--spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb15
-rw-r--r--spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb9
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb17
-rw-r--r--spec/lib/gitlab/database/partitioning/time_partition_spec.rb13
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb14
-rw-r--r--spec/lib/gitlab/database/reflection_spec.rb6
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb20
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb8
-rw-r--r--spec/lib/gitlab/database/similarity_score_spec.rb18
-rw-r--r--spec/lib/gitlab/database_spec.rb15
-rw-r--r--spec/lib/gitlab/diff/char_diff_spec.rb22
-rw-r--r--spec/lib/gitlab/diff/file_collection_sorter_spec.rb43
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb13
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb64
-rw-r--r--spec/lib/gitlab/encoding_helper_spec.rb12
-rw-r--r--spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb6
-rw-r--r--spec/lib/gitlab/experimentation/controller_concern_spec.rb675
-rw-r--r--spec/lib/gitlab/experimentation/experiment_spec.rb58
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb161
-rw-r--r--spec/lib/gitlab/git/keep_around_spec.rb13
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb77
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb3
-rw-r--r--spec/lib/gitlab/git/wiki_spec.rb134
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb1
-rw-r--r--spec/lib/gitlab/git_access_spec.rb53
-rw-r--r--spec/lib/gitlab/gitaly_client/blob_service_spec.rb28
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb32
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb16
-rw-r--r--spec/lib/gitlab/gitaly_client/wiki_service_spec.rb118
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb30
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb28
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb61
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb61
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb58
-rw-r--r--spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb57
-rw-r--r--spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb14
-rw-r--r--spec/lib/gitlab/github_import/importer/issues_importer_spec.rb33
-rw-r--r--spec/lib/gitlab/github_import/importer/labels_importer_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb10
-rw-r--r--spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb85
-rw-r--r--spec/lib/gitlab/github_import/importer/note_importer_spec.rb7
-rw-r--r--spec/lib/gitlab/github_import/importer/notes_importer_spec.rb14
-rw-r--r--spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb218
-rw-r--r--spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb15
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb50
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb10
-rw-r--r--spec/lib/gitlab/github_import/importer/release_attachments_importer_spec.rb57
-rw-r--r--spec/lib/gitlab/github_import/importer/releases_attachments_importer_spec.rb74
-rw-r--r--spec/lib/gitlab/github_import/importer/releases_importer_spec.rb27
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb32
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/issuable_finder_spec.rb67
-rw-r--r--spec/lib/gitlab/github_import/markdown/attachment_spec.rb93
-rw-r--r--spec/lib/gitlab/github_import/markdown_text_spec.rb37
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb32
-rw-r--r--spec/lib/gitlab/github_import/representation/diff_note_spec.rb9
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_event_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_spec.rb17
-rw-r--r--spec/lib/gitlab/github_import/representation/note_spec.rb11
-rw-r--r--spec/lib/gitlab/github_import/representation/note_text_spec.rb80
-rw-r--r--spec/lib/gitlab/github_import/representation/protected_branch_spec.rb38
-rw-r--r--spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb11
-rw-r--r--spec/lib/gitlab/github_import/representation/pull_request_spec.rb33
-rw-r--r--spec/lib/gitlab/github_import/representation/release_attachments_spec.rb49
-rw-r--r--spec/lib/gitlab/github_import/representation/user_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/settings_spec.rb82
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb34
-rw-r--r--spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb11
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb1
-rw-r--r--spec/lib/gitlab/health_checks/master_check_spec.rb50
-rw-r--r--spec/lib/gitlab/hook_data/release_builder_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb130
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml11
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb18
-rw-r--r--spec/lib/gitlab/import_export/group/tree_saver_spec.rb11
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb18
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb23
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml6
-rw-r--r--spec/lib/gitlab/import_export/uploads_manager_spec.rb26
-rw-r--r--spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import/issues_importer_spec.rb50
-rw-r--r--spec/lib/gitlab/json_spec.rb10
-rw-r--r--spec/lib/gitlab/kubernetes/rollout_instances_spec.rb45
-rw-r--r--spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb18
-rw-r--r--spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb16
-rw-r--r--spec/lib/gitlab/legacy_github_import/importer_spec.rb52
-rw-r--r--spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb2
-rw-r--r--spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb36
-rw-r--r--spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb6
-rw-r--r--spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb8
-rw-r--r--spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb78
-rw-r--r--spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb12
-rw-r--r--spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb12
-rw-r--r--spec/lib/gitlab/memory/diagnostic_reports_logger_spec.rb22
-rw-r--r--spec/lib/gitlab/memory/reports_daemon_spec.rb39
-rw-r--r--spec/lib/gitlab/memory/reports_uploader_spec.rb80
-rw-r--r--spec/lib/gitlab/memory/upload_and_cleanup_reports_spec.rb109
-rw-r--r--spec/lib/gitlab/memory/watchdog/configuration_spec.rb121
-rw-r--r--spec/lib/gitlab/memory/watchdog/monitor/heap_fragmentation_spec.rb60
-rw-r--r--spec/lib/gitlab/memory/watchdog/monitor/unique_memory_growth_spec.rb62
-rw-r--r--spec/lib/gitlab/memory/watchdog/monitor_state_spec.rb72
-rw-r--r--spec/lib/gitlab/memory/watchdog_spec.rb396
-rw-r--r--spec/lib/gitlab/metrics/global_search_slis_spec.rb129
-rw-r--r--spec/lib/gitlab/metrics/system_spec.rb67
-rw-r--r--spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb22
-rw-r--r--spec/lib/gitlab/pages/cache_control_spec.rb47
-rw-r--r--spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb9
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb202
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb37
-rw-r--r--spec/lib/gitlab/pagination/keyset/iterator_spec.rb31
-rw-r--r--spec/lib/gitlab/pagination/keyset/order_spec.rb381
-rw-r--r--spec/lib/gitlab/profiler_spec.rb84
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb5
-rw-r--r--spec/lib/gitlab/project_transfer_spec.rb9
-rw-r--r--spec/lib/gitlab/prometheus_client_spec.rb13
-rw-r--r--spec/lib/gitlab/push_options_spec.rb25
-rw-r--r--spec/lib/gitlab/query_limiting/transaction_spec.rb12
-rw-r--r--spec/lib/gitlab/rack_attack/request_spec.rb9
-rw-r--r--spec/lib/gitlab/reference_extractor_spec.rb2
-rw-r--r--spec/lib/gitlab/regex_requires_app_spec.rb6
-rw-r--r--spec/lib/gitlab/regex_spec.rb10
-rw-r--r--spec/lib/gitlab/search/abuse_detection_spec.rb20
-rw-r--r--spec/lib/gitlab/search/query_spec.rb34
-rw-r--r--spec/lib/gitlab/serializer/ci/variables_spec.rb9
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb128
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb83
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb10
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb8
-rw-r--r--spec/lib/gitlab/slash_commands/issue_new_spec.rb15
-rw-r--r--spec/lib/gitlab/ssh_public_key_spec.rb24
-rw-r--r--spec/lib/gitlab/tracking/service_ping_context_spec.rb19
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb150
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/aggregated_metric_spec.rb72
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb19
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_direct_installations_count_metric_spec.rb18
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_proxy_installations_count_metric_spec.rb19
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/merge_request_widget_extension_metric_spec.rb25
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb30
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb60
-rw-r--r--spec/lib/gitlab/usage_data/topology_spec.rb278
-rw-r--r--spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb16
-rw-r--r--spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb19
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_metrics_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb21
-rw-r--r--spec/lib/gitlab/user_access_snippet_spec.rb2
-rw-r--r--spec/lib/gitlab/utils_spec.rb11
-rw-r--r--spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb23
-rw-r--r--spec/lib/gitlab/webpack/manifest_spec.rb9
-rw-r--r--spec/lib/gitlab/x509/signature_spec.rb14
-rw-r--r--spec/lib/gitlab/x509/tag_spec.rb4
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb18
-rw-r--r--spec/lib/object_storage/config_spec.rb1
-rw-r--r--spec/lib/peek/views/bullet_detailed_spec.rb9
-rw-r--r--spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb10
-rw-r--r--spec/lib/sidebars/projects/menus/analytics_menu_spec.rb30
-rw-r--r--spec/lib/sidebars/projects/menus/deployments_menu_spec.rb34
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb25
-rw-r--r--spec/lib/sidebars/projects/menus/repository_menu_spec.rb24
-rw-r--r--spec/lib/system_check/incoming_email_check_spec.rb20
-rw-r--r--spec/lib/unnested_in_filters/rewriter_spec.rb34
-rw-r--r--spec/lib/version_check_spec.rb14
-rw-r--r--spec/mailers/emails/profile_spec.rb64
-rw-r--r--spec/migrations/20220920124709_backfill_internal_on_notes_spec.rb (renamed from spec/migrations/20220901035725_schedule_destroy_invalid_project_members_spec.rb)10
-rw-r--r--spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb37
-rw-r--r--spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb74
-rw-r--r--spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb35
-rw-r--r--spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb35
-rw-r--r--spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb31
-rw-r--r--spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb32
-rw-r--r--spec/migrations/20221004094814_schedule_destroy_invalid_members_spec.rb (renamed from spec/migrations/20220809002011_schedule_destroy_invalid_group_members_spec.rb)2
-rw-r--r--spec/migrations/20221008032350_add_password_expiration_migration_spec.rb19
-rw-r--r--spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb33
-rw-r--r--spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb40
-rw-r--r--spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb14
-rw-r--r--spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb143
-rw-r--r--spec/migrations/backfill_epic_cache_counts_spec.rb32
-rw-r--r--spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb59
-rw-r--r--spec/migrations/populate_releases_access_level_from_repository_spec.rb39
-rw-r--r--spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb13
-rw-r--r--spec/models/analytics/cycle_analytics/project_stage_spec.rb9
-rw-r--r--spec/models/application_setting_spec.rb39
-rw-r--r--spec/models/award_emoji_spec.rb9
-rw-r--r--spec/models/bulk_imports/entity_spec.rb51
-rw-r--r--spec/models/bulk_imports/export_status_spec.rb30
-rw-r--r--spec/models/bulk_imports/failure_spec.rb36
-rw-r--r--spec/models/ci/bridge_spec.rb6
-rw-r--r--spec/models/ci/build_metadata_spec.rb57
-rw-r--r--spec/models/ci/build_spec.rb152
-rw-r--r--spec/models/ci/build_trace_chunks/redis_spec.rb8
-rw-r--r--spec/models/ci/build_trace_spec.rb7
-rw-r--r--spec/models/ci/daily_build_group_report_result_spec.rb37
-rw-r--r--spec/models/ci/job_token/project_scope_link_spec.rb11
-rw-r--r--spec/models/ci/job_token/scope_spec.rb4
-rw-r--r--spec/models/ci/pipeline_metadata_spec.rb14
-rw-r--r--spec/models/ci/pipeline_spec.rb113
-rw-r--r--spec/models/ci/processable_spec.rb2
-rw-r--r--spec/models/ci/resource_group_spec.rb6
-rw-r--r--spec/models/ci/runner_spec.rb184
-rw-r--r--spec/models/ci/secure_file_spec.rb66
-rw-r--r--spec/models/ci/unit_test_spec.rb40
-rw-r--r--spec/models/ci/variable_spec.rb2
-rw-r--r--spec/models/clusters/agents/implicit_authorization_spec.rb2
-rw-r--r--spec/models/clusters/applications/cert_manager_spec.rb52
-rw-r--r--spec/models/clusters/platforms/kubernetes_spec.rb67
-rw-r--r--spec/models/commit_collection_spec.rb5
-rw-r--r--spec/models/compare_spec.rb15
-rw-r--r--spec/models/concerns/approvable_spec.rb8
-rw-r--r--spec/models/concerns/atomic_internal_id_spec.rb13
-rw-r--r--spec/models/concerns/cascading_namespace_setting_attribute_spec.rb347
-rw-r--r--spec/models/concerns/ci/partitionable_spec.rb26
-rw-r--r--spec/models/concerns/counter_attribute_spec.rb4
-rw-r--r--spec/models/concerns/id_in_ordered_spec.rb7
-rw-r--r--spec/models/concerns/issuable_spec.rb18
-rw-r--r--spec/models/concerns/mentionable_spec.rb2
-rw-r--r--spec/models/concerns/noteable_spec.rb100
-rw-r--r--spec/models/concerns/participable_spec.rb3
-rw-r--r--spec/models/concerns/prometheus_adapter_spec.rb2
-rw-r--r--spec/models/concerns/routable_spec.rb6
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb100
-rw-r--r--spec/models/container_repository_spec.rb2
-rw-r--r--spec/models/deployment_spec.rb15
-rw-r--r--spec/models/diff_note_spec.rb28
-rw-r--r--spec/models/diff_viewer/server_side_spec.rb26
-rw-r--r--spec/models/discussion_spec.rb9
-rw-r--r--spec/models/environment_spec.rb59
-rw-r--r--spec/models/factories_spec.rb2
-rw-r--r--spec/models/group_spec.rb19
-rw-r--r--spec/models/hooks/web_hook_spec.rb120
-rw-r--r--spec/models/incident_management/timeline_event_spec.rb7
-rw-r--r--spec/models/incident_management/timeline_event_tag_link_spec.rb10
-rw-r--r--spec/models/incident_management/timeline_event_tag_spec.rb28
-rw-r--r--spec/models/integration_spec.rb11
-rw-r--r--spec/models/integrations/chat_message/issue_message_spec.rb17
-rw-r--r--spec/models/integrations/chat_message/wiki_page_message_spec.rb26
-rw-r--r--spec/models/integrations/datadog_spec.rb4
-rw-r--r--spec/models/integrations/harbor_spec.rb70
-rw-r--r--spec/models/integrations/jira_spec.rb9
-rw-r--r--spec/models/integrations/microsoft_teams_spec.rb6
-rw-r--r--spec/models/integrations/prometheus_spec.rb4
-rw-r--r--spec/models/jira_connect/public_key_spec.rb90
-rw-r--r--spec/models/jira_connect_installation_spec.rb98
-rw-r--r--spec/models/label_note_spec.rb14
-rw-r--r--spec/models/member_spec.rb24
-rw-r--r--spec/models/members/member_role_spec.rb34
-rw-r--r--spec/models/merge_request/cleanup_schedule_spec.rb13
-rw-r--r--spec/models/merge_request_diff_file_spec.rb20
-rw-r--r--spec/models/merge_request_diff_spec.rb119
-rw-r--r--spec/models/merge_request_spec.rb311
-rw-r--r--spec/models/milestone_spec.rb128
-rw-r--r--spec/models/ml/candidate_spec.rb10
-rw-r--r--spec/models/ml/experiment_spec.rb33
-rw-r--r--spec/models/namespace/aggregation_schedule_spec.rb26
-rw-r--r--spec/models/namespace/package_setting_spec.rb9
-rw-r--r--spec/models/namespace_setting_spec.rb4
-rw-r--r--spec/models/namespace_spec.rb21
-rw-r--r--spec/models/note_spec.rb10
-rw-r--r--spec/models/notification_recipient_spec.rb20
-rw-r--r--spec/models/operations/feature_flags/strategy_spec.rb26
-rw-r--r--spec/models/packages/package_spec.rb8
-rw-r--r--spec/models/packages/rpm/repository_file_spec.rb44
-rw-r--r--spec/models/pages_domain_spec.rb2
-rw-r--r--spec/models/personal_access_token_spec.rb25
-rw-r--r--spec/models/pool_repository_spec.rb13
-rw-r--r--spec/models/preloaders/project_root_ancestor_preloader_spec.rb48
-rw-r--r--spec/models/project_authorization_spec.rb186
-rw-r--r--spec/models/project_group_link_spec.rb6
-rw-r--r--spec/models/project_setting_spec.rb4
-rw-r--r--spec/models/project_spec.rb106
-rw-r--r--spec/models/project_statistics_spec.rb48
-rw-r--r--spec/models/protected_branch_spec.rb24
-rw-r--r--spec/models/repository_spec.rb191
-rw-r--r--spec/models/resource_label_event_spec.rb10
-rw-r--r--spec/models/user_detail_spec.rb135
-rw-r--r--spec/models/user_preference_spec.rb51
-rw-r--r--spec/models/user_spec.rb263
-rw-r--r--spec/models/users/namespace_callout_spec.rb39
-rw-r--r--spec/models/users/phone_number_validation_spec.rb81
-rw-r--r--spec/models/wiki_directory_spec.rb49
-rw-r--r--spec/models/wiki_page_spec.rb148
-rw-r--r--spec/policies/blob_policy_spec.rb1
-rw-r--r--spec/policies/ci/runner_policy_spec.rb147
-rw-r--r--spec/policies/concerns/crud_policy_helpers_spec.rb39
-rw-r--r--spec/policies/group_policy_spec.rb98
-rw-r--r--spec/policies/issuable_policy_spec.rb24
-rw-r--r--spec/policies/issue_policy_spec.rb7
-rw-r--r--spec/policies/namespaces/user_namespace_policy_spec.rb2
-rw-r--r--spec/policies/project_policy_spec.rb127
-rw-r--r--spec/policies/project_snippet_policy_spec.rb328
-rw-r--r--spec/policies/wiki_page_policy_spec.rb45
-rw-r--r--spec/presenters/blobs/unfold_presenter_spec.rb20
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb17
-rw-r--r--spec/presenters/commit_presenter_spec.rb5
-rw-r--r--spec/presenters/deploy_key_presenter_spec.rb24
-rw-r--r--spec/presenters/event_presenter_spec.rb8
-rw-r--r--spec/presenters/key_presenter_spec.rb32
-rw-r--r--spec/presenters/project_presenter_spec.rb8
-rw-r--r--spec/presenters/projects/security/configuration_presenter_spec.rb31
-rw-r--r--spec/requests/admin/impersonation_tokens_controller_spec.rb18
-rw-r--r--spec/requests/api/admin/batched_background_migrations_spec.rb88
-rw-r--r--spec/requests/api/branches_spec.rb48
-rw-r--r--spec/requests/api/bulk_imports_spec.rb12
-rw-r--r--spec/requests/api/ci/job_artifacts_spec.rb43
-rw-r--r--spec/requests/api/ci/jobs_spec.rb25
-rw-r--r--spec/requests/api/ci/resource_groups_spec.rb42
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb52
-rw-r--r--spec/requests/api/ci/runner/runners_reset_spec.rb1
-rw-r--r--spec/requests/api/ci/runners_spec.rb40
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb37
-rw-r--r--spec/requests/api/features_spec.rb772
-rw-r--r--spec/requests/api/generic_packages_spec.rb15
-rw-r--r--spec/requests/api/graphql/ci/ci_cd_setting_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/config_spec.rb64
-rw-r--r--spec/requests/api/graphql/ci/config_variables_spec.rb9
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb11
-rw-r--r--spec/requests/api/graphql/ci/pipeline_schedules_spec.rb88
-rw-r--r--spec/requests/api/graphql/jobs_query_spec.rb55
-rw-r--r--spec/requests/api/graphql/milestone_spec.rb199
-rw-r--r--spec/requests/api/graphql/mutations/ci/job/artifacts_destroy_spec.rb85
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_schedule_delete_spec.rb82
-rw-r--r--spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb51
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb45
-rw-r--r--spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb128
-rw-r--r--spec/requests/api/graphql/mutations/uploads/delete_spec.rb9
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb72
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb61
-rw-r--r--spec/requests/api/graphql/project/branch_rules_spec.rb68
-rw-r--r--spec/requests/api/graphql/project/cluster_agents_spec.rb11
-rw-r--r--spec/requests/api/graphql/project/issue/designs/designs_spec.rb33
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb50
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb53
-rw-r--r--spec/requests/api/graphql/project/milestones_spec.rb7
-rw-r--r--spec/requests/api/graphql/project/work_items_spec.rb113
-rw-r--r--spec/requests/api/graphql/todo_query_spec.rb9
-rw-r--r--spec/requests/api/graphql/usage_trends_measurements_spec.rb22
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb16
-rw-r--r--spec/requests/api/groups_spec.rb34
-rw-r--r--spec/requests/api/helm_packages_spec.rb11
-rw-r--r--spec/requests/api/import_github_spec.rb41
-rw-r--r--spec/requests/api/internal/base_spec.rb20
-rw-r--r--spec/requests/api/issues/issues_spec.rb15
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb4
-rw-r--r--spec/requests/api/issues/put_projects_issues_spec.rb4
-rw-r--r--spec/requests/api/maven_packages_spec.rb52
-rw-r--r--spec/requests/api/merge_requests_spec.rb87
-rw-r--r--spec/requests/api/metadata_spec.rb26
-rw-r--r--spec/requests/api/ml/mlflow_spec.rb330
-rw-r--r--spec/requests/api/pages_domains_spec.rb60
-rw-r--r--spec/requests/api/personal_access_tokens/self_information_spec.rb (renamed from spec/requests/api/personal_access_tokens/self_revocation_spec.rb)41
-rw-r--r--spec/requests/api/personal_access_tokens_spec.rb367
-rw-r--r--spec/requests/api/project_attributes.yml7
-rw-r--r--spec/requests/api/projects_spec.rb26
-rw-r--r--spec/requests/api/settings_spec.rb5
-rw-r--r--spec/requests/api/tags_spec.rb8
-rw-r--r--spec/requests/api/users_spec.rb33
-rw-r--r--spec/requests/api/version_spec.rb93
-rw-r--r--spec/requests/boards/lists_controller_spec.rb25
-rw-r--r--spec/requests/git_http_spec.rb54
-rw-r--r--spec/requests/groups/settings/access_tokens_controller_spec.rb15
-rw-r--r--spec/requests/ide_controller_spec.rb31
-rw-r--r--spec/requests/import/github_groups_controller_spec.rb69
-rw-r--r--spec/requests/jira_connect/public_keys_controller_spec.rb55
-rw-r--r--spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb9
-rw-r--r--spec/requests/projects/incident_management/timeline_events_spec.rb60
-rw-r--r--spec/requests/projects/issues_controller_spec.rb15
-rw-r--r--spec/requests/projects/merge_requests_controller_spec.rb15
-rw-r--r--spec/requests/projects/settings/access_tokens_controller_spec.rb15
-rw-r--r--spec/requests/users/namespace_callouts_spec.rb57
-rw-r--r--spec/requests/users_controller_spec.rb20
-rw-r--r--spec/routing/import_routing_spec.rb7
-rw-r--r--spec/rubocop/check_graceful_task_spec.rb18
-rw-r--r--spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb2
-rw-r--r--spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb51
-rw-r--r--spec/rubocop/cop/gitlab/no_code_coverage_comment_spec.rb60
-rw-r--r--spec/rubocop/cop/gitlab/service_response_spec.rb41
-rw-r--r--spec/rubocop/cop/migration/background_migration_missing_active_concern_spec.rb86
-rw-r--r--spec/rubocop/cop/redis_queue_usage_spec.rb99
-rw-r--r--spec/rubocop/cop/rspec/factory_bot/avoid_create_spec.rb25
-rw-r--r--spec/rubocop/cop/rspec/top_level_describe_path_spec.rb13
-rw-r--r--spec/rubocop/cop/sidekiq_api_usage_spec.rb60
-rw-r--r--spec/rubocop/cop/sidekiq_redis_call_spec.rb30
-rw-r--r--spec/rubocop/cop/static_translation_definition_spec.rb12
-rw-r--r--spec/rubocop/cop_todo_spec.rb35
-rw-r--r--spec/rubocop/formatter/graceful_formatter_spec.rb9
-rw-r--r--spec/rubocop/formatter/todo_formatter_spec.rb70
-rw-r--r--spec/rubocop_spec_helper.rb18
-rw-r--r--spec/scripts/lib/glfm/update_example_snapshots_spec.rb759
-rw-r--r--spec/scripts/lib/glfm/update_specification_spec.rb169
-rw-r--r--spec/scripts/lib/glfm/verify_all_generated_files_are_up_to_date_spec.rb62
-rw-r--r--spec/scripts/trigger-build_spec.rb2
-rw-r--r--spec/serializers/board_serializer_spec.rb20
-rw-r--r--spec/serializers/board_simple_entity_spec.rb16
-rw-r--r--spec/serializers/build_trace_entity_spec.rb7
-rw-r--r--spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb33
-rw-r--r--spec/serializers/deployment_entity_spec.rb72
-rw-r--r--spec/serializers/environment_status_entity_spec.rb35
-rw-r--r--spec/serializers/group_access_token_entity_spec.rb4
-rw-r--r--spec/serializers/import/github_org_entity_spec.rb25
-rw-r--r--spec/serializers/import/github_org_serializer_spec.rb47
-rw-r--r--spec/serializers/issue_entity_spec.rb2
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb18
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb74
-rw-r--r--spec/serializers/project_access_token_entity_spec.rb4
-rw-r--r--spec/services/admin/set_feature_flag_service_spec.rb300
-rw-r--r--spec/services/alert_management/create_alert_issue_service_spec.rb6
-rw-r--r--spec/services/award_emojis/copy_service_spec.rb10
-rw-r--r--spec/services/boards/issues/create_service_spec.rb5
-rw-r--r--spec/services/boards/lists/generate_service_spec.rb45
-rw-r--r--spec/services/boards/lists/list_service_spec.rb34
-rw-r--r--spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb8
-rw-r--r--spec/services/bulk_imports/create_service_spec.rb5
-rw-r--r--spec/services/bulk_imports/repository_bundle_export_service_spec.rb10
-rw-r--r--spec/services/bulk_imports/uploads_export_service_spec.rb62
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb4
-rw-r--r--spec/services/ci/compare_test_reports_service_spec.rb9
-rw-r--r--spec/services/ci/create_pipeline_service/include_spec.rb46
-rw-r--r--spec/services/ci/create_pipeline_service/limit_active_jobs_spec.rb53
-rw-r--r--spec/services/ci/create_pipeline_service/logger_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb48
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb6
-rw-r--r--spec/services/ci/find_exposed_artifacts_service_spec.rb58
-rw-r--r--spec/services/ci/generate_kubeconfig_service_spec.rb6
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb3
-rw-r--r--spec/services/ci/job_artifacts/delete_service_spec.rb27
-rw-r--r--spec/services/ci/job_token_scope/add_project_service_spec.rb2
-rw-r--r--spec/services/ci/job_token_scope/remove_project_service_spec.rb2
-rw-r--r--spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb25
-rw-r--r--spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb24
-rw-r--r--spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb32
-rw-r--r--spec/services/ci/runners/register_runner_service_spec.rb43
-rw-r--r--spec/services/ci/runners/set_runner_associated_projects_service_spec.rb12
-rw-r--r--spec/services/ci/unlock_artifacts_service_spec.rb21
-rw-r--r--spec/services/clusters/applications/destroy_service_spec.rb63
-rw-r--r--spec/services/clusters/applications/uninstall_service_spec.rb77
-rw-r--r--spec/services/design_management/move_designs_service_spec.rb35
-rw-r--r--spec/services/git/tag_hooks_service_spec.rb12
-rw-r--r--spec/services/google_cloud/enable_cloudsql_service_spec.rb30
-rw-r--r--spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb18
-rw-r--r--spec/services/groups/destroy_service_spec.rb6
-rw-r--r--spec/services/groups/import_export/import_service_spec.rb72
-rw-r--r--spec/services/import/github/cancel_project_import_service_spec.rb56
-rw-r--r--spec/services/import/github_service_spec.rb40
-rw-r--r--spec/services/import/gitlab_projects/create_project_service_spec.rb9
-rw-r--r--spec/services/incident_management/incidents/create_service_spec.rb4
-rw-r--r--spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb15
-rw-r--r--spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb2
-rw-r--r--spec/services/incident_management/timeline_events/create_service_spec.rb84
-rw-r--r--spec/services/incident_management/timeline_events/destroy_service_spec.rb22
-rw-r--r--spec/services/incident_management/timeline_events/update_service_spec.rb16
-rw-r--r--spec/services/issuable/process_assignees_spec.rb23
-rw-r--r--spec/services/issues/clone_service_spec.rb15
-rw-r--r--spec/services/issues/create_service_spec.rb128
-rw-r--r--spec/services/issues/move_service_spec.rb17
-rw-r--r--spec/services/issues/update_service_spec.rb32
-rw-r--r--spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb46
-rw-r--r--spec/services/jira_connect/sync_service_spec.rb9
-rw-r--r--spec/services/members/create_service_spec.rb49
-rw-r--r--spec/services/members/destroy_service_spec.rb31
-rw-r--r--spec/services/merge_requests/close_service_spec.rb37
-rw-r--r--spec/services/merge_requests/create_from_issue_service_spec.rb24
-rw-r--r--spec/services/merge_requests/create_service_spec.rb2
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb6
-rw-r--r--spec/services/merge_requests/link_lfs_objects_service_spec.rb9
-rw-r--r--spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb22
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb76
-rw-r--r--spec/services/merge_requests/mergeability/logger_spec.rb19
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb16
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb53
-rw-r--r--spec/services/merge_requests/request_review_service_spec.rb22
-rw-r--r--spec/services/merge_requests/update_assignees_service_spec.rb26
-rw-r--r--spec/services/merge_requests/update_reviewers_service_spec.rb12
-rw-r--r--spec/services/merge_requests/update_service_spec.rb48
-rw-r--r--spec/services/ml/experiment_tracking/candidate_repository_spec.rb199
-rw-r--r--spec/services/ml/experiment_tracking/experiment_repository_spec.rb85
-rw-r--r--spec/services/namespaces/package_settings/update_service_spec.rb37
-rw-r--r--spec/services/notification_service_spec.rb21
-rw-r--r--spec/services/onboarding/progress_service_spec.rb6
-rw-r--r--spec/services/packages/debian/create_package_file_service_spec.rb53
-rw-r--r--spec/services/packages/mark_packages_for_destruction_service_spec.rb107
-rw-r--r--spec/services/packages/rpm/parse_package_service_spec.rb60
-rw-r--r--spec/services/packages/rpm/repository_metadata/base_builder_spec.rb13
-rw-r--r--spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb34
-rw-r--r--spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb20
-rw-r--r--spec/services/pages_domains/create_acme_order_service_spec.rb10
-rw-r--r--spec/services/pages_domains/create_service_spec.rb61
-rw-r--r--spec/services/pages_domains/delete_service_spec.rb47
-rw-r--r--spec/services/pages_domains/update_service_spec.rb61
-rw-r--r--spec/services/projects/autocomplete_service_spec.rb34
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb394
-rw-r--r--spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb4
-rw-r--r--spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb370
-rw-r--r--spec/services/projects/destroy_service_spec.rb14
-rw-r--r--spec/services/projects/import_service_spec.rb9
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb5
-rw-r--r--spec/services/projects/update_service_spec.rb53
-rw-r--r--spec/services/repositories/changelog_service_spec.rb18
-rw-r--r--spec/services/resource_events/merge_into_notes_service_spec.rb4
-rw-r--r--spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb9
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb7
-rw-r--r--spec/services/users/destroy_service_spec.rb75
-rw-r--r--spec/services/users/dismiss_namespace_callout_service_spec.rb24
-rw-r--r--spec/services/users/refresh_authorized_projects_service_spec.rb4
-rw-r--r--spec/services/web_hook_service_spec.rb21
-rw-r--r--spec/services/web_hooks/log_execution_service_spec.rb43
-rw-r--r--spec/services/work_items/update_service_spec.rb80
-rw-r--r--spec/spec_helper.rb15
-rw-r--r--spec/support/capybara.rb2
-rw-r--r--spec/support/capybara_slow_finder.rb32
-rw-r--r--spec/support/cross_database_modification.rb9
-rw-r--r--spec/support/database/multiple_databases.rb48
-rw-r--r--spec/support/database/prevent_cross_database_modification.rb10
-rw-r--r--spec/support/database_cleaner.rb23
-rw-r--r--spec/support/db_cleaner.rb38
-rw-r--r--spec/support/finder_collection_allowlist.yml1
-rw-r--r--spec/support/gitlab_stubs/gitlab_ci.yml5
-rw-r--r--spec/support/helpers/exclusive_lease_helpers.rb19
-rw-r--r--spec/support/helpers/features/web_ide_spec_helpers.rb24
-rw-r--r--spec/support/helpers/git_helpers.rb4
-rw-r--r--spec/support/helpers/graphql_helpers.rb5
-rw-r--r--spec/support/helpers/html_escaped_helpers.rb31
-rw-r--r--spec/support/helpers/ldap_helpers.rb26
-rw-r--r--spec/support/helpers/login_helpers.rb10
-rw-r--r--spec/support/helpers/migrations_helpers/vulnerabilities_helper.rb40
-rw-r--r--spec/support/helpers/project_helpers.rb16
-rw-r--r--spec/support/helpers/seed_helper.rb67
-rw-r--r--spec/support/helpers/stub_configuration.rb2
-rw-r--r--spec/support/helpers/stub_experiments.rb37
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb6
-rw-r--r--spec/support/helpers/stub_object_storage.rb17
-rw-r--r--spec/support/helpers/test_env.rb24
-rw-r--r--spec/support/helpers/usage_data_helpers.rb18
-rw-r--r--spec/support/helpers/user_helpers.rb33
-rw-r--r--spec/support/matchers/event_store.rb61
-rw-r--r--spec/support/migration.rb4
-rw-r--r--spec/support/models/partitionable_check.rb46
-rw-r--r--spec/support/rspec_order_todo.yml10
-rw-r--r--spec/support/services/issuable_update_service_shared_examples.rb6
-rw-r--r--spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb13
-rw-r--r--spec/support/shared_contexts/html_safe_shared_context.rb23
-rw-r--r--spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb5
-rw-r--r--spec/support/shared_contexts/markdown_golden_master_shared_examples.rb2
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/rubocop_default_rspec_language_config_context.rb32
-rw-r--r--spec/support/shared_contexts/services/packages/rpm/xml_shared_context.rb7
-rw-r--r--spec/support/shared_contexts/views/html_safe_render_shared_context.rb39
-rw-r--r--spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb48
-rw-r--r--spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb12
-rw-r--r--spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/features/access_tokens_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb125
-rw-r--r--spec/support/shared_examples/features/deploy_token_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb4
-rw-r--r--spec/support/shared_examples/features/project_upload_files_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/runners_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb33
-rw-r--r--spec/support/shared_examples/graphql/n_plus_one_query_examples.rb16
-rw-r--r--spec/support/shared_examples/lib/cache_helpers_shared_examples.rb53
-rw-r--r--spec/support/shared_examples/lib/gitlab/memory/watchdog/monitor_result_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/lib/gitlab/regex_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/models/boards/listable_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/models/chat_integration_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb355
-rw-r--r--spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb104
-rw-r--r--spec/support/shared_examples/models/concerns/has_wiki_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/concerns/participable_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/models/concerns/timebox_shared_examples.rb118
-rw-r--r--spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb298
-rw-r--r--spec/support/shared_examples/policies/wiki_policies_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/projects/container_repository/cleanup_tags_service_shared_examples.rb62
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/max_issuable_examples.rb85
-rw-r--r--spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb40
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/requests/api/hooks_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/projects/google_cloud/google_oauth2_token_examples.rb1
-rw-r--r--spec/support/shared_examples/serializers/issuable_current_user_properties_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/services/base_rpm_service_shared_examples.rb52
-rw-r--r--spec/support/shared_examples/services/merge_request_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/services/reviewers_change_trigger_shared_examples.rb17
-rw-r--r--spec/support/view_component.rb7
-rw-r--r--spec/support_specs/capybara_slow_finder_spec.rb78
-rw-r--r--spec/support_specs/database/multiple_databases_spec.rb22
-rw-r--r--spec/support_specs/helpers/graphql_helpers_spec.rb35
-rw-r--r--spec/support_specs/helpers/html_escaped_helpers_spec.rb29
-rw-r--r--spec/support_specs/helpers/stub_method_calls_spec.rb4
-rw-r--r--spec/support_specs/matchers/event_store_spec.rb126
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb302
-rw-r--r--spec/tasks/gitlab/db/lock_writes_rake_spec.rb20
-rw-r--r--spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb8
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb74
-rw-r--r--spec/tasks/gitlab/usage_data_rake_spec.rb9
-rw-r--r--spec/tooling/danger/config_files_spec.rb2
-rw-r--r--spec/tooling/danger/project_helper_spec.rb9
-rw-r--r--spec/tooling/danger/specs_spec.rb166
-rw-r--r--spec/tooling/quality/test_level_spec.rb2
-rw-r--r--spec/uploaders/job_artifact_uploader_spec.rb8
-rw-r--r--spec/uploaders/object_storage/cdn/google_cdn_spec.rb22
-rw-r--r--spec/uploaders/object_storage/cdn_spec.rb44
-rw-r--r--spec/uploaders/packages/rpm/repository_file_uploader_spec.rb45
-rw-r--r--spec/views/admin/application_settings/_package_registry.html.haml_spec.rb2
-rw-r--r--spec/views/admin/application_settings/ci_cd.html.haml_spec.rb39
-rw-r--r--spec/views/admin/broadcast_messages/index.html.haml_spec.rb36
-rw-r--r--spec/views/events/event/_common.html.haml_spec.rb26
-rw-r--r--spec/views/groups/new.html.haml_spec.rb7
-rw-r--r--spec/views/layouts/_flash.html.haml_spec.rb25
-rw-r--r--spec/views/layouts/fullscreen.html.haml_spec.rb29
-rw-r--r--spec/views/layouts/header/_gitlab_version.html.haml_spec.rb6
-rw-r--r--spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb16
-rw-r--r--spec/views/projects/hooks/edit.html.haml_spec.rb2
-rw-r--r--spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb34
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb1
-rw-r--r--spec/views/search/_results.html.haml_spec.rb9
-rw-r--r--spec/views/search/show.html.haml_spec.rb135
-rw-r--r--spec/views/shared/projects/_project.html.haml_spec.rb4
-rw-r--r--spec/workers/bulk_import_worker_spec.rb3
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb37
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb130
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb53
-rw-r--r--spec/workers/ci/parse_secure_file_metadata_worker_spec.rb31
-rw-r--r--spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb21
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb14
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb13
-rw-r--r--spec/workers/experiments/record_conversion_event_worker_spec.rb35
-rw-r--r--spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb34
-rw-r--r--spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb34
-rw-r--r--spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb49
-rw-r--r--spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb49
-rw-r--r--spec/workers/gitlab/github_import/import_issue_worker_spec.rb11
-rw-r--r--spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb6
-rw-r--r--spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb60
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb46
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb44
-rw-r--r--spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb45
-rw-r--r--spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb71
-rw-r--r--spec/workers/integrations/create_external_cross_reference_worker_spec.rb2
-rw-r--r--spec/workers/merge_requests/delete_source_branch_worker_spec.rb42
-rw-r--r--spec/workers/namespaces/prune_aggregation_schedules_worker_spec.rb2
-rw-r--r--spec/workers/onboarding/issue_created_worker_spec.rb (renamed from spec/workers/namespaces/onboarding_issue_created_worker_spec.rb)2
-rw-r--r--spec/workers/onboarding/pipeline_created_worker_spec.rb (renamed from spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb)2
-rw-r--r--spec/workers/onboarding/progress_worker_spec.rb (renamed from spec/workers/namespaces/onboarding_progress_worker_spec.rb)2
-rw-r--r--spec/workers/onboarding/user_added_worker_spec.rb (renamed from spec/workers/namespaces/onboarding_user_added_worker_spec.rb)2
-rw-r--r--spec/workers/pages/invalidate_domain_cache_worker_spec.rb101
-rw-r--r--spec/workers/process_commit_worker_spec.rb28
-rw-r--r--spec/workers/project_cache_worker_spec.rb6
-rw-r--r--spec/workers/project_destroy_worker_spec.rb8
-rw-r--r--spec/workers/repository_check/single_repository_worker_spec.rb6
-rw-r--r--spec/workers/repository_fork_worker_spec.rb2
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb3
1575 files changed, 36890 insertions, 21027 deletions
diff --git a/spec/bin/diagnostic_reports_uploader_spec.rb b/spec/bin/diagnostic_reports_uploader_spec.rb
new file mode 100644
index 00000000000..9a929de6d0e
--- /dev/null
+++ b/spec/bin/diagnostic_reports_uploader_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'tempfile'
+
+RSpec.describe 'bin/diagnostic-reports-uploader' do
+ let(:reports_dir) { Dir.mktmpdir }
+ let(:gcs_key) { Tempfile.new }
+ let(:gcs_project) { 'test_gcs_project' }
+ let(:gcs_bucket) { 'test_gcs_bucket' }
+
+ after do
+ FileUtils.remove_entry(reports_dir)
+ FileUtils.remove_entry(gcs_key)
+ end
+
+ subject(:load_bin) { load File.expand_path('../../bin/diagnostic-reports-uploader', __dir__) }
+
+ context 'when necessary ENV vars are set' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
+ stub_env('GITLAB_GCP_KEY_PATH', gcs_key.path)
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PROJECT', gcs_project)
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_BUCKET', gcs_bucket)
+ end
+
+ let(:reports_uploader) { instance_double(Gitlab::Memory::ReportsUploader) }
+ let(:upload_and_cleanup_reports) { instance_double(Gitlab::Memory::UploadAndCleanupReports) }
+ let(:logger) { instance_double(Gitlab::Memory::DiagnosticReportsLogger) }
+
+ it 'runs successfully' do
+ expect(Gitlab::Memory::DiagnosticReportsLogger).to receive(:new).and_return(logger)
+
+ expect(Gitlab::Memory::ReportsUploader)
+ .to receive(:new).with(gcs_key: gcs_key.path, gcs_project: gcs_project, gcs_bucket: gcs_bucket, logger: logger)
+ .and_return(reports_uploader)
+
+ expect(Gitlab::Memory::UploadAndCleanupReports)
+ .to receive(:new).with(uploader: reports_uploader, reports_path: reports_dir, logger: logger)
+ .and_return(upload_and_cleanup_reports)
+
+ expect(upload_and_cleanup_reports).to receive(:call)
+
+ load_bin
+ end
+ end
+
+ context 'when GITLAB_DIAGNOSTIC_REPORTS_PATH is missing' do
+ it 'raises RuntimeError' do
+ expect { load_bin }.to raise_error(RuntimeError, 'GITLAB_DIAGNOSTIC_REPORTS_PATH dir is missing')
+ end
+ end
+
+ context 'when GITLAB_GCP_KEY_PATH is missing' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
+ end
+
+ it 'raises RuntimeError' do
+ expect { load_bin }.to raise_error(RuntimeError, /GCS keyfile not found/)
+ end
+ end
+
+ context 'when GITLAB_DIAGNOSTIC_REPORTS_PROJECT is missing' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
+ stub_env('GITLAB_GCP_KEY_PATH', gcs_key.path)
+ end
+
+ it 'raises RuntimeError' do
+ expect { load_bin }.to raise_error(RuntimeError, 'GITLAB_DIAGNOSTIC_REPORTS_PROJECT is missing')
+ end
+ end
+
+ context 'when GITLAB_DIAGNOSTIC_REPORTS_BUCKET is missing' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', reports_dir)
+ stub_env('GITLAB_GCP_KEY_PATH', gcs_key.path)
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PROJECT', gcs_project)
+ end
+
+ it 'raises RuntimeError' do
+ expect { load_bin }.to raise_error(RuntimeError, 'GITLAB_DIAGNOSTIC_REPORTS_BUCKET is missing')
+ end
+ end
+end
diff --git a/spec/commands/diagnostic_reports/uploader_smoke_spec.rb b/spec/commands/diagnostic_reports/uploader_smoke_spec.rb
new file mode 100644
index 00000000000..9fbceb68844
--- /dev/null
+++ b/spec/commands/diagnostic_reports/uploader_smoke_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'tempfile'
+
+# We need to capture pid from Process.spawn and then clean up by killing the process, which requires instance variables.
+# rubocop: disable RSpec/InstanceVariable
+RSpec.describe 'bin/diagnostic-reports-uploader' do
+ # This is a smoke test for 'bin/diagnostic-reports-uploader'.
+ # We intend to run this binary with `ruby bin/diagnostic-reports-uploader`, without preloading the entire Rails app.
+ # Also, we use inline gemfile, to avoid pulling full Gemfile from the main app into memory.
+ # The goal of that test is to confirm that the binary starts that way.
+ # The implementation logic is covered in 'spec/bin/diagnostic_reports_uploader_spec.rb'
+ include FastRailsRoot
+
+ let(:gcs_bucket) { 'test_bucket' }
+ let(:gcs_project) { 'test_project' }
+ let(:gcs_key) { Tempfile.new }
+ let(:reports_dir) { Dir.mktmpdir }
+ let(:report) { Tempfile.new('report.json', reports_dir) }
+
+ let(:env) do
+ {
+ 'GITLAB_DIAGNOSTIC_REPORTS_BUCKET' => gcs_bucket,
+ 'GITLAB_DIAGNOSTIC_REPORTS_PROJECT' => gcs_project,
+ 'GITLAB_GCP_KEY_PATH' => gcs_key.path,
+ 'GITLAB_DIAGNOSTIC_REPORTS_PATH' => reports_dir,
+ 'GITLAB_DIAGNOSTIC_REPORTS_UPLOADER_SLEEP_S' => '1'
+ }
+ end
+
+ before do
+ gcs_key.write(
+ {
+ type: "service_account",
+ client_email: 'test@gitlab.com',
+ private_key_id: "test_id",
+ private_key: File.read(rails_root_join('spec/fixtures/ssl_key.pem'))
+ }.to_json
+ )
+ gcs_key.rewind
+
+ FileUtils.touch(report.path)
+ end
+
+ after do
+ if @pid
+ Timeout.timeout(10) do
+ Process.kill('TERM', @pid)
+ Process.waitpid(@pid)
+ end
+ end
+ rescue Errno::ESRCH, Errno::ECHILD => _
+ # 'No such process' or 'No child processes' means the process died before
+ ensure
+ gcs_key.unlink
+ FileUtils.rm_rf(reports_dir, secure: true)
+ end
+
+ it 'starts successfully' do
+ expect(File.exist?(report.path)).to be true
+
+ bin_path = rails_root_join("bin/diagnostic-reports-uploader")
+
+ cmd = ['bundle', 'exec', 'ruby', bin_path]
+ @pid = Process.spawn(env, *cmd)
+
+ expect(Gitlab::ProcessManagement.process_alive?(@pid)).to be true
+
+ expect do
+ Timeout.timeout(10) do
+ # Uploader will remove the file, no matter the upload result. We are waiting for exactly that.
+ # The report being removed means the uploader loop works. We are not attempting real upload.
+ attempted_upload_and_cleanup = false
+ until attempted_upload_and_cleanup
+ sleep 1
+ attempted_upload_and_cleanup = !File.exist?(report.path)
+ end
+ end
+ end.not_to raise_error
+ end
+end
+# rubocop: enable RSpec/InstanceVariable
diff --git a/spec/components/pajamas/alert_component_spec.rb b/spec/components/pajamas/alert_component_spec.rb
index c60724c7b78..4a90a9e0b88 100644
--- a/spec/components/pajamas/alert_component_spec.rb
+++ b/spec/components/pajamas/alert_component_spec.rb
@@ -45,11 +45,37 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
end
+ describe 'title' do
+ before do
+ render_inline described_class.new(title: title)
+ end
+
+ context 'with non-empty string' do
+ let(:title) { '_title_' }
+
+ it 'sets the title' do
+ expect(page).to have_selector('.gl-alert-title')
+ expect(page).to have_content(title)
+ expect(page).not_to have_selector('.gl-alert-icon-no-title')
+ end
+ end
+
+ context 'with nil, empty or blank string' do
+ where(:title) { [nil, '', ' '] }
+
+ with_them do
+ it 'does not set a title' do
+ expect(page).not_to have_selector('.gl-alert-title')
+ expect(page).to have_selector('.gl-alert-icon-no-title')
+ end
+ end
+ end
+ end
+
context 'with custom options' do
context 'with simple options' do
before do
render_inline described_class.new(
- title: '_title_',
alert_options: {
class: '_alert_class_',
data: {
@@ -60,12 +86,6 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
)
end
- it 'sets the title' do
- expect(page).to have_selector('.gl-alert-title')
- expect(page).to have_content('_title_')
- expect(page).not_to have_selector('.gl-alert-icon-no-title')
- end
-
it 'sets the alert_class' do
expect(page).to have_selector('._alert_class_')
end
@@ -129,7 +149,7 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
end
context 'with setting variant type' do
- where(:variant) { [:warning, :success, :danger, :tip] }
+ where(:variant) { [:warning, "success", :danger, "tip"] }
before do
render_inline described_class.new(variant: variant)
@@ -138,7 +158,18 @@ RSpec.describe Pajamas::AlertComponent, :aggregate_failures, type: :component do
with_them do
it 'renders the variant' do
expect(page).to have_selector(".gl-alert-#{variant}")
- expect(page).to have_selector("[data-testid='#{described_class::ICONS[variant]}-icon']")
+ expect(page).to have_selector("[data-testid='#{described_class::VARIANT_ICONS[variant.to_sym]}-icon']")
+ end
+ end
+
+ context "with unknown or nil variant" do
+ where(:variant) { [:foo, nil] }
+
+ with_them do
+ it "adds the default variant class" do
+ expect(page).to have_selector(".gl-alert-info")
+ expect(page).to have_selector("[data-testid='information-o-icon']")
+ end
end
end
end
diff --git a/spec/components/pajamas/progress_component_spec.rb b/spec/components/pajamas/progress_component_spec.rb
new file mode 100644
index 00000000000..5172f459a84
--- /dev/null
+++ b/spec/components/pajamas/progress_component_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Pajamas::ProgressComponent, type: :component do
+ before do
+ render_inline(described_class.new(value: value, variant: variant))
+ end
+
+ let(:value) { 33 }
+ let(:variant) { nil }
+
+ describe "value" do
+ it "sets the width of the progressbar" do
+ expect(page).to have_css ".progress-bar[style='width: #{value}%;']"
+ end
+ end
+
+ describe "variant" do
+ where(:variant) { [:primary, :success] }
+
+ with_them do
+ it "adds variant class" do
+ expect(page).to have_css ".progress-bar.bg-#{variant}"
+ end
+ end
+
+ context "with unknown variant" do
+ let(:variant) { :nope }
+
+ it "adds the default variant class" do
+ expect(page).to have_css ".progress-bar.bg-primary"
+ end
+ end
+ end
+end
diff --git a/spec/components/previews/pajamas/alert_component_preview.rb b/spec/components/previews/pajamas/alert_component_preview.rb
index 9a6b77715f5..e1889032c8b 100644
--- a/spec/components/previews/pajamas/alert_component_preview.rb
+++ b/spec/components/previews/pajamas/alert_component_preview.rb
@@ -1,12 +1,13 @@
# frozen_string_literal: true
module Pajamas
class AlertComponentPreview < ViewComponent::Preview
+ # @param title text
# @param body text
# @param dismissible toggle
# @param variant select [info, warning, success, danger, tip]
- def default(body: nil, dismissible: true, variant: :info)
+ def default(title: "Alert title (optional)", body: "Alert message goes here.", dismissible: true, variant: :info)
render(Pajamas::AlertComponent.new(
- title: "Title",
+ title: title,
dismissible: dismissible,
variant: variant.to_sym
)) do |c|
diff --git a/spec/components/previews/pajamas/progress_component_preview.rb b/spec/components/previews/pajamas/progress_component_preview.rb
new file mode 100644
index 00000000000..4de07872a80
--- /dev/null
+++ b/spec/components/previews/pajamas/progress_component_preview.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module Pajamas
+ class ProgressComponentPreview < ViewComponent::Preview
+ # Progress
+ # ---
+ #
+ # See its design reference [here](https://design.gitlab.com/components/progress-bar).
+ #
+ # @param value number
+ # @param variant select [primary, success]
+ def default(value: 50, variant: :primary)
+ render Pajamas::ProgressComponent.new(value: value, variant: variant)
+ end
+ end
+end
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index 8ddb5652dae..9275c809550 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -113,6 +113,27 @@ RSpec.describe ObjectStoreSettings do
expect(settings.lfs['object_store']['bucket_prefix']).to eq('lfs')
end
+ context 'with Google CDN enabled' do
+ let(:cdn_config) do
+ {
+ 'provider' => 'Google',
+ 'url' => 'https://cdn.example.org',
+ 'key_name' => 'stanhu-key',
+ 'key' => Base64.urlsafe_encode64(SecureRandom.hex)
+ }
+ end
+
+ before do
+ config['object_store']['objects']['artifacts']['cdn'] = cdn_config
+ end
+
+ it 'populates artifacts CDN config' do
+ subject
+
+ expect(settings.artifacts['object_store']['cdn']).to eq(cdn_config)
+ end
+ end
+
it 'raises an error when a bucket is missing' do
config['object_store']['objects']['lfs'].delete('bucket')
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index ab0cad989cb..0ad0a111156 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -211,6 +211,13 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
expect(ApplicationSetting.current.valid_runner_registrars).to eq(['project'])
end
+ it 'updates can_create_group setting' do
+ put :update, params: { application_setting: { can_create_group: false } }
+
+ expect(response).to redirect_to(general_admin_application_settings_path)
+ expect(ApplicationSetting.current.can_create_group).to eq(false)
+ end
+
context "personal access token prefix settings" do
let(:application_settings) { ApplicationSetting.current }
diff --git a/spec/controllers/admin/cohorts_controller_spec.rb b/spec/controllers/admin/cohorts_controller_spec.rb
index 766073977c6..50626a5da91 100644
--- a/spec/controllers/admin/cohorts_controller_spec.rb
+++ b/spec/controllers/admin/cohorts_controller_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Admin::CohortsController do
let(:target_id) { 'i_analytics_cohorts' }
end
- it_behaves_like 'Snowplow event tracking' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject { get :index }
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
diff --git a/spec/controllers/admin/dev_ops_report_controller_spec.rb b/spec/controllers/admin/dev_ops_report_controller_spec.rb
index 5d7a7e089aa..52a46b5e99a 100644
--- a/spec/controllers/admin/dev_ops_report_controller_spec.rb
+++ b/spec/controllers/admin/dev_ops_report_controller_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Admin::DevOpsReportController do
let(:request_params) { { tab: 'devops-score' } }
end
- it_behaves_like 'Snowplow event tracking' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject { get :show, format: :html }
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
diff --git a/spec/controllers/admin/groups_controller_spec.rb b/spec/controllers/admin/groups_controller_spec.rb
index fb843ac6a7a..37cb0a1f289 100644
--- a/spec/controllers/admin/groups_controller_spec.rb
+++ b/spec/controllers/admin/groups_controller_spec.rb
@@ -44,64 +44,4 @@ RSpec.describe Admin::GroupsController do
end.to change { Namespace::AdminNote.count }.by(1)
end
end
-
- describe 'PUT #members_update' do
- let_it_be(:group_user) { create(:user) }
-
- it 'adds user to members', :aggregate_failures, :snowplow do
- put :members_update, params: {
- id: group,
- user_id: group_user.id,
- access_level: Gitlab::Access::GUEST
- }
-
- expect(controller).to set_flash.to 'Users were successfully added.'
- expect(response).to redirect_to(admin_group_path(group))
- expect(group.users).to include group_user
- expect_snowplow_event(
- category: 'Members::CreateService',
- action: 'create_member',
- label: 'admin-group-page',
- property: 'existing_user',
- user: admin
- )
- end
-
- it 'can add unlimited members', :aggregate_failures do
- put :members_update, params: {
- id: group,
- user_id: 1.upto(1000).to_a.join(','),
- access_level: Gitlab::Access::GUEST
- }
-
- expect(controller).to set_flash.to 'Users were successfully added.'
- expect(response).to redirect_to(admin_group_path(group))
- end
-
- it 'adds no user to members', :aggregate_failures do
- put :members_update, params: {
- id: group,
- user_id: '',
- access_level: Gitlab::Access::GUEST
- }
-
- expect(controller).to set_flash.to 'No users specified.'
- expect(response).to redirect_to(admin_group_path(group))
- expect(group.users).not_to include group_user
- end
-
- it 'updates the project_creation_level successfully' do
- expect do
- post :update, params: { id: group.to_param, group: { project_creation_level: ::Gitlab::Access::NO_ONE_PROJECT_ACCESS } }
- end.to change { group.reload.project_creation_level }.to(::Gitlab::Access::NO_ONE_PROJECT_ACCESS)
- end
-
- it 'updates the subgroup_creation_level successfully' do
- expect do
- post :update,
- params: { id: group.to_param,
- group: { subgroup_creation_level: ::Gitlab::Access::OWNER_SUBGROUP_ACCESS } }
- end.to change { group.reload.subgroup_creation_level }.to(::Gitlab::Access::OWNER_SUBGROUP_ACCESS)
- end
- end
end
diff --git a/spec/controllers/admin/usage_trends_controller_spec.rb b/spec/controllers/admin/usage_trends_controller_spec.rb
index 356f603bf57..87cf8988b4e 100644
--- a/spec/controllers/admin/usage_trends_controller_spec.rb
+++ b/spec/controllers/admin/usage_trends_controller_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Admin::UsageTrendsController do
let(:target_id) { 'i_analytics_instance_statistics' }
end
- it_behaves_like 'Snowplow event tracking' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject { get :index }
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
diff --git a/spec/controllers/autocomplete_controller_spec.rb b/spec/controllers/autocomplete_controller_spec.rb
index 70e58124d21..e9b39d44e46 100644
--- a/spec/controllers/autocomplete_controller_spec.rb
+++ b/spec/controllers/autocomplete_controller_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe AutocompleteController do
end
context 'user order' do
- it 'shows exact matches first' do
+ it 'shows exact matches first', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/375028' do
reported_user = create(:user, username: 'reported_user', name: 'Doug')
user = create(:user, username: 'user', name: 'User')
user1 = create(:user, username: 'user1', name: 'Ian')
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
deleted file mode 100644
index 3e1cdfccc61..00000000000
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ /dev/null
@@ -1,596 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Boards::IssuesController do
- include ExternalAuthorizationServiceHelpers
-
- let(:project) { create(:project, :private) }
- let(:board) { create(:board, project: project) }
- let(:user) { create(:user) }
- let(:guest) { create(:user) }
-
- let(:planning) { create(:label, project: project, name: 'Planning') }
- let(:development) { create(:label, project: project, name: 'Development') }
-
- let!(:list1) { create(:list, board: board, label: planning, position: 0) }
- let!(:list2) { create(:list, board: board, label: development, position: 1) }
-
- before do
- project.add_maintainer(user)
- project.add_guest(guest)
- end
-
- describe 'GET index', :request_store do
- let(:johndoe) { create(:user, avatar: fixture_file_upload(File.join('spec/fixtures/dk.png'))) }
-
- context 'with invalid board id' do
- it 'returns a not found 404 response' do
- list_issues user: user, board: non_existing_record_id, list: list2
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when list id is present' do
- context 'with valid list id' do
- let(:group) { create(:group, :private, projects: [project]) }
- let(:group_board) { create(:board, group: group) }
- let!(:list3) { create(:list, board: group_board, label: development, position: 2) }
- let(:sub_group_1) { create(:group, :private, parent: group) }
-
- before do
- group.add_maintainer(user)
- end
-
- it 'returns issues that have the list label applied' do
- issue = create(:labeled_issue, project: project, labels: [planning])
- create(:labeled_issue, project: project, labels: [planning])
- create(:labeled_issue, project: project, labels: [development], due_date: Date.tomorrow)
- create(:labeled_issue, project: project, labels: [development], assignees: [johndoe])
- issue.subscribe(johndoe, project)
- expect(Issue).to receive(:move_nulls_to_end)
-
- list_issues user: user, board: board, list: list2
-
- expect(response).to match_response_schema('entities/issue_boards')
- expect(json_response['issues'].length).to eq 2
- expect(development.issues.map(&:relative_position)).not_to include(nil)
- end
-
- it 'returns issues by closed_at in descending order in closed list' do
- create(:closed_issue, project: project, title: 'New Issue 1', closed_at: 1.day.ago)
- create(:closed_issue, project: project, title: 'New Issue 2', closed_at: 1.week.ago)
-
- list_issues user: user, board: board, list: board.lists.last.id
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['issues'].length).to eq(2)
- expect(json_response['issues'][0]['title']).to eq('New Issue 1')
- expect(json_response['issues'][1]['title']).to eq('New Issue 2')
- end
-
- it 'avoids N+1 database queries' do
- create(:labeled_issue, project: project, labels: [development])
- control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: board, list: list2) }.count
-
- # 25 issues is bigger than the page size
- # the relative position will ignore the `#make_sure_position_set` queries
- create_list(:labeled_issue, 25, project: project, labels: [development], assignees: [johndoe], relative_position: 1)
-
- expect { list_issues(user: user, board: board, list: list2) }.not_to exceed_query_limit(control_count)
- end
-
- it 'avoids N+1 database queries when adding a project', :request_store do
- create(:labeled_issue, project: project, labels: [development])
- control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: group_board, list: list3) }.count
-
- 2.times do
- p = create(:project, group: group)
- create(:labeled_issue, project: p, labels: [development])
- end
-
- project_2 = create(:project, group: group)
- create(:labeled_issue, project: project_2, labels: [development], assignees: [johndoe])
-
- # because each issue without relative_position must be updated with
- # a different value, we have 8 extra queries per issue
- expect { list_issues(user: user, board: group_board, list: list3) }.not_to exceed_query_limit(control_count + (2 * 8 - 1))
- end
-
- it 'avoids N+1 database queries when adding a subgroup, project, and issue' do
- create(:project, group: sub_group_1)
- create(:labeled_issue, project: project, labels: [development])
- control_count = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: group_board, list: list3) }.count
- project_2 = create(:project, group: group)
-
- 2.times do
- p = create(:project, group: sub_group_1)
- create(:labeled_issue, project: p, labels: [development])
- end
-
- create(:labeled_issue, project: project_2, labels: [development], assignees: [johndoe])
-
- expect { list_issues(user: user, board: group_board, list: list3) }.not_to exceed_query_limit(control_count + (2 * 8 - 1))
- end
-
- it 'does not query issues table more than once' do
- recorder = ActiveRecord::QueryRecorder.new { list_issues(user: user, board: board, list: list1) }
- query_count = recorder.occurrences.select { |query,| query.match?(/FROM "?issues"?/) }.each_value.first
-
- expect(query_count).to eq(1)
- end
-
- context 'when block_issue_repositioning feature flag is enabled' do
- before do
- stub_feature_flags(block_issue_repositioning: true)
- end
-
- it 'does not reposition issues with null position' do
- expect(Issue).not_to receive(:move_nulls_to_end)
-
- list_issues(user: user, board: group_board, list: list3)
- end
- end
- end
-
- context 'with invalid list id' do
- it 'returns a not found 404 response' do
- list_issues user: user, board: board, list: non_existing_record_id
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'when list id is missing' do
- it 'returns opened issues without board labels applied' do
- bug = create(:label, project: project, name: 'Bug')
- create(:issue, project: project)
- create(:labeled_issue, project: project, labels: [planning])
- create(:labeled_issue, project: project, labels: [development])
- create(:labeled_issue, project: project, labels: [bug])
-
- list_issues user: user, board: board
-
- expect(response).to match_response_schema('entities/issue_boards')
- expect(json_response['issues'].length).to eq 2
- end
- end
-
- context 'with unauthorized user' do
- let(:unauth_user) { create(:user) }
-
- it 'returns a forbidden 403 response' do
- list_issues user: unauth_user, board: board, list: list2
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'with external authorization' do
- before do
- sign_in(user)
- enable_external_authorization_service_check
- end
-
- it 'returns a 403 for group boards' do
- group = create(:group)
- group_board = create(:board, group: group)
-
- list_issues(user: user, board: group_board)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- it 'is successful for project boards' do
- project_board = create(:board, project: project)
-
- list_issues(user: user, board: project_board)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- describe 'PUT bulk_move' do
- let(:todo) { create(:group_label, group: group, name: 'Todo') }
- let(:development) { create(:group_label, group: group, name: 'Development') }
- let(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
- let(:guest) { create(:group_member, :guest, user: create(:user), group: group ).user }
- let(:project) { create(:project, group: group) }
- let(:group) { create(:group) }
- let(:board) { create(:board, project: project) }
- let(:list1) { create(:list, board: board, label: todo, position: 0) }
- let(:list2) { create(:list, board: board, label: development, position: 1) }
- let(:issue1) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 10) }
- let(:issue2) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 20) }
- let(:issue3) { create(:labeled_issue, project: project, labels: [todo], author: user, relative_position: 30) }
- let(:issue4) { create(:labeled_issue, project: project, labels: [development], author: user, relative_position: 100) }
-
- let(:move_params) do
- {
- board_id: board.id,
- ids: [issue1.id, issue2.id, issue3.id],
- from_list_id: list1.id,
- to_list_id: list2.id,
- move_before_id: issue4.id,
- move_after_id: nil
- }
- end
-
- before do
- project.add_maintainer(user)
- project.add_guest(guest)
- end
-
- shared_examples 'move issues endpoint provider' do
- before do
- sign_in(signed_in_user)
- end
-
- it 'responds as expected' do
- put :bulk_move, params: move_issues_params
- expect(response).to have_gitlab_http_status(expected_status)
-
- if expected_status == 200
- expect(json_response).to include(
- 'count' => move_issues_params[:ids].size,
- 'success' => true
- )
-
- expect(json_response['issues'].pluck('id')).to match_array(move_issues_params[:ids])
- end
- end
-
- it 'moves issues as expected' do
- put :bulk_move, params: move_issues_params
- expect(response).to have_gitlab_http_status(expected_status)
-
- list_issues user: requesting_user, board: board, list: list2
- expect(response).to have_gitlab_http_status(:ok)
-
- expect(response).to match_response_schema('entities/issue_boards')
-
- responded_issues = json_response['issues']
- expect(responded_issues.length).to eq expected_issue_count
-
- ids_in_order = responded_issues.pluck('id')
- expect(ids_in_order).to eq(expected_issue_ids_in_order)
- end
- end
-
- context 'when items are moved to another list' do
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) { move_params }
- let(:requesting_user) { user }
- let(:expected_status) { 200 }
- let(:expected_issue_count) { 4 }
- let(:expected_issue_ids_in_order) { [issue4.id, issue1.id, issue2.id, issue3.id] }
- end
- end
-
- context 'when moving just one issue' do
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:ids] = [issue2.id]
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 200 }
- let(:expected_issue_count) { 2 }
- let(:expected_issue_ids_in_order) { [issue4.id, issue2.id] }
- end
- end
-
- context 'when user is not allowed to move issue' do
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { guest }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:ids] = [issue2.id]
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 403 }
- let(:expected_issue_count) { 1 }
- let(:expected_issue_ids_in_order) { [issue4.id] }
- end
- end
-
- context 'when issues should be moved visually above existing issue in list' do
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:move_after_id] = issue4.id
- hash[:move_before_id] = nil
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 200 }
- let(:expected_issue_count) { 4 }
- let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id, issue4.id] }
- end
- end
-
- context 'when destination list is empty' do
- before do
- # Remove issue from list
- issue4.labels -= [development]
- issue4.save!
- end
-
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:move_before_id] = nil
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 200 }
- let(:expected_issue_count) { 3 }
- let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id] }
- end
- end
-
- context 'when no position arguments are given' do
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:move_before_id] = nil
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 200 }
- let(:expected_issue_count) { 4 }
- let(:expected_issue_ids_in_order) { [issue1.id, issue2.id, issue3.id, issue4.id] }
- end
- end
-
- context 'when move_before_id and move_after_id are given' do
- let(:issue5) { create(:labeled_issue, project: project, labels: [development], author: user, relative_position: 90) }
-
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:move_before_id] = issue5.id
- hash[:move_after_id] = issue4.id
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 200 }
- let(:expected_issue_count) { 5 }
- let(:expected_issue_ids_in_order) { [issue5.id, issue1.id, issue2.id, issue3.id, issue4.id] }
- end
- end
-
- context 'when request contains too many issues' do
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:ids] = (0..51).to_a
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 422 }
- let(:expected_issue_count) { 1 }
- let(:expected_issue_ids_in_order) { [issue4.id] }
- end
- end
-
- context 'when request is malformed' do
- it_behaves_like 'move issues endpoint provider' do
- let(:signed_in_user) { user }
- let(:move_issues_params) do
- move_params.dup.tap do |hash|
- hash[:ids] = 'foobar'
- end
- end
-
- let(:requesting_user) { user }
- let(:expected_status) { 400 }
- let(:expected_issue_count) { 1 }
- let(:expected_issue_ids_in_order) { [issue4.id] }
- end
- end
- end
-
- def list_issues(user:, board:, list: nil)
- sign_in(user)
-
- params = {
- board_id: board.to_param,
- list_id: list.try(:to_param)
- }
-
- unless board.try(:parent).is_a?(Group)
- params[:namespace_id] = project.namespace.to_param
- params[:project_id] = project
- end
-
- get :index, params: params.compact
- end
- end
-
- describe 'POST create' do
- context 'when trying to create issue on an unauthorized project' do
- let(:unauthorized_project) { create(:project, :private) }
- let(:issue_params) { { project_id: unauthorized_project.id } }
-
- it 'creates the issue on the board\'s project' do
- expect do
- create_issue user: user, board: board, list: list1, title: 'New issue', additional_issue_params: issue_params
- end.to change(Issue, :count).by(1)
-
- created_issue = Issue.last
-
- expect(created_issue.project).to eq(project)
- expect(unauthorized_project.reload.issues.count).to eq(0)
- end
- end
-
- context 'with valid params' do
- before do
- create_issue user: user, board: board, list: list1, title: 'New issue'
- end
-
- it 'returns a successful 200 response' do
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'returns the created issue' do
- expect(response).to match_response_schema('entities/issue_board')
- end
-
- it 'sets the default work_item_type' do
- expect(Issue.last.work_item_type.base_type).to eq('issue')
- end
- end
-
- context 'with invalid params' do
- context 'when title is nil' do
- it 'returns an unprocessable entity 422 response' do
- create_issue user: user, board: board, list: list1, title: nil
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- context 'when list does not belongs to project board' do
- it 'returns a not found 404 response' do
- list = create(:list)
-
- create_issue user: user, board: board, list: list, title: 'New issue'
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with invalid board id' do
- it 'returns a not found 404 response' do
- create_issue user: user, board: non_existing_record_id, list: list1, title: 'New issue'
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with invalid list id' do
- it 'returns a not found 404 response' do
- create_issue user: user, board: board, list: non_existing_record_id, title: 'New issue'
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'with guest user' do
- context 'in open list' do
- it 'returns a successful 200 response' do
- open_list = board.lists.create!(list_type: :backlog)
- create_issue user: guest, board: board, list: open_list, title: 'New issue'
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'in label list' do
- it 'returns a forbidden 403 response' do
- create_issue user: guest, board: board, list: list1, title: 'New issue'
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- def create_issue(user:, board:, list:, title:, additional_issue_params: {})
- sign_in(user)
-
- post :create, params: {
- board_id: board.to_param,
- list_id: list.to_param,
- issue: { title: title, project_id: project.id }.merge(additional_issue_params)
- },
- format: :json
- end
- end
-
- describe 'PATCH update' do
- let!(:issue) { create(:labeled_issue, project: project, labels: [planning]) }
-
- context 'with valid params' do
- it 'returns a successful 200 response' do
- move user: user, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'moves issue to the desired list' do
- move user: user, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
-
- expect(issue.reload.labels).to contain_exactly(development)
- end
- end
-
- context 'with invalid params' do
- it 'returns a unprocessable entity 422 response for invalid lists' do
- move user: user, board: board, issue: issue, from_list_id: nil, to_list_id: nil
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
-
- it 'returns a not found 404 response for invalid board id' do
- move user: user, board: non_existing_record_id, issue: issue, from_list_id: list1.id, to_list_id: list2.id
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it 'returns a not found 404 response for invalid issue id' do
- move user: user, board: board, issue: double(id: non_existing_record_id), from_list_id: list1.id, to_list_id: list2.id
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with unauthorized user' do
- let(:guest) { create(:user) }
-
- before do
- project.add_guest(guest)
- end
-
- it 'returns a forbidden 403 response' do
- move user: guest, board: board, issue: issue, from_list_id: list1.id, to_list_id: list2.id
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- def move(user:, board:, issue:, from_list_id:, to_list_id:)
- sign_in(user)
-
- patch :update, params: {
- namespace_id: project.namespace.to_param,
- project_id: project.id,
- board_id: board.to_param,
- id: issue.id,
- from_list_id: from_list_id,
- to_list_id: to_list_id
- },
- format: :json
- end
- end
-end
diff --git a/spec/controllers/boards/lists_controller_spec.rb b/spec/controllers/boards/lists_controller_spec.rb
deleted file mode 100644
index 95334974e66..00000000000
--- a/spec/controllers/boards/lists_controller_spec.rb
+++ /dev/null
@@ -1,333 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Boards::ListsController do
- let(:project) { create(:project) }
- let(:board) { create(:board, project: project) }
- let(:user) { create(:user) }
- let(:guest) { create(:user) }
-
- before do
- project.add_maintainer(user)
- project.add_guest(guest)
- end
-
- describe 'GET index' do
- before do
- create(:list, board: board)
- end
-
- it 'returns a successful 200 response' do
- read_board_list user: user, board: board
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq 'application/json'
- end
-
- it 'returns a list of board lists' do
- read_board_list user: user, board: board
-
- expect(response).to match_response_schema('lists')
- expect(json_response.length).to eq 3
- end
-
- context 'when another user has list preferences' do
- before do
- board.lists.first.update_preferences_for(guest, collapsed: true)
- end
-
- it 'returns the complete list of board lists' do
- read_board_list user: user, board: board
-
- expect(json_response.length).to eq 3
- end
- end
-
- context 'with unauthorized user' do
- let(:unauth_user) { create(:user) }
-
- it 'returns a forbidden 403 response' do
- read_board_list user: unauth_user, board: board
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- def read_board_list(user:, board:)
- sign_in(user)
-
- get :index, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- board_id: board.to_param
- },
- format: :json
- end
- end
-
- describe 'POST create' do
- context 'with valid params' do
- let(:label) { create(:label, project: project, name: 'Development') }
-
- it 'returns a successful 200 response' do
- create_board_list user: user, board: board, label_id: label.id
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'returns the created list' do
- create_board_list user: user, board: board, label_id: label.id
-
- expect(response).to match_response_schema('list')
- end
- end
-
- context 'with invalid params' do
- context 'when label is nil' do
- it 'returns an unprocessable entity 422 response' do
- create_board_list user: user, board: board, label_id: nil
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['errors']).to eq(['Label not found'])
- end
- end
-
- context 'when label that does not belongs to project' do
- it 'returns an unprocessable entity 422 response' do
- label = create(:label, name: 'Development')
-
- create_board_list user: user, board: board, label_id: label.id
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['errors']).to eq(['Label not found'])
- end
- end
- end
-
- context 'with unauthorized user' do
- it 'returns a forbidden 403 response' do
- label = create(:label, project: project, name: 'Development')
-
- create_board_list user: guest, board: board, label_id: label.id
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- def create_board_list(user:, board:, label_id:)
- sign_in(user)
-
- post :create, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- board_id: board.to_param,
- list: { label_id: label_id }
- },
- format: :json
- end
- end
-
- describe 'PATCH update' do
- let!(:planning) { create(:list, board: board, position: 0) }
- let!(:development) { create(:list, board: board, position: 1) }
-
- context 'with valid position' do
- it 'returns a successful 200 response' do
- move user: user, board: board, list: planning, position: 1
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'moves the list to the desired position' do
- move user: user, board: board, list: planning, position: 1
-
- expect(planning.reload.position).to eq 1
- end
- end
-
- context 'with invalid position' do
- it 'returns an unprocessable entity 422 response' do
- move user: user, board: board, list: planning, position: 6
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- context 'with invalid list id' do
- it 'returns a not found 404 response' do
- move user: user, board: board, list: non_existing_record_id, position: 1
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with unauthorized user' do
- it 'returns a 422 unprocessable entity response' do
- move user: guest, board: board, list: planning, position: 6
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- context 'with collapsed preference' do
- it 'saves collapsed preference for user' do
- save_setting user: user, board: board, list: planning, setting: { collapsed: true }
-
- expect(planning.preferences_for(user).collapsed).to eq(true)
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'saves not collapsed preference for user' do
- save_setting user: user, board: board, list: planning, setting: { collapsed: false }
-
- expect(planning.preferences_for(user).collapsed).to eq(false)
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'with a list_type other than :label' do
- let!(:closed) { create(:closed_list, board: board, position: 2) }
-
- it 'saves collapsed preference for user' do
- save_setting user: user, board: board, list: closed, setting: { collapsed: true }
-
- expect(closed.preferences_for(user).collapsed).to eq(true)
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'saves not collapsed preference for user' do
- save_setting user: user, board: board, list: closed, setting: { collapsed: false }
-
- expect(closed.preferences_for(user).collapsed).to eq(false)
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- def move(user:, board:, list:, position:)
- sign_in(user)
-
- params = { namespace_id: project.namespace.to_param,
- project_id: project.id,
- board_id: board.to_param,
- id: list.to_param,
- list: { position: position },
- format: :json }
-
- patch :update, params: params, as: :json
- end
-
- def save_setting(user:, board:, list:, setting: {})
- sign_in(user)
-
- params = { namespace_id: project.namespace.to_param,
- project_id: project.id,
- board_id: board.to_param,
- id: list.to_param,
- list: setting,
- format: :json }
-
- patch :update, params: params, as: :json
- end
- end
-
- describe 'DELETE destroy' do
- let!(:planning) { create(:list, board: board, position: 0) }
-
- context 'with valid list id' do
- it 'returns a successful 200 response' do
- remove_board_list user: user, board: board, list: planning
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'removes list from board' do
- expect { remove_board_list user: user, board: board, list: planning }.to change(board.lists, :size).by(-1)
- end
- end
-
- context 'with invalid list id' do
- it 'returns a not found 404 response' do
- remove_board_list user: user, board: board, list: non_existing_record_id
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'with unauthorized user' do
- it 'returns a forbidden 403 response' do
- remove_board_list user: guest, board: board, list: planning
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'with an error service response' do
- it 'returns an unprocessable entity response' do
- allow(Boards::Lists::DestroyService).to receive(:new)
- .and_return(double(execute: ServiceResponse.error(message: 'error')))
-
- remove_board_list user: user, board: board, list: planning
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- def remove_board_list(user:, board:, list:)
- sign_in(user)
-
- delete :destroy, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- board_id: board.to_param,
- id: list.to_param
- },
- format: :json
- end
- end
-
- describe 'POST generate' do
- context 'when board lists is empty' do
- it 'returns a successful 200 response' do
- generate_default_lists user: user, board: board
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'returns the defaults lists' do
- generate_default_lists user: user, board: board
-
- expect(response).to match_response_schema('lists')
- end
- end
-
- context 'when board lists is not empty' do
- it 'returns an unprocessable entity 422 response' do
- create(:list, board: board)
-
- generate_default_lists user: user, board: board
-
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
- end
-
- context 'with unauthorized user' do
- it 'returns a forbidden 403 response' do
- generate_default_lists user: guest, board: board
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- def generate_default_lists(user:, board:)
- sign_in(user)
-
- post :generate, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- board_id: board.to_param
- },
- format: :json
- end
- end
-end
diff --git a/spec/controllers/concerns/boards_responses_spec.rb b/spec/controllers/concerns/boards_responses_spec.rb
deleted file mode 100644
index 553a547d42c..00000000000
--- a/spec/controllers/concerns/boards_responses_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BoardsResponses do
- let(:controller_class) do
- Class.new do
- include BoardsResponses
- end
- end
-
- subject(:controller) { controller_class.new }
-
- describe '#serialize_as_json' do
- let!(:board) { create(:board) }
-
- it 'serializes properly' do
- expected = { "id" => board.id }
-
- expect(subject.serialize_as_json(board)).to include(expected)
- end
- end
-end
diff --git a/spec/controllers/concerns/product_analytics_tracking_spec.rb b/spec/controllers/concerns/product_analytics_tracking_spec.rb
index 2e734d81ea0..28b79a10624 100644
--- a/spec/controllers/concerns/product_analytics_tracking_spec.rb
+++ b/spec/controllers/concerns/product_analytics_tracking_spec.rb
@@ -51,15 +51,21 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
end
end
- def expect_tracking(user: self.user)
+ def expect_redis_hll_tracking
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
.with('g_analytics_valuestream', values: instance_of(String))
+ end
+
+ def expect_snowplow_tracking(user)
+ context = Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: 'g_analytics_valuestream')
+ .to_context.to_json
expect_snowplow_event(
category: anything,
action: 'g_analytics_valuestream',
namespace: group,
- user: user
+ user: user,
+ context: [context]
)
end
@@ -77,7 +83,8 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
it 'tracks the event' do
get :index
- expect_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(user)
end
context 'when FF is disabled' do
@@ -97,7 +104,8 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
get :index
- expect_tracking
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(user)
end
it 'does not track the event if DNT is enabled' do
@@ -137,7 +145,8 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
get :show, params: { id: 1 }
- expect_tracking(user: nil)
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(nil)
end
end
@@ -151,21 +160,24 @@ RSpec.describe ProductAnalyticsTracking, :snowplow do
it 'tracks the event when there is custom id' do
get :show, params: { id: 1 }
- expect_tracking(user: nil)
+ expect_redis_hll_tracking
+ expect_snowplow_tracking(nil)
end
- it 'does not track the HLL event when there is no custom id' do
- allow(controller).to receive(:get_custom_id).and_return(nil)
+ context 'when there is no custom_id set' do
+ before do
+ allow(controller).to receive(:get_custom_id).and_return(nil)
- get :show, params: { id: 2 }
+ get :show, params: { id: 2 }
+ end
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- expect_snowplow_event(
- category: anything,
- action: 'g_analytics_valuestream',
- namespace: group,
- user: nil
- )
+ it 'does not track the HLL event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ end
+
+ it 'tracks Snowplow event' do
+ expect_snowplow_tracking(nil)
+ end
end
end
end
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
index f9a6afb95ea..32304815bbb 100644
--- a/spec/controllers/concerns/send_file_upload_spec.rb
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -96,9 +96,10 @@ RSpec.describe SendFileUpload do
expect(controller).to receive(:params).at_least(:once).and_return(width: '64')
expect(controller).to receive(:head).with(:ok)
- expect(Gitlab::Workhorse).to receive(:send_scaled_image).with(a_string_matching('^(/.+|https://.+)'), 64, 'image/png').and_return([
- Gitlab::Workhorse::SEND_DATA_HEADER, "send-scaled-img:faux"
- ])
+ expect(Gitlab::Workhorse).to receive(:send_scaled_image)
+ .with(a_string_matching('^(/.+|https://.+)'), 64, 'image/png')
+ .and_return([Gitlab::Workhorse::SEND_DATA_HEADER, "send-scaled-img:faux"])
+
expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, "send-scaled-img:faux")
subject
diff --git a/spec/controllers/dashboard_controller_spec.rb b/spec/controllers/dashboard_controller_spec.rb
index aed310531e6..21810f64cb4 100644
--- a/spec/controllers/dashboard_controller_spec.rb
+++ b/spec/controllers/dashboard_controller_spec.rb
@@ -4,11 +4,14 @@ require 'spec_helper'
RSpec.describe DashboardController do
context 'signed in' do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
- before do
+ before_all do
project.add_maintainer(user)
+ end
+
+ before do
sign_in(user)
end
@@ -30,6 +33,28 @@ RSpec.describe DashboardController do
end
it_behaves_like 'issuables requiring filter', :issues
+
+ it 'includes tasks in issue list' do
+ task = create(:work_item, :task, project: project, author: user)
+
+ get :issues, params: { author_id: user.id }
+
+ expect(assigns[:issues].map(&:id)).to include(task.id)
+ end
+
+ context 'when work_items is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'does not include tasks in issue list' do
+ task = create(:work_item, :task, project: project, author: user)
+
+ get :issues, params: { author_id: user.id }
+
+ expect(assigns[:issues].map(&:id)).not_to include(task.id)
+ end
+ end
end
describe 'GET merge requests' do
diff --git a/spec/controllers/groups/boards_controller_spec.rb b/spec/controllers/groups/boards_controller_spec.rb
index 6201cddecb0..4e441f86765 100644
--- a/spec/controllers/groups/boards_controller_spec.rb
+++ b/spec/controllers/groups/boards_controller_spec.rb
@@ -3,11 +3,14 @@
require 'spec_helper'
RSpec.describe Groups::BoardsController do
- let(:group) { create(:group) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
- before do
+ before_all do
group.add_maintainer(user)
+ end
+
+ before do
sign_in(user)
end
@@ -57,46 +60,17 @@ RSpec.describe Groups::BoardsController do
end
end
- context 'when format is JSON' do
- it 'return an array with one group board' do
- create(:board, group: group)
-
- expect(Boards::VisitsFinder).not_to receive(:new)
-
- list_boards format: :json
-
- expect(response).to match_response_schema('boards')
- expect(json_response.length).to eq 1
- end
-
- context 'with unauthorized user' do
- before do
- expect(Ability).to receive(:allowed?).with(user, :log_in, :global).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_issue_board, group).and_return(false)
- end
-
- it 'returns a not found 404 response' do
- list_boards format: :json
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.media_type).to eq 'application/json'
- end
- end
- end
-
it_behaves_like 'disabled when using an external authorization service' do
subject { list_boards }
end
- def list_boards(format: :html)
- get :index, params: { group_id: group }, format: format
+ def list_boards
+ get :index, params: { group_id: group }
end
end
describe 'GET show' do
- let!(:board) { create(:board, group: group) }
+ let_it_be(:board) { create(:board, group: group) }
context 'when format is HTML' do
it 'renders template' do
@@ -123,12 +97,12 @@ RSpec.describe Groups::BoardsController do
end
context 'when user is signed out' do
- let(:group) { create(:group, :public) }
+ let(:public_board) { create(:board, group: create(:group, :public)) }
it 'does not save visit' do
sign_out(user)
- expect { read_board board: board }.to change(BoardGroupRecentVisit, :count).by(0)
+ expect { read_board board: public_board }.to change(BoardGroupRecentVisit, :count).by(0)
expect(response).to render_template :show
expect(response.media_type).to eq 'text/html'
@@ -136,37 +110,11 @@ RSpec.describe Groups::BoardsController do
end
end
- context 'when format is JSON' do
- it 'returns project board' do
- expect(Boards::Visits::CreateService).not_to receive(:new)
-
- read_board board: board, format: :json
-
- expect(response).to match_response_schema('board')
- end
-
- context 'with unauthorized user' do
- before do
- expect(Ability).to receive(:allowed?).with(user, :log_in, :global).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(false)
- end
-
- it 'returns a not found 404 response' do
- read_board board: board, format: :json
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.media_type).to eq 'application/json'
- end
- end
- end
-
context 'when board does not belong to group' do
it 'returns a not found 404 response' do
another_board = create(:board)
- read_board board: another_board
+ get :show, params: { group_id: group, id: another_board.to_param }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -176,12 +124,8 @@ RSpec.describe Groups::BoardsController do
subject { read_board board: board }
end
- def read_board(board:, format: :html)
- get :show, params: {
- group_id: group,
- id: board.to_param
- },
- format: format
+ def read_board(board:)
+ get :show, params: { group_id: board.group, id: board.to_param }
end
end
end
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index 77c62c0d930..6dbf0803892 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -8,9 +8,11 @@ RSpec.describe Groups::RunnersController do
let_it_be(:project) { create(:project, group: group) }
let!(:runner) { create(:ci_runner, :group, groups: [group]) }
- let!(:runner_project) { create(:ci_runner, :project, projects: [project]) }
+ let!(:project_runner) { create(:ci_runner, :project, projects: [project]) }
+ let!(:instance_runner) { create(:ci_runner, :instance) }
- let(:params_runner_project) { { group_id: group, id: runner_project } }
+ let(:params_runner_project) { { group_id: group, id: project_runner } }
+ let(:params_runner_instance) { { group_id: group, id: instance_runner } }
let(:params) { { group_id: group, id: runner } }
before do
@@ -70,8 +72,15 @@ RSpec.describe Groups::RunnersController do
expect(response).to render_template(:show)
end
+ it 'renders show with 200 status code instance runner' do
+ get :show, params: { group_id: group, id: instance_runner }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
+ end
+
it 'renders show with 200 status code project runner' do
- get :show, params: { group_id: group, id: runner_project }
+ get :show, params: { group_id: group, id: project_runner }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:show)
@@ -89,8 +98,14 @@ RSpec.describe Groups::RunnersController do
expect(response).to have_gitlab_http_status(:not_found)
end
+ it 'renders a 404 instance runner' do
+ get :show, params: { group_id: group, id: instance_runner }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
it 'renders a 404 project runner' do
- get :show, params: { group_id: group, id: runner_project }
+ get :show, params: { group_id: group, id: project_runner }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -103,15 +118,21 @@ RSpec.describe Groups::RunnersController do
group.add_owner(user)
end
- it 'renders show with 200 status code' do
+ it 'renders edit with 200 status code' do
get :edit, params: { group_id: group, id: runner }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
end
- it 'renders show with 200 status code project runner' do
- get :edit, params: { group_id: group, id: runner_project }
+ it 'renders a 404 instance runner' do
+ get :edit, params: { group_id: group, id: instance_runner }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'renders edit with 200 status code project runner' do
+ get :edit, params: { group_id: group, id: project_runner }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
@@ -130,7 +151,7 @@ RSpec.describe Groups::RunnersController do
end
it 'renders a 404 project runner' do
- get :edit, params: { group_id: group, id: runner_project }
+ get :edit, params: { group_id: group, id: project_runner }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -154,15 +175,26 @@ RSpec.describe Groups::RunnersController do
expect(runner.reload.description).to eq(new_desc)
end
+ it 'does not update the instance runner' do
+ new_desc = instance_runner.description.swapcase
+
+ expect do
+ post :update, params: params_runner_instance.merge(runner: { description: new_desc } )
+ end.to not_change { instance_runner.ensure_runner_queue_value }
+ .and not_change { instance_runner.description }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
it 'updates the project runner, ticks the queue, and redirects project runner' do
- new_desc = runner_project.description.swapcase
+ new_desc = project_runner.description.swapcase
expect do
post :update, params: params_runner_project.merge(runner: { description: new_desc } )
- end.to change { runner_project.ensure_runner_queue_value }
+ end.to change { project_runner.ensure_runner_queue_value }
expect(response).to have_gitlab_http_status(:found)
- expect(runner_project.reload.description).to eq(new_desc)
+ expect(project_runner.reload.description).to eq(new_desc)
end
end
@@ -182,15 +214,26 @@ RSpec.describe Groups::RunnersController do
expect(runner.reload.description).to eq(old_desc)
end
+ it 'rejects the update and responds 404 instance runner' do
+ old_desc = instance_runner.description
+
+ expect do
+ post :update, params: params_runner_instance.merge(runner: { description: old_desc.swapcase } )
+ end.not_to change { instance_runner.ensure_runner_queue_value }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(instance_runner.reload.description).to eq(old_desc)
+ end
+
it 'rejects the update and responds 404 project runner' do
- old_desc = runner_project.description
+ old_desc = project_runner.description
expect do
post :update, params: params_runner_project.merge(runner: { description: old_desc.swapcase } )
- end.not_to change { runner_project.ensure_runner_queue_value }
+ end.not_to change { project_runner.ensure_runner_queue_value }
expect(response).to have_gitlab_http_status(:not_found)
- expect(runner_project.reload.description).to eq(old_desc)
+ expect(project_runner.reload.description).to eq(old_desc)
end
end
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index c4e4eeec953..5bbe236077c 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe GroupsController, factory_default: :keep do
sign_in(user)
expect do
- post :create, params: { group: { name: 'new_group', path: "new_group" } }
+ post :create, params: { group: { name: 'new_group', path: 'new_group' } }
end.to change { Group.count }.by(1)
expect(response).to have_gitlab_http_status(:found)
@@ -240,13 +240,31 @@ RSpec.describe GroupsController, factory_default: :keep do
sign_in(create(:admin))
expect do
- post :create, params: { group: { name: 'new_group', path: "new_group" } }
+ post :create, params: { group: { name: 'new_group', path: 'new_group' } }
end.to change { Group.count }.by(1)
expect(response).to have_gitlab_http_status(:found)
end
end
+ context 'when creating chat team' do
+ before do
+ stub_mattermost_setting(enabled: true)
+ end
+
+ it 'triggers Mattermost::CreateTeamService' do
+ sign_in(user)
+
+ expect_next_instance_of(::Mattermost::CreateTeamService) do |service|
+ expect(service).to receive(:execute).and_return({ name: 'test-chat-team', id: 1 })
+ end
+
+ post :create, params: { group: { name: 'new_group', path: 'new_group', create_chat_team: 1 } }
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
context 'when creating subgroups' do
[true, false].each do |can_create_group_status|
context "and can_create_group is #{can_create_group_status}" do
diff --git a/spec/controllers/health_check_controller_spec.rb b/spec/controllers/health_check_controller_spec.rb
index 7f55c4407dd..47290b1d0d6 100644
--- a/spec/controllers/health_check_controller_spec.rb
+++ b/spec/controllers/health_check_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe HealthCheckController, :request_store do
+RSpec.describe HealthCheckController, :request_store, :use_clean_rails_memory_store_caching do
include StubENV
let(:xml_response) { Hash.from_xml(response.body)['hash'] }
@@ -93,12 +93,13 @@ RSpec.describe HealthCheckController, :request_store do
context 'when a service is down and an endpoint is accessed from whitelisted ip' do
before do
- allow(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire')
- allow(HealthCheck::Utils).to receive(:process_checks).with(['email']).and_return('Email is on fire')
+ allow(::HealthCheck).to receive(:include_error_in_response_body).and_return(true)
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
end
it 'supports failure plaintext response' do
+ expect(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire')
+
get :index
expect(response).to have_gitlab_http_status(:internal_server_error)
@@ -107,6 +108,8 @@ RSpec.describe HealthCheckController, :request_store do
end
it 'supports failure json response' do
+ expect(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire')
+
get :index, format: :json
expect(response).to have_gitlab_http_status(:internal_server_error)
@@ -116,6 +119,8 @@ RSpec.describe HealthCheckController, :request_store do
end
it 'supports failure xml response' do
+ expect(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire')
+
get :index, format: :xml
expect(response).to have_gitlab_http_status(:internal_server_error)
@@ -125,6 +130,8 @@ RSpec.describe HealthCheckController, :request_store do
end
it 'supports failure responses for specific checks' do
+ expect(HealthCheck::Utils).to receive(:process_checks).with(['email']).and_return('Email is on fire')
+
get :index, params: { checks: 'email' }, format: :json
expect(response).to have_gitlab_http_status(:internal_server_error)
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 3be12717664..a0bb39f3e98 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -247,10 +247,10 @@ RSpec.describe Import::BulkImportsController do
"source_full_path" => "full_path",
"destination_slug" => "destination_name",
"destination_namespace" => "root" },
- { "source_type" => "group_entity2",
- "source_full_path" => "full_path2",
- "destination_slug" => "destination_name2",
- "destination_namespace" => "root" }]
+ { "source_type" => "group_entity",
+ "source_full_path" => "full_path",
+ "destination_slug" => "destination_name",
+ "destination_namespace" => "invalid-namespace" }]
end
before do
@@ -308,6 +308,21 @@ RSpec.describe Import::BulkImportsController do
expect(json_response).to match_array([{ "success" => true, "id" => bulk_import.id, "message" => nil }])
end
end
+
+ context 'when source type is project' do
+ let(:bulk_import_params) do
+ [{ "source_type" => "project_entity",
+ "source_full_path" => "full_path",
+ "destination_slug" => "destination_name",
+ "destination_namespace" => "root" }]
+ end
+
+ it 'returns 422' do
+ post :create, params: { bulk_import: bulk_import_params }
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
end
end
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index 269eb62cae6..f3632e7370c 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -44,13 +44,6 @@ RSpec.describe Import::GithubController do
end
describe "GET callback" do
- before do
- allow(controller).to receive(:get_token).and_return(token)
- allow(controller).to receive(:oauth_options).and_return({})
-
- stub_omniauth_provider('github')
- end
-
context "when auth state param is missing from session" do
it "reports an error" do
get :callback
@@ -63,17 +56,31 @@ RSpec.describe Import::GithubController do
context "when auth state param is present in session" do
let(:valid_auth_state) { "secret-state" }
- before do
- session[:github_auth_state_key] = valid_auth_state
- end
+ context 'when remove_legacy_github_client feature is disabled' do
+ before do
+ stub_feature_flags(remove_legacy_github_client: false)
+ allow_next_instance_of(Gitlab::LegacyGithubImport::Client) do |client|
+ allow(client).to receive(:get_token).and_return(token)
+ end
+ session[:github_auth_state_key] = valid_auth_state
+ end
+
+ it "updates access token if state param is valid" do
+ token = "asdasd12345"
- it "updates access token if state param is valid" do
- token = "asdasd12345"
+ get :callback, params: { state: valid_auth_state }
- get :callback, params: { state: valid_auth_state }
+ expect(session[:github_access_token]).to eq(token)
+ expect(controller).to redirect_to(status_import_github_url)
+ end
+
+ it "includes namespace_id from query params if it is present" do
+ namespace_id = 1
+
+ get :callback, params: { state: valid_auth_state, namespace_id: namespace_id }
- expect(session[:github_access_token]).to eq(token)
- expect(controller).to redirect_to(status_import_github_url)
+ expect(controller).to redirect_to(status_import_github_url(namespace_id: namespace_id))
+ end
end
it "reports an error if state param is invalid" do
@@ -83,12 +90,31 @@ RSpec.describe Import::GithubController do
expect(flash[:alert]).to eq('Access denied to your GitHub account.')
end
- it "includes namespace_id from query params if it is present" do
- namespace_id = 1
+ context 'when remove_legacy_github_client feature is enabled' do
+ before do
+ stub_feature_flags(remove_legacy_github_client: true)
+ allow_next_instance_of(OAuth2::Client) do |client|
+ allow(client).to receive_message_chain(:auth_code, :get_token, :token).and_return(token)
+ end
+ session[:github_auth_state_key] = valid_auth_state
+ end
+
+ it "updates access token if state param is valid" do
+ token = "asdasd12345"
- get :callback, params: { state: valid_auth_state, namespace_id: namespace_id }
+ get :callback, params: { state: valid_auth_state }
+
+ expect(session[:github_access_token]).to eq(token)
+ expect(controller).to redirect_to(status_import_github_url)
+ end
- expect(controller).to redirect_to(status_import_github_url(namespace_id: namespace_id))
+ it "includes namespace_id from query params if it is present" do
+ namespace_id = 1
+
+ get :callback, params: { state: valid_auth_state, namespace_id: namespace_id }
+
+ expect(controller).to redirect_to(status_import_github_url(namespace_id: namespace_id))
+ end
end
end
end
@@ -218,7 +244,7 @@ RSpec.describe Import::GithubController do
it 'makes request to github search api' do
expect_next_instance_of(Octokit::Client) do |client|
- expect(client).to receive(:user).and_return(double(login: user_login))
+ expect(client).to receive(:user).and_return({ login: user_login })
expect(client).to receive(:search_repositories).with(search_query, { page: 1, per_page: 25 }).and_return({ items: [].to_enum })
end
@@ -234,7 +260,7 @@ RSpec.describe Import::GithubController do
context 'when no page is specified' do
it 'requests first page' do
expect_next_instance_of(Octokit::Client) do |client|
- expect(client).to receive(:user).and_return(double(login: user_login))
+ expect(client).to receive(:user).and_return({ login: user_login })
expect(client).to receive(:search_repositories).with(search_query, { page: 1, per_page: 25 }).and_return({ items: [].to_enum })
end
@@ -250,7 +276,7 @@ RSpec.describe Import::GithubController do
context 'when page is specified' do
it 'requests repos with specified page' do
expect_next_instance_of(Octokit::Client) do |client|
- expect(client).to receive(:user).and_return(double(login: user_login))
+ expect(client).to receive(:user).and_return({ login: user_login })
expect(client).to receive(:search_repositories).with(search_query, { page: 2, per_page: 25 }).and_return({ items: [].to_enum })
end
@@ -321,4 +347,37 @@ RSpec.describe Import::GithubController do
expect(json_response[0]['stats']).to include('imported')
end
end
+
+ describe "POST cancel" do
+ let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://fake.url') }
+
+ context 'when project import was canceled' do
+ before do
+ allow(Import::Github::CancelProjectImportService)
+ .to receive(:new).with(project, user)
+ .and_return(double(execute: { status: :success, project: project }))
+ end
+
+ it 'returns success' do
+ post :cancel, params: { project_id: project.id }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when project import was not canceled' do
+ before do
+ allow(Import::Github::CancelProjectImportService)
+ .to receive(:new).with(project, user)
+ .and_return(double(execute: { status: :error, message: 'The import cannot be canceled because it is finished', http_status: :bad_request }))
+ end
+
+ it 'returns error' do
+ post :cancel, params: { project_id: project.id }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['errors']).to eq('The import cannot be canceled because it is finished')
+ end
+ end
+ end
end
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
index 99e9644da66..8dee0490fd6 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
@@ -35,6 +35,18 @@ RSpec.describe Profiles::PersonalAccessTokensController do
expect(created_token).not_to be_nil
expect(created_token.expires_at).to eq(expires_at)
end
+
+ it 'does not allow creation when personal access tokens are disabled' do
+ allow(::Gitlab::CurrentSettings).to receive_messages(personal_access_tokens_disabled?: true)
+
+ post :create, params: { personal_access_token: token_attributes }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it_behaves_like "#create access token" do
+ let(:url) { :create }
+ end
end
describe '#index' do
@@ -66,6 +78,14 @@ RSpec.describe Profiles::PersonalAccessTokensController do
)
end
+ it 'returns 404 when personal access tokens are disabled' do
+ allow(::Gitlab::CurrentSettings).to receive_messages(personal_access_tokens_disabled?: true)
+
+ get :index
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
context "access_token_pagination feature flag is enabled" do
before do
stub_feature_flags(access_token_pagination: true)
diff --git a/spec/controllers/profiles/preferences_controller_spec.rb b/spec/controllers/profiles/preferences_controller_spec.rb
index 7add3a72337..e2a216bb462 100644
--- a/spec/controllers/profiles/preferences_controller_spec.rb
+++ b/spec/controllers/profiles/preferences_controller_spec.rb
@@ -53,7 +53,8 @@ RSpec.describe Profiles::PreferencesController do
first_day_of_week: '1',
preferred_language: 'jp',
tab_width: '5',
- render_whitespace_in_code: 'true'
+ render_whitespace_in_code: 'true',
+ use_legacy_web_ide: 'true'
}.with_indifferent_access
expect(user).to receive(:assign_attributes).with(ActionController::Parameters.new(prefs).permit!)
diff --git a/spec/controllers/profiles/two_factor_auths_controller_spec.rb b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
index 33cba675777..1dd564427d3 100644
--- a/spec/controllers/profiles/two_factor_auths_controller_spec.rb
+++ b/spec/controllers/profiles/two_factor_auths_controller_spec.rb
@@ -31,13 +31,26 @@ RSpec.describe Profiles::TwoFactorAuthsController do
shared_examples 'user must enter a valid current password' do
let(:current_password) { '123' }
- let(:redirect_path) { profile_two_factor_auth_path }
+ let(:error_message) { { message: _('You must provide a valid current password') } }
it 'requires the current password', :aggregate_failures do
go
- expect(response).to redirect_to(redirect_path)
- expect(flash[:alert]).to eq(_('You must provide a valid current password'))
+ expect(assigns[:error]).to eq(error_message)
+ expect(response).to render_template(:show)
+ end
+
+ it 'assigns qr_code' do
+ code = double('qr code')
+ expect(subject).to receive(:build_qr_code).and_return(code)
+
+ go
+ expect(assigns[:qr_code]).to eq(code)
+ end
+
+ it 'assigns account_string' do
+ go
+ expect(assigns[:account_string]).to eq("#{Gitlab.config.gitlab.host}:#{user.email}")
end
context 'when the user is on the last sign in attempt' do
@@ -58,8 +71,7 @@ RSpec.describe Profiles::TwoFactorAuthsController do
it 'does not require the current password', :aggregate_failures do
go
- expect(response).not_to redirect_to(redirect_path)
- expect(flash[:alert]).to be_nil
+ expect(assigns[:error]).not_to eq(error_message)
end
end
@@ -71,8 +83,7 @@ RSpec.describe Profiles::TwoFactorAuthsController do
it 'does not require the current password', :aggregate_failures do
go
- expect(response).not_to redirect_to(redirect_path)
- expect(flash[:alert]).to be_nil
+ expect(assigns[:error]).not_to eq(error_message)
end
end
@@ -84,8 +95,7 @@ RSpec.describe Profiles::TwoFactorAuthsController do
it 'does not require the current password', :aggregate_failures do
go
- expect(response).not_to redirect_to(redirect_path)
- expect(flash[:alert]).to be_nil
+ expect(assigns[:error]).not_to eq(error_message)
end
end
end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 263f488ddbf..808e67eff3d 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe Projects::ArtifactsController do
expect(response.body).to include(
'You must have developer or higher permissions in the associated project to view job logs when debug trace is enabled. ' \
'To disable debug trace, set the &#39;CI_DEBUG_TRACE&#39; variable to &#39;false&#39; in your pipeline configuration or CI/CD settings. ' \
- 'If you need to view this job log, a project maintainer must add you to the project with developer permissions or higher.'
+ 'If you need to view this job log, a project maintainer or owner must add you to the project with developer permissions or higher.'
)
end
end
diff --git a/spec/controllers/projects/autocomplete_sources_controller_spec.rb b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
index a5274b6543e..7077aae6b45 100644
--- a/spec/controllers/projects/autocomplete_sources_controller_spec.rb
+++ b/spec/controllers/projects/autocomplete_sources_controller_spec.rb
@@ -5,37 +5,133 @@ require 'spec_helper'
RSpec.describe Projects::AutocompleteSourcesController do
let_it_be(:group, reload: true) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
- let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:public_project) { create(:project, :public, group: group) }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:issue) { create(:labeled_issue, project: project, labels: [development]) }
let_it_be(:user) { create(:user) }
def members_by_username(username)
json_response.find { |member| member['username'] == username }
end
- describe 'GET members' do
+ describe 'GET commands' do
+ before do
+ group.add_owner(user)
+ end
+
+ context 'with a public project' do
+ shared_examples 'issuable commands' do
+ it 'returns empty array when no user logged in' do
+ get :commands, format: :json, params: { namespace_id: group.path, project_id: public_project.path, type: issuable_type, type_id: issuable_iid }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq([])
+ end
+
+ it 'raises an error when no target type specified' do
+ sign_in(user)
+
+ expect { get :commands, format: :json, params: { namespace_id: group.path, project_id: project.path } }
+ .to raise_error(ActionController::ParameterMissing)
+ end
+
+ it 'returns an array of commands' do
+ sign_in(user)
+
+ get :commands, format: :json, params: { namespace_id: group.path, project_id: public_project.path, type: issuable_type, type_id: issuable_iid }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_present
+ end
+ end
+
+ context 'with an issue' do
+ let(:issuable_type) { issue.class.name }
+ let(:issuable_iid) { issue.iid }
+
+ it_behaves_like 'issuable commands'
+ end
+
+ context 'with merge request' do
+ let(:merge_request) { create(:merge_request, target_project: public_project, source_project: public_project) }
+ let(:issuable_type) { merge_request.class.name }
+ let(:issuable_iid) { merge_request.iid }
+
+ it_behaves_like 'issuable commands'
+ end
+ end
+ end
+
+ describe 'GET labels' do
before do
group.add_owner(user)
sign_in(user)
end
- it 'returns an array of member object' do
- get :members, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
+ it 'raises an error when no target type specified' do
+ expect { get :labels, format: :json, params: { namespace_id: group.path, project_id: project.path } }
+ .to raise_error(ActionController::ParameterMissing)
+ end
+
+ it 'returns an array of labels' do
+ get :labels, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
+
+ expect(json_response).to be_a(Array)
+ expect(json_response.count).to eq(1)
+ expect(json_response[0]['title']).to eq('Development')
+ end
+ end
+
+ describe 'GET members' do
+ context 'when logged in' do
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ it 'returns 400 when no target type specified' do
+ expect { get :members, format: :json, params: { namespace_id: group.path, project_id: project.path } }
+ .to raise_error(ActionController::ParameterMissing)
+ end
+
+ it 'returns an array of member object' do
+ get :members, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
+
+ expect(members_by_username('all').symbolize_keys).to include(
+ username: 'all',
+ name: 'All Project and Group Members',
+ count: 1)
+
+ expect(members_by_username(group.full_path).symbolize_keys).to include(
+ type: group.class.name,
+ name: group.full_name,
+ avatar_url: group.avatar_url,
+ count: 1)
+
+ expect(members_by_username(user.username).symbolize_keys).to include(
+ type: user.class.name,
+ name: user.name,
+ avatar_url: user.avatar_url)
+ end
+ end
+
+ context 'when anonymous' do
+ it 'redirects to login page' do
+ get :members, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
- expect(members_by_username('all').symbolize_keys).to include(
- username: 'all',
- name: 'All Project and Group Members',
- count: 1)
+ expect(response).to redirect_to new_user_session_path
+ end
- expect(members_by_username(group.full_path).symbolize_keys).to include(
- type: group.class.name,
- name: group.full_name,
- avatar_url: group.avatar_url,
- count: 1)
+ context 'with public project' do
+ it 'returns no members' do
+ get :members, format: :json, params: { namespace_id: group.path, project_id: public_project.path, type: issue.class.name, type_id: issue.id }
- expect(members_by_username(user.username).symbolize_keys).to include(
- type: user.class.name,
- name: user.name,
- avatar_url: user.avatar_url)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_a(Array)
+ expect(json_response.count).to eq(1)
+ expect(json_response.first['count']).to eq(0)
+ end
+ end
end
end
@@ -88,7 +184,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
it 'lists contacts' do
group.add_developer(user)
- get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path }
+ get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
emails = json_response.map { |contact_data| contact_data["email"] }
expect(emails).to match_array([contact_1.email, contact_2.email])
@@ -97,7 +193,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
context 'when a user can not read contacts' do
it 'renders 404' do
- get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path }
+ get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -108,7 +204,7 @@ RSpec.describe Projects::AutocompleteSourcesController do
it 'renders 404' do
group.add_developer(user)
- get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path }
+ get :contacts, format: :json, params: { namespace_id: group.path, project_id: project.path, type: issue.class.name, type_id: issue.id }
expect(response).to have_gitlab_http_status(:not_found)
end
diff --git a/spec/controllers/projects/boards_controller_spec.rb b/spec/controllers/projects/boards_controller_spec.rb
index cde3a8d4761..89d0669f47b 100644
--- a/spec/controllers/projects/boards_controller_spec.rb
+++ b/spec/controllers/projects/boards_controller_spec.rb
@@ -3,11 +3,14 @@
require 'spec_helper'
RSpec.describe Projects::BoardsController do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
- before do
+ before_all do
project.add_maintainer(user)
+ end
+
+ before do
sign_in(user)
end
@@ -16,77 +19,63 @@ RSpec.describe Projects::BoardsController do
expect { list_boards }.to change(project.boards, :count).by(1)
end
- it 'sets boards_endpoint instance variable to a boards path' do
+ it 'renders template' do
list_boards
- expect(assigns(:boards_endpoint)).to eq project_boards_path(project)
+ expect(response).to render_template :index
+ expect(response.media_type).to eq 'text/html'
end
- context 'when format is HTML' do
- it 'renders template' do
- list_boards
+ context 'when there are recently visited boards' do
+ let_it_be(:boards) { create_list(:board, 3, resource_parent: project) }
- expect(response).to render_template :index
- expect(response.media_type).to eq 'text/html'
+ before_all do
+ visit_board(boards[2], Time.current + 1.minute)
+ visit_board(boards[0], Time.current + 2.minutes)
+ visit_board(boards[1], Time.current + 5.minutes)
end
- context 'with unauthorized user' do
- before do
- expect(Ability).to receive(:allowed?).with(user, :log_in, :global).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_project, project).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_issue_board, project).and_return(false)
- end
-
- it 'returns a not found 404 response' do
- list_boards
+ it 'redirects to latest visited board' do
+ list_boards
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.media_type).to eq 'text/html'
- end
+ expect(response).to redirect_to(
+ namespace_project_board_path(namespace_id: project.namespace, project_id: project, id: boards[1].id)
+ )
end
- context 'when user is signed out' do
- let(:project) { create(:project, :public) }
-
- it 'renders template' do
- sign_out(user)
+ def visit_board(board, time)
+ create(:board_project_recent_visit, project: project, board: board, user: user, updated_at: time)
+ end
+ end
- board = create(:board, project: project)
- create(:board_project_recent_visit, project: board.project, board: board, user: user)
+ context 'with unauthorized user' do
+ before do
+ expect(Ability).to receive(:allowed?).with(user, :log_in, :global).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :read_project, project).and_return(true)
+ allow(Ability).to receive(:allowed?).with(user, :read_issue_board, project).and_return(false)
+ end
- list_boards
+ it 'returns a not found 404 response' do
+ list_boards
- expect(response).to render_template :index
- expect(response.media_type).to eq 'text/html'
- end
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response.media_type).to eq 'text/html'
end
end
- context 'when format is JSON' do
- it 'returns a list of project boards' do
- create_list(:board, 2, project: project)
-
- expect(Boards::VisitsFinder).not_to receive(:new)
-
- list_boards format: :json
+ context 'when user is signed out' do
+ let(:project) { create(:project, :public) }
- expect(response).to match_response_schema('boards')
- expect(json_response.length).to eq 2
- end
+ it 'renders template' do
+ sign_out(user)
- context 'with unauthorized user' do
- before do
- expect(Ability).to receive(:allowed?).with(user, :log_in, :global).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_project, project).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_issue_board, project).and_return(false)
- end
+ board = create(:board, project: project)
+ create(:board_project_recent_visit, project: board.project, board: board, user: user)
- it 'returns a not found 404 response' do
- list_boards format: :json
+ list_boards
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.media_type).to eq 'application/json'
- end
+ expect(response).to render_template :index
+ expect(response.media_type).to eq 'text/html'
end
end
@@ -104,23 +93,16 @@ RSpec.describe Projects::BoardsController do
subject { list_boards }
end
- def list_boards(format: :html)
+ def list_boards
get :index, params: {
namespace_id: project.namespace,
project_id: project
- },
- format: format
+ }
end
end
describe 'GET show' do
- let!(:board) { create(:board, project: project) }
-
- it 'sets boards_endpoint instance variable to a boards path' do
- read_board board: board
-
- expect(assigns(:boards_endpoint)).to eq project_boards_path(project)
- end
+ let_it_be(:board) { create(:board, project: project) }
context 'when format is HTML' do
it 'renders template' do
@@ -146,12 +128,12 @@ RSpec.describe Projects::BoardsController do
end
context 'when user is signed out' do
- let(:project) { create(:project, :public) }
+ let(:public_board) { create(:board, project: create(:project, :public)) }
it 'does not save visit' do
sign_out(user)
- expect { read_board board: board }.to change(BoardProjectRecentVisit, :count).by(0)
+ expect { read_board board: public_board }.to change(BoardProjectRecentVisit, :count).by(0)
expect(response).to render_template :show
expect(response.media_type).to eq 'text/html'
@@ -159,48 +141,18 @@ RSpec.describe Projects::BoardsController do
end
end
- context 'when format is JSON' do
- it 'returns project board' do
- expect(Boards::Visits::CreateService).not_to receive(:new)
-
- read_board board: board, format: :json
-
- expect(response).to match_response_schema('board')
- end
-
- context 'with unauthorized user' do
- before do
- expect(Ability).to receive(:allowed?).with(user, :log_in, :global).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_project, project).and_return(true)
- allow(Ability).to receive(:allowed?).with(user, :read_issue_board, project).and_return(false)
- end
-
- it 'returns a not found 404 response' do
- read_board board: board, format: :json
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.media_type).to eq 'application/json'
- end
- end
- end
-
context 'when board does not belong to project' do
it 'returns a not found 404 response' do
another_board = create(:board)
- read_board board: another_board
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: another_board.to_param }
expect(response).to have_gitlab_http_status(:not_found)
end
end
- def read_board(board:, format: :html)
- get :show, params: {
- namespace_id: project.namespace,
- project_id: project,
- id: board.to_param
- },
- format: format
+ def read_board(board:)
+ get :show, params: { namespace_id: board.project.namespace, project_id: board.project, id: board.to_param }
end
end
end
diff --git a/spec/controllers/projects/compare_controller_spec.rb b/spec/controllers/projects/compare_controller_spec.rb
index 6ed6f7017e3..3751b89951c 100644
--- a/spec/controllers/projects/compare_controller_spec.rb
+++ b/spec/controllers/projects/compare_controller_spec.rb
@@ -67,11 +67,13 @@ RSpec.describe Projects::CompareController do
from: from_ref,
to: to_ref,
w: whitespace,
- page: page
+ page: page,
+ straight: straight
}
end
let(:whitespace) { nil }
+ let(:straight) { nil }
let(:page) { nil }
context 'when the refs exist in the same project' do
@@ -142,6 +144,58 @@ RSpec.describe Projects::CompareController do
end
end
+ context 'when comparing missing commits between source and target' do
+ let(:from_project_id) { nil }
+ let(:from_ref) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
+ let(:to_ref) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' }
+ let(:page) { 1 }
+
+ context 'when comparing them in the other direction' do
+ let(:straight) { "false" }
+ let(:from_ref) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' }
+ let(:to_ref) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
+
+ it 'the commits are there' do
+ show_request
+
+ expect(response).to be_successful
+ expect(assigns(:commits).length).to be >= 2
+ expect(assigns(:diffs).raw_diff_files.size).to be >= 2
+ expect(assigns(:diffs).diff_files.first).to be_present
+ end
+ end
+
+ context 'with straight mode true' do
+ let(:from_ref) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
+ let(:to_ref) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' }
+
+ let(:straight) { "true" }
+
+ it 'the commits are empty, but the removed lines are visible as diffs' do
+ show_request
+
+ expect(response).to be_successful
+ expect(assigns(:commits).length).to be == 0
+ expect(assigns(:diffs).diff_files.size).to be >= 4
+ end
+ end
+
+ context 'with straight mode false' do
+ let(:from_ref) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
+ let(:to_ref) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' }
+
+ let(:straight) { "false" }
+
+ it 'the additional commits are not visible in diffs and commits' do
+ show_request
+
+ expect(response).to be_successful
+ expect(assigns(:commits).length).to be == 0
+ expect(assigns(:diffs).diff_files.size).to be == 0
+ end
+ end
+ end
+
context 'when the refs exist in different projects but the user cannot see' do
let(:from_project_id) { private_fork.id }
let(:from_ref) { 'improve%2Fmore-awesome' }
@@ -450,10 +504,13 @@ RSpec.describe Projects::CompareController do
project_id: project,
from: from_ref,
to: to_ref,
+ straight: straight,
format: :json
}
end
+ let(:straight) { nil }
+
context 'when the source and target refs exist' do
let(:from_ref) { 'improve%2Fawesome' }
let(:to_ref) { 'feature' }
@@ -469,10 +526,43 @@ RSpec.describe Projects::CompareController do
escaped_to_ref = Addressable::URI.unescape(to_ref)
compare_service = CompareService.new(project, escaped_to_ref)
+ compare = compare_service.execute(project, escaped_from_ref, straight: false)
+
+ expect(CompareService).to receive(:new).with(project, escaped_to_ref).and_return(compare_service)
+ expect(compare_service).to receive(:execute).with(project, escaped_from_ref, straight: false).and_return(compare)
+
+ expect(compare).to receive(:commits).and_return(CommitCollection.new(project, [signature_commit, non_signature_commit]))
+ expect(non_signature_commit).to receive(:has_signature?).and_return(false)
+ end
+
+ it 'returns only the commit with a signature' do
+ signatures_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ signatures = json_response['signatures']
+
+ expect(signatures.size).to eq(1)
+ expect(signatures.first['commit_sha']).to eq(signature_commit.sha)
+ expect(signatures.first['html']).to be_present
+ end
+ end
+
+ context 'when the user has access to the project with straight compare' do
+ render_views
+
+ let(:signature_commit) { project.commit_by(oid: '0b4bc9a49b562e85de7cc9e834518ea6828729b9') }
+ let(:non_signature_commit) { build(:commit, project: project, safe_message: "message", sha: 'non_signature_commit') }
+ let(:straight) { "true" }
+
+ before do
+ escaped_from_ref = Addressable::URI.unescape(from_ref)
+ escaped_to_ref = Addressable::URI.unescape(to_ref)
+
+ compare_service = CompareService.new(project, escaped_to_ref)
compare = compare_service.execute(project, escaped_from_ref)
expect(CompareService).to receive(:new).with(project, escaped_to_ref).and_return(compare_service)
- expect(compare_service).to receive(:execute).with(project, escaped_from_ref).and_return(compare)
+ expect(compare_service).to receive(:execute).with(project, escaped_from_ref, straight: true).and_return(compare)
expect(compare).to receive(:commits).and_return(CommitCollection.new(project, [signature_commit, non_signature_commit]))
expect(non_signature_commit).to receive(:has_signature?).and_return(false)
diff --git a/spec/controllers/projects/cycle_analytics_controller_spec.rb b/spec/controllers/projects/cycle_analytics_controller_spec.rb
index f5dd8abd67b..034e6104f99 100644
--- a/spec/controllers/projects/cycle_analytics_controller_spec.rb
+++ b/spec/controllers/projects/cycle_analytics_controller_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Projects::CycleAnalyticsController do
let(:target_id) { 'p_analytics_valuestream' }
end
- it_behaves_like 'Snowplow event tracking' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject { get :show, params: request_params, format: :html }
let(:request_params) { { namespace_id: project.namespace, project_id: project } }
diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb
index 308146ce792..fd844808d81 100644
--- a/spec/controllers/projects/deploy_keys_controller_spec.rb
+++ b/spec/controllers/projects/deploy_keys_controller_spec.rb
@@ -72,13 +72,15 @@ RSpec.describe Projects::DeployKeysController do
end
describe 'POST create' do
+ let(:deploy_key_content) { attributes_for(:deploy_key)[:key] }
+
def create_params(title = 'my-key')
{
namespace_id: project.namespace.path,
project_id: project.path,
deploy_key: {
title: title,
- key: attributes_for(:deploy_key)[:key],
+ key: deploy_key_content,
deploy_keys_projects_attributes: { '0' => { can_push: '1' } }
}
}
@@ -96,13 +98,38 @@ RSpec.describe Projects::DeployKeysController do
expect(response).to redirect_to(project_settings_repository_path(project, anchor: 'js-deploy-keys-settings'))
end
- context 'when the deploy key is invalid' do
+ context 'when the deploy key has an invalid title' do
it 'shows an alert with the validations errors' do
post :create, params: create_params(nil)
expect(flash[:alert]).to eq("Title can't be blank, Deploy keys projects deploy key title can't be blank")
end
end
+
+ context 'when the deploy key is not supported SSH public key' do
+ let(:deploy_key_content) { 'bogus ssh public key' }
+
+ it 'shows an alert with a help link' do
+ post :create, params: create_params
+
+ expect(assigns(:key).errors.count).to be > 1
+ expect(flash[:alert]).to eq('Deploy Key must be a <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="/help/user/ssh#supported-ssh-key-types">supported SSH public key.</a>')
+ end
+ end
+
+ context 'when the deploy key already exists' do
+ before do
+ create(:deploy_key, title: 'my-key', key: deploy_key_content, projects: [project])
+ end
+
+ it 'shows an alert with the validations errors' do
+ post :create, params: create_params
+
+ expect(flash[:alert]).to eq("Fingerprint sha256 has already been taken, " \
+ "Deploy keys projects deploy key fingerprint sha256 has already been taken")
+ end
+ end
end
describe '/enable/:id' do
diff --git a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
index 55ab0f0eefa..2d39e0e5317 100644
--- a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
+++ b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
@@ -132,7 +132,7 @@ RSpec.describe Projects::DesignManagement::Designs::RawImagesController do
subject
expect(response.header['ETag']).to be_present
- expect(response.header['Cache-Control']).to eq("max-age=60, private")
+ expect(response.header['Cache-Control']).to eq("max-age=60, private, must-revalidate, stale-while-revalidate=60, stale-if-error=300, s-maxage=60")
end
end
diff --git a/spec/controllers/projects/graphs_controller_spec.rb b/spec/controllers/projects/graphs_controller_spec.rb
index 9227c7dd70a..3dfc22927cf 100644
--- a/spec/controllers/projects/graphs_controller_spec.rb
+++ b/spec/controllers/projects/graphs_controller_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe Projects::GraphsController do
let(:target_id) { 'p_analytics_repo' }
end
- it_behaves_like 'Snowplow event tracking' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject do
sign_in(user)
get :charts, params: request_params, format: :html
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index c48be8efb1b..0c3795540e0 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1107,6 +1107,46 @@ RSpec.describe Projects::IssuesController do
end
end
+ context 'when create service return an unrecoverable error with http_status' do
+ let(:http_status) { 403 }
+
+ before do
+ allow_next_instance_of(::Issues::CreateService) do |create_service|
+ allow(create_service).to receive(:execute).and_return(
+ ServiceResponse.error(message: 'unrecoverable error', http_status: http_status)
+ )
+ end
+ end
+
+ it 'renders 403 and logs the error' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Cannot create issue',
+ errors: ['unrecoverable error'],
+ http_status: http_status
+ )
+
+ post_new_issue
+
+ expect(response).to have_gitlab_http_status :forbidden
+ end
+
+ context 'when no render method is found for the returned http_status' do
+ let(:http_status) { nil }
+
+ it 'renders 404 and logs the error' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Cannot create issue',
+ errors: ['unrecoverable error'],
+ http_status: http_status
+ )
+
+ post_new_issue
+
+ expect(response).to have_gitlab_http_status :not_found
+ end
+ end
+ end
+
it 'creates the issue successfully', :aggregate_failures do
issue = post_new_issue
@@ -1661,13 +1701,27 @@ RSpec.describe Projects::IssuesController do
end
it 'allows CSV export' do
- expect(IssuableExportCsvWorker).to receive(:perform_async).with(:issue, viewer.id, project.id, anything)
+ expect(IssuableExportCsvWorker).to receive(:perform_async)
+ .with(:issue, viewer.id, project.id, hash_including('issue_types' => Issue::TYPES_FOR_LIST))
request_csv
expect(response).to redirect_to(project_issues_path(project))
expect(controller).to set_flash[:notice].to match(/\AYour CSV export has started/i)
end
+
+ context 'when work_items is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ it 'does not include tasks in CSV export' do
+ expect(IssuableExportCsvWorker).to receive(:perform_async)
+ .with(:issue, viewer.id, project.id, hash_including('issue_types' => Issue::TYPES_FOR_LIST.excluding('task')))
+
+ request_csv
+ end
+ end
end
context 'when not logged in' do
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 9c4baeae836..a41abd8c16d 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -77,7 +77,8 @@ RSpec.describe Projects::MergeRequestsController do
merge_request,
'json',
diff_head: true,
- view: 'inline'))
+ view: 'inline',
+ w: '0'))
end
context 'when diff files were cleaned' do
@@ -498,7 +499,7 @@ RSpec.describe Projects::MergeRequestsController do
context 'when a squash commit message is passed' do
let(:message) { 'My custom squash commit message' }
- it 'passes the same message to SquashService', :sidekiq_might_not_need_inline do
+ it 'passes the same message to SquashService', :sidekiq_inline do
params = { squash: '1',
squash_commit_message: message,
sha: merge_request.diff_head_sha }
@@ -790,7 +791,7 @@ RSpec.describe Projects::MergeRequestsController do
context 'with private builds' do
context 'for the target project member' do
- it 'does not respond with serialized pipelines', :sidekiq_might_not_need_inline do
+ it 'does not respond with serialized pipelines' do
expect(json_response['pipelines']).to be_empty
expect(json_response['count']['all']).to eq(0)
expect(response).to include_pagination_headers
@@ -800,7 +801,7 @@ RSpec.describe Projects::MergeRequestsController do
context 'for the source project member' do
let(:user) { fork_user }
- it 'responds with serialized pipelines', :sidekiq_might_not_need_inline do
+ it 'responds with serialized pipelines' do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
@@ -816,7 +817,7 @@ RSpec.describe Projects::MergeRequestsController do
end
context 'for the target project member' do
- it 'does not respond with serialized pipelines', :sidekiq_might_not_need_inline do
+ it 'does not respond with serialized pipelines' do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
@@ -826,7 +827,7 @@ RSpec.describe Projects::MergeRequestsController do
context 'for the source project member' do
let(:user) { fork_user }
- it 'responds with serialized pipelines', :sidekiq_might_not_need_inline do
+ it 'responds with serialized pipelines' do
expect(json_response['pipelines']).to be_present
expect(json_response['count']['all']).to eq(1)
expect(response).to include_pagination_headers
@@ -1855,13 +1856,13 @@ RSpec.describe Projects::MergeRequestsController do
create(:merge_request, source_project: forked, target_project: project, target_branch: 'master', head_pipeline: pipeline)
end
- it 'links to the environment on that project', :sidekiq_might_not_need_inline do
+ it 'links to the environment on that project' do
get_ci_environments_status
expect(json_response.first['url']).to match(/#{forked.full_path}/)
end
- context "when environment_target is 'merge_commit'", :sidekiq_might_not_need_inline do
+ context "when environment_target is 'merge_commit'" do
it 'returns nothing' do
get_ci_environments_status(environment_target: 'merge_commit')
@@ -1891,13 +1892,13 @@ RSpec.describe Projects::MergeRequestsController do
# we're trying to reduce the overall number of queries for this method.
# set a hard limit for now. https://gitlab.com/gitlab-org/gitlab-foss/issues/52287
- it 'keeps queries in check', :sidekiq_might_not_need_inline do
+ it 'keeps queries in check' do
control_count = ActiveRecord::QueryRecorder.new { get_ci_environments_status }.count
expect(control_count).to be <= 137
end
- it 'has no N+1 SQL issues for environments', :request_store, :sidekiq_might_not_need_inline, retry: 0 do
+ it 'has no N+1 SQL issues for environments', :request_store, retry: 0 do
# First run to insert test data from lets, which does take up some 30 queries
get_ci_environments_status
@@ -2144,7 +2145,7 @@ RSpec.describe Projects::MergeRequestsController do
sign_in(fork_owner)
end
- it 'returns 200', :sidekiq_might_not_need_inline do
+ it 'returns 200' do
expect_rebase_worker_for(fork_owner)
post_rebase
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index b62353784b3..28da7eff8fc 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -44,6 +44,26 @@ RSpec.describe Projects::MilestonesController do
end
end
+ describe "#create" do
+ it 'does not redirect without redirect_path' do
+ post :create, params: { namespace_id: project.namespace.id, project_id: project.id, milestone: { title: 'test' } }
+
+ expect(response).to redirect_to(project_milestone_path(project, project.milestones.last))
+ end
+
+ it 'redirects when given a redirect_path' do
+ post :create, params: { namespace_id: project.namespace.id, project_id: project.id, redirect_path: 'new_release', milestone: { title: 'test' } }
+
+ expect(response).to redirect_to(new_project_release_path(project))
+ end
+
+ it 'will not redirect when given a redirect_path with an error' do
+ post :create, params: { namespace_id: project.namespace.id, project_id: project.id, redirect_path: 'new_release', milestone: { title: nil } }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
describe "#index" do
context "as html" do
def render_index(project:, page:, search_title: '')
diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb
index 691508d1e14..b29bbef0c40 100644
--- a/spec/controllers/projects/pages_domains_controller_spec.rb
+++ b/spec/controllers/projects/pages_domains_controller_spec.rb
@@ -63,9 +63,15 @@ RSpec.describe Projects::PagesDomainsController do
describe 'POST create' do
it "creates a new pages domain" do
- expect do
- post(:create, params: request_params.merge(pages_domain: pages_domain_params))
- end.to change { PagesDomain.count }.by(1)
+ expect { post(:create, params: request_params.merge(pages_domain: pages_domain_params)) }
+ .to change { PagesDomain.count }.by(1)
+ .and publish_event(PagesDomains::PagesDomainCreatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: pages_domain_params[:domain]
+ )
created_domain = PagesDomain.reorder(:id).last
@@ -106,6 +112,17 @@ RSpec.describe Projects::PagesDomainsController do
end.to change { pages_domain.reload.certificate }.to(pages_domain_params[:user_provided_certificate])
end
+ it 'publishes PagesDomainUpdatedEvent event' do
+ expect { patch(:update, params: params) }
+ .to publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: pages_domain.domain
+ )
+ end
+
it 'redirects to the project page' do
patch(:update, params: params)
@@ -134,6 +151,11 @@ RSpec.describe Projects::PagesDomainsController do
expect(response).to render_template('show')
end
+
+ it 'does not publish PagesDomainUpdatedEvent event' do
+ expect { patch(:update, params: params) }
+ .to not_publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ end
end
context 'when parameters include the domain' do
@@ -197,9 +219,15 @@ RSpec.describe Projects::PagesDomainsController do
describe 'DELETE destroy' do
it "deletes the pages domain" do
- expect do
- delete(:destroy, params: request_params.merge(id: pages_domain.domain))
- end.to change { PagesDomain.count }.by(-1)
+ expect { delete(:destroy, params: request_params.merge(id: pages_domain.domain)) }
+ .to change(PagesDomain, :count).by(-1)
+ .and publish_event(PagesDomains::PagesDomainDeletedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: pages_domain.domain
+ )
expect(response).to redirect_to(project_pages_path(project))
end
@@ -216,6 +244,17 @@ RSpec.describe Projects::PagesDomainsController do
expect(response).to redirect_to(project_pages_domain_path(project, pages_domain))
end
+ it 'publishes PagesDomainUpdatedEvent event' do
+ expect { subject }
+ .to publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: pages_domain.domain
+ )
+ end
+
it 'removes certificate' do
expect do
subject
@@ -245,6 +284,11 @@ RSpec.describe Projects::PagesDomainsController do
expect(pages_domain.key).to be_present
end
+ it 'does not publish PagesDomainUpdatedEvent event' do
+ expect { subject }
+ .to not_publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ end
+
it 'redirects to show page with a flash message' do
subject
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index b9acaf65892..6e2de0c4d57 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -859,7 +859,7 @@ RSpec.describe Projects::PipelinesController do
let(:target_id) { ['p_analytics_pipelines', tab[:event]] }
end
- it_behaves_like 'Snowplow event tracking' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject { get :charts, params: request_params, format: :html }
let(:request_params) { { namespace_id: project.namespace, project_id: project, id: pipeline.id, chart: tab[:chart_param] } }
diff --git a/spec/controllers/projects/prometheus/metrics_controller_spec.rb b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
index cd195b95100..327651b2058 100644
--- a/spec/controllers/projects/prometheus/metrics_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/metrics_controller_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::Prometheus::MetricsController do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:prometheus_project) }
+ let_it_be(:project) { create(:project, :with_prometheus_integration) }
let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true) }
diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb
index 4996bd90005..14728618633 100644
--- a/spec/controllers/projects/protected_branches_controller_spec.rb
+++ b/spec/controllers/projects/protected_branches_controller_spec.rb
@@ -4,26 +4,23 @@ require('spec_helper')
RSpec.describe Projects::ProtectedBranchesController do
let_it_be_with_reload(:project) { create(:project, :repository) }
- let_it_be(:maintainer) { create(:user) }
+ let_it_be_with_reload(:empty_project) { create(:project, :empty_repo) }
+ let_it_be(:maintainer) { create(:user, maintainer_projects: [project, empty_project]) }
let(:protected_branch) { create(:protected_branch, project: project) }
let(:project_params) { { namespace_id: project.namespace.to_param, project_id: project } }
let(:base_params) { project_params.merge(id: protected_branch.id) }
let(:user) { maintainer }
- before_all do
- project.add_maintainer(maintainer)
- end
-
before do
sign_in(user)
end
describe "GET #index" do
- let(:project) { create(:project_empty_repo, :public) }
+ it 'redirects to repository settings' do
+ get(:index, params: { namespace_id: empty_project.namespace.to_param, project_id: empty_project })
- it "redirects empty repo to projects page" do
- get(:index, params: { namespace_id: project.namespace.to_param, project_id: project })
+ expect(response).to redirect_to(project_settings_repository_path(empty_project))
end
end
@@ -42,6 +39,18 @@ RSpec.describe Projects::ProtectedBranchesController do
end.to change(ProtectedBranch, :count).by(1)
end
+ context 'when repository is empty' do
+ let(:project) { empty_project }
+
+ it 'creates the protected branch rule' do
+ expect do
+ post(:create, params: project_params.merge(protected_branch: create_params))
+ end.to change(ProtectedBranch, :count).by(1)
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
context 'when a policy restricts rule creation' do
it "prevents creation of the protected branch rule" do
disallow(:create_protected_branch, an_instance_of(ProtectedBranch))
@@ -63,6 +72,17 @@ RSpec.describe Projects::ProtectedBranchesController do
expect(json_response["name"]).to eq('new_name')
end
+ context 'when repository is empty' do
+ let(:project) { empty_project }
+
+ it 'updates the protected branch rule' do
+ put(:update, params: base_params.merge(protected_branch: update_params))
+
+ expect(protected_branch.reload.name).to eq('new_name')
+ expect(json_response["name"]).to eq('new_name')
+ end
+ end
+
context 'when a policy restricts rule update' do
it "prevents update of the protected branch rule" do
disallow(:update_protected_branch, protected_branch)
@@ -83,6 +103,16 @@ RSpec.describe Projects::ProtectedBranchesController do
expect { ProtectedBranch.find(protected_branch.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
+ context 'when repository is empty' do
+ let(:project) { empty_project }
+
+ it 'deletes the protected branch rule' do
+ delete(:destroy, params: base_params)
+
+ expect { ProtectedBranch.find(protected_branch.id) }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
context 'when a policy restricts rule deletion' do
it "prevents deletion of the protected branch rule" do
disallow(:destroy_protected_branch, protected_branch)
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index e0d88fa799f..1c9aafacbd9 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -247,9 +247,11 @@ RSpec.describe Projects::RawController do
sign_in create(:user)
request_file
- expect(response.cache_control[:public]).to eq(true)
- expect(response.cache_control[:max_age]).to eq(60)
+ expect(response.headers['ETag']).to eq("\"bdd5aa537c1e1f6d1b66de4bac8a6132\"")
expect(response.cache_control[:no_store]).to be_nil
+ expect(response.header['Cache-Control']).to eq(
+ 'max-age=60, public, must-revalidate, stale-while-revalidate=60, stale-if-error=300, s-maxage=60'
+ )
end
context 'when a public project has private repo' do
@@ -260,7 +262,9 @@ RSpec.describe Projects::RawController do
sign_in user
request_file
- expect(response.header['Cache-Control']).to include('max-age=60, private')
+ expect(response.header['Cache-Control']).to eq(
+ 'max-age=60, private, must-revalidate, stale-while-revalidate=60, stale-if-error=300, s-maxage=60'
+ )
end
end
@@ -274,6 +278,21 @@ RSpec.describe Projects::RawController do
expect(response).to have_gitlab_http_status(:not_modified)
end
end
+
+ context 'when improve_blobs_cache_headers disabled' do
+ before do
+ stub_feature_flags(improve_blobs_cache_headers: false)
+ end
+
+ it 'uses weak etags with a restricted set of headers' do
+ sign_in create(:user)
+ request_file
+
+ expect(response.headers['ETag']).to eq("W/\"bdd5aa537c1e1f6d1b66de4bac8a6132\"")
+ expect(response.cache_control[:no_store]).to be_nil
+ expect(response.header['Cache-Control']).to eq('max-age=60, public')
+ end
+ end
end
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index b30610d98d7..b5797e374f3 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -231,7 +231,7 @@ RSpec.describe ProjectsController do
end
context "project with broken repo" do
- let_it_be(:empty_project) { create(:project_broken_repo, :public) }
+ let_it_be(:empty_project) { create(:project, :public) }
before do
sign_in(user)
@@ -246,8 +246,6 @@ RSpec.describe ProjectsController do
end
it "renders the empty project view" do
- allow(Project).to receive(:repo).and_raise(Gitlab::Git::Repository::NoRepository)
-
expect(response).to render_template('projects/no_repo')
end
end
@@ -299,14 +297,16 @@ RSpec.describe ProjectsController do
end
it "renders files even with invalid license" do
+ invalid_license = ::Gitlab::Git::DeclaredLicense.new(key: 'woozle', name: 'woozle wuzzle')
+
controller.instance_variable_set(:@project, public_project)
- expect(public_project.repository).to receive(:license_key).and_return('woozle wuzzle').at_least(:once)
+ expect(public_project.repository).to receive(:license).and_return(invalid_license).at_least(:once)
get_show
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('_files')
- expect(response.body).to have_content('LICENSE') # would be 'MIT license' if stub not works
+ expect(response.body).to have_content('woozle wuzzle')
end
describe 'tracking events', :snowplow do
@@ -1233,26 +1233,6 @@ RSpec.describe ProjectsController do
get :refs, params: { namespace_id: project.namespace, id: project, ref: "123456" }
end
- context 'when use_gitaly_pagination_for_refs is disabled' do
- before do
- stub_feature_flags(use_gitaly_pagination_for_refs: false)
- end
-
- it 'does not use gitaly pagination' do
- expected_params = ActionController::Parameters.new(ref: '123456', per_page: 100).permit!
-
- expect_next_instance_of(BranchesFinder, project.repository, expected_params) do |finder|
- expect(finder).to receive(:execute).with(gitaly_pagination: false).and_call_original
- end
-
- expect_next_instance_of(TagsFinder, project.repository, expected_params) do |finder|
- expect(finder).to receive(:execute).with(gitaly_pagination: false).and_call_original
- end
-
- get :refs, params: { namespace_id: project.namespace, id: project, ref: "123456" }
- end
- end
-
context 'when gitaly is unavailable' do
before do
expect_next_instance_of(TagsFinder) do |finder|
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
index 14e88d469ba..a3b246fbedd 100644
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ b/spec/controllers/registrations/welcome_controller_spec.rb
@@ -31,7 +31,6 @@ RSpec.describe Registrations::WelcomeController do
context 'when role and setup_for_company is set' do
before do
- stub_feature_flags(about_your_company_registration_flow: false)
user.update!(setup_for_company: false)
sign_in(user)
end
@@ -61,10 +60,6 @@ RSpec.describe Registrations::WelcomeController do
end
describe '#update' do
- before do
- stub_feature_flags(about_your_company_registration_flow: false)
- end
-
subject(:update) do
patch :update, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 4131bd148da..7ab66b04a6e 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -218,7 +218,7 @@ RSpec.describe SearchController do
end
end
- it_behaves_like 'Snowplow event tracking' do
+ it_behaves_like 'Snowplow event tracking with RedisHLL context' do
subject { get :show, params: { group_id: namespace.id, scope: 'blobs', search: 'term' } }
let(:project) { nil }
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 0e0770fb94c..80cf060bc45 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -399,18 +399,30 @@ RSpec.describe SessionsController do
end
context 'when OTP is invalid' do
- before do
- authenticate_2fa(otp_attempt: 'invalid')
- end
+ let(:code) { 'invalid' }
it 'does not authenticate' do
+ authenticate_2fa(otp_attempt: code)
+
expect(subject.current_user).not_to eq user
end
it 'warns about invalid OTP code' do
+ authenticate_2fa(otp_attempt: code)
+
expect(controller).to set_flash.now[:alert]
.to(/Invalid two-factor code/)
end
+
+ it 'sends an email to the user informing about the attempt to sign in with a wrong OTP code' do
+ controller.request.remote_addr = '1.2.3.4'
+
+ expect_next_instance_of(NotificationService) do |instance|
+ expect(instance).to receive(:two_factor_otp_attempt_failed).with(user, '1.2.3.4')
+ end
+
+ authenticate_2fa(otp_attempt: code)
+ end
end
end
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index c27e58634f6..e128db8d1c1 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -216,7 +216,7 @@ RSpec.describe UploadsController do
end
context 'for PDF files' do
- let(:filename) { 'git-cheat-sheet.pdf' }
+ let(:filename) { 'sample.pdf' }
let(:expected_disposition) { 'inline;' }
let(:note) { create(:note, :with_pdf_attachment, project: project) }
diff --git a/spec/factories/bulk_import/entities.rb b/spec/factories/bulk_import/entities.rb
index cf31ffec4f6..eeb4f8325ae 100644
--- a/spec/factories/bulk_import/entities.rb
+++ b/spec/factories/bulk_import/entities.rb
@@ -9,6 +9,7 @@ FactoryBot.define do
sequence(:destination_namespace) { |n| "destination-path-#{n}" }
destination_name { 'Imported Entity' }
+ sequence(:source_xid)
trait(:group_entity) do
source_type { :group_entity }
diff --git a/spec/factories/ci/build_metadata.rb b/spec/factories/ci/build_metadata.rb
index cfc86c4ef4b..a0a5305ef39 100644
--- a/spec/factories/ci/build_metadata.rb
+++ b/spec/factories/ci/build_metadata.rb
@@ -3,5 +3,9 @@
FactoryBot.define do
factory :ci_build_metadata, class: 'Ci::BuildMetadata' do
build { association(:ci_build, strategy: :build, metadata: instance) }
+
+ after(:build) do |metadata|
+ metadata.build&.valid?
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 8c2edc8cd9f..9a3b2837ab8 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -424,79 +424,79 @@ FactoryBot.define do
trait :codequality_report do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :codequality, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :codequality, job: build)
end
end
trait :sast_report do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :sast, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :sast, job: build)
end
end
trait :secret_detection_report do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :secret_detection, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :secret_detection, job: build)
end
end
trait :test_reports do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :junit, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :junit, job: build)
end
end
trait :test_reports_with_attachment do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :junit_with_attachment, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :junit_with_attachment, job: build)
end
end
trait :broken_test_reports do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :junit_with_corrupted_data, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :junit_with_corrupted_data, job: build)
end
end
trait :test_reports_with_duplicate_failed_test_names do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :junit_with_duplicate_failed_test_names, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :junit_with_duplicate_failed_test_names, job: build)
end
end
trait :test_reports_with_three_failures do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :junit_with_three_failures, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :junit_with_three_failures, job: build)
end
end
trait :accessibility_reports do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :accessibility, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :accessibility, job: build)
end
end
trait :coverage_reports do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :cobertura, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :cobertura, job: build)
end
end
trait :codequality_reports do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :codequality, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :codequality, job: build)
end
end
trait :codequality_reports_without_degradation do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :codequality_without_errors, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :codequality_without_errors, job: build)
end
end
trait :terraform_reports do
after(:build) do |build|
- build.job_artifacts << create(:ci_job_artifact, :terraform, job: build)
+ build.job_artifacts << build(:ci_job_artifact, :terraform, job: build)
end
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index f8b964cf8e0..304d77e8521 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -352,6 +352,18 @@ FactoryBot.define do
end
end
+ # Equivalent Semgrep report for combined :sast_bandit and :sast_gosec reports.
+ # This report includes signature tracking.
+ trait :sast_semgrep_for_multiple_findings do
+ file_type { :sast }
+ file_format { :raw }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json'), 'application/json')
+ end
+ end
+
trait :common_security_report do
file_format { :raw }
file_type { :dependency_scanning }
@@ -381,16 +393,6 @@ FactoryBot.define do
end
end
- trait :sast_deprecated do
- file_type { :sast }
- file_format { :raw }
-
- after(:build) do |artifact, _|
- artifact.file = fixture_file_upload(
- Rails.root.join('spec/fixtures/security_reports/deprecated/gl-sast-report.json'), 'application/json')
- end
- end
-
trait :sast_with_corrupted_data do
file_type { :sast }
file_format { :raw }
diff --git a/spec/factories/ci/pipeline_artifacts.rb b/spec/factories/ci/pipeline_artifacts.rb
index d096f149c3a..bdd390126dd 100644
--- a/spec/factories/ci/pipeline_artifacts.rb
+++ b/spec/factories/ci/pipeline_artifacts.rb
@@ -17,6 +17,11 @@ FactoryBot.define do
association :pipeline, :unlocked, factory: :ci_pipeline
end
+ trait :artifact_unlocked do
+ association :pipeline, :unlocked, factory: :ci_pipeline
+ locked { :unlocked }
+ end
+
trait :checksummed do
verification_checksum { 'abc' }
end
diff --git a/spec/factories/ci/pipeline_metadata.rb b/spec/factories/ci/pipeline_metadata.rb
new file mode 100644
index 00000000000..600cfaa92c6
--- /dev/null
+++ b/spec/factories/ci/pipeline_metadata.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_pipeline_metadata, class: 'Ci::PipelineMetadata' do
+ title { 'Pipeline title' }
+
+ pipeline factory: :ci_empty_pipeline
+ project
+ end
+end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index d6b1da1d5c2..650b8647237 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -8,6 +8,7 @@ FactoryBot.define do
sha { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
status { 'pending' }
add_attribute(:protected) { false }
+ partition_id { 1234 }
project
@@ -18,6 +19,8 @@ FactoryBot.define do
transient { child_of { nil } }
transient { upstream_of { nil } }
+ transient { title { nil } }
+
after(:build) do |pipeline, evaluator|
if evaluator.child_of
pipeline.project = evaluator.child_of.project
@@ -25,6 +28,10 @@ FactoryBot.define do
end
pipeline.ensure_project_iid!
+
+ if evaluator.title
+ pipeline.pipeline_metadata = build(:ci_pipeline_metadata, title: evaluator.title, project: pipeline.project, pipeline: pipeline)
+ end
end
after(:create) do |pipeline, evaluator|
@@ -47,6 +54,7 @@ FactoryBot.define do
end
factory :ci_pipeline do
+ partition_id { 1234 }
transient { ci_ref_presence { true } }
before(:create) do |pipeline, evaluator|
diff --git a/spec/factories/ci/reports/sbom/components.rb b/spec/factories/ci/reports/sbom/components.rb
index 317e1c863cf..fd9b4386130 100644
--- a/spec/factories/ci/reports/sbom/components.rb
+++ b/spec/factories/ci/reports/sbom/components.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :ci_reports_sbom_component, class: '::Gitlab::Ci::Reports::Sbom::Component' do
- type { :library }
+ type { "library" }
sequence(:name) { |n| "component-#{n}" }
sequence(:version) { |n| "v0.0.#{n}" }
diff --git a/spec/factories/ci/reports/sbom/reports.rb b/spec/factories/ci/reports/sbom/reports.rb
new file mode 100644
index 00000000000..4a83b5898ef
--- /dev/null
+++ b/spec/factories/ci/reports/sbom/reports.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_reports_sbom_report, class: '::Gitlab::Ci::Reports::Sbom::Report' do
+ transient do
+ num_components { 5 }
+ components { build_list :ci_reports_sbom_component, num_components }
+ source { association :ci_reports_sbom_source }
+ end
+
+ after(:build) do |report, options|
+ options.components.each { |component| report.add_component(component) }
+ report.set_source(options.source)
+ end
+
+ skip_create
+ end
+end
diff --git a/spec/factories/ci/reports/sbom/sources.rb b/spec/factories/ci/reports/sbom/sources.rb
index 9093aba86a6..688c0250b5f 100644
--- a/spec/factories/ci/reports/sbom/sources.rb
+++ b/spec/factories/ci/reports/sbom/sources.rb
@@ -19,15 +19,12 @@ FactoryBot.define do
}
end
- fingerprint { Digest::SHA256.hexdigest(data.to_json) }
-
skip_create
initialize_with do
::Gitlab::Ci::Reports::Sbom::Source.new(
type: type,
- data: data,
- fingerprint: fingerprint
+ data: data
)
end
end
diff --git a/spec/factories/customer_relations/contacts.rb b/spec/factories/customer_relations/contacts.rb
index 821c45d7514..1896510d362 100644
--- a/spec/factories/customer_relations/contacts.rb
+++ b/spec/factories/customer_relations/contacts.rb
@@ -11,5 +11,9 @@ FactoryBot.define do
trait :with_organization do
organization
end
+
+ trait :inactive do
+ state { :inactive }
+ end
end
end
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index 403165a3935..a4f06a48621 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -26,10 +26,10 @@ FactoryBot.define do
factory :wiki_page_event do
action { :created }
- # rubocop: disable FactoryBot/InlineAssociation
+ # rubocop: disable RSpec/FactoryBot/InlineAssociation
# A persistent project is needed to have a wiki page being created properly.
project { @overrides[:wiki_page]&.container || create(:project, :wiki_repo) }
- # rubocop: enable FactoryBot/InlineAssociation
+ # rubocop: enable RSpec/FactoryBot/InlineAssociation
target { association(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) }
transient do
@@ -54,6 +54,16 @@ FactoryBot.define do
target { note }
end
+ trait :for_issue do
+ target { association(:issue, issue_type: :issue) }
+ target_type { 'Issue' }
+ end
+
+ trait :for_work_item do
+ target { association(:work_item, :task) }
+ target_type { 'WorkItem' }
+ end
+
factory :design_event, traits: [:has_design] do
action { :created }
target { design }
diff --git a/spec/factories/git_wiki_commit_details.rb b/spec/factories/git_wiki_commit_details.rb
deleted file mode 100644
index fb3f2954b12..00000000000
--- a/spec/factories/git_wiki_commit_details.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :git_wiki_commit_details, class: 'Gitlab::Git::Wiki::CommitDetails' do
- skip_create
-
- transient do
- author { association(:user) }
- end
-
- sequence(:message) { |n| "Commit message #{n}" }
-
- initialize_with { new(author.id, author.username, author.name, author.email, message) }
- end
-end
diff --git a/spec/factories/go_module_commits.rb b/spec/factories/go_module_commits.rb
index 4f86d38954c..44f4ce9ea31 100644
--- a/spec/factories/go_module_commits.rb
+++ b/spec/factories/go_module_commits.rb
@@ -7,12 +7,12 @@ FactoryBot.define do
transient do
files { { 'foo.txt' => 'content' } }
message { 'Message' }
- # rubocop: disable FactoryBot/InlineAssociation
+ # rubocop: disable RSpec/FactoryBot/InlineAssociation
# We need a persisted project so we can create commits and tags
# in `commit` otherwise linting this factory with `build` strategy
# will fail.
project { create(:project, :repository) }
- # rubocop: enable FactoryBot/InlineAssociation
+ # rubocop: enable RSpec/FactoryBot/InlineAssociation
service do
Files::MultiService.new(
diff --git a/spec/factories/incident_management/timeline_event_tag_links.rb b/spec/factories/incident_management/timeline_event_tag_links.rb
new file mode 100644
index 00000000000..883aca29f99
--- /dev/null
+++ b/spec/factories/incident_management/timeline_event_tag_links.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :incident_management_timeline_event_tag_link, class: 'IncidentManagement::TimelineEventTagLink' do
+ association :timeline_event_tag, factory: :incident_management_timeline_event_tag
+ association :timeline_event, factory: :incident_management_timeline_event
+ end
+end
diff --git a/spec/factories/incident_management/timeline_event_tags.rb b/spec/factories/incident_management/timeline_event_tags.rb
new file mode 100644
index 00000000000..6333872ee4f
--- /dev/null
+++ b/spec/factories/incident_management/timeline_event_tags.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :incident_management_timeline_event_tag, class: 'IncidentManagement::TimelineEventTag' do
+ project
+ name { 'Start time' }
+ end
+end
diff --git a/spec/factories/ml/candidate_metrics.rb b/spec/factories/ml/candidate_metrics.rb
new file mode 100644
index 00000000000..28e3974d39f
--- /dev/null
+++ b/spec/factories/ml/candidate_metrics.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ml_candidate_metrics, class: '::Ml::CandidateMetric' do
+ association :candidate, factory: :ml_candidates
+
+ sequence(:name) { |n| "metric#{n}" }
+ value { 2.0 }
+ step { 1 }
+ tracked_at { 1234 }
+ end
+end
diff --git a/spec/factories/ml/candidate_params.rb b/spec/factories/ml/candidate_params.rb
new file mode 100644
index 00000000000..73cb0c54089
--- /dev/null
+++ b/spec/factories/ml/candidate_params.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ml_candidate_params, class: '::Ml::CandidateParam' do
+ association :candidate, factory: :ml_candidates
+
+ sequence(:name) { |n| "metric#{n}" }
+ sequence(:value) { |n| "value#{n}" }
+ end
+end
diff --git a/spec/factories/ml/candidates.rb b/spec/factories/ml/candidates.rb
index b5644ee3841..4fbcdc46103 100644
--- a/spec/factories/ml/candidates.rb
+++ b/spec/factories/ml/candidates.rb
@@ -3,5 +3,12 @@ FactoryBot.define do
factory :ml_candidates, class: '::Ml::Candidate' do
association :experiment, factory: :ml_experiments
association :user
+
+ trait :with_metrics_and_params do
+ after(:create) do |candidate|
+ candidate.metrics = FactoryBot.create_list(:ml_candidate_metrics, 2, candidate: candidate )
+ candidate.params = FactoryBot.create_list(:ml_candidate_params, 2, candidate: candidate )
+ end
+ end
end
end
diff --git a/spec/factories/ml/experiments.rb b/spec/factories/ml/experiments.rb
index 043ca712e60..e4f5a0da6cf 100644
--- a/spec/factories/ml/experiments.rb
+++ b/spec/factories/ml/experiments.rb
@@ -2,7 +2,8 @@
FactoryBot.define do
factory :ml_experiments, class: '::Ml::Experiment' do
sequence(:name) { |n| "experiment#{n}" }
- association :project
- association :user
+
+ project
+ user
end
end
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 2159f5b2dc1..530b4616765 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -184,7 +184,7 @@ FactoryBot.define do
end
trait :with_pdf_attachment do
- attachment { fixture_file_upload("spec/fixtures/git-cheat-sheet.pdf", "application/pdf") }
+ attachment { fixture_file_upload("spec/fixtures/sample.pdf", "application/pdf") }
end
trait :confidential do
diff --git a/spec/factories/packages/packages.rb b/spec/factories/packages/packages.rb
index 8074e505243..1da4f0cedbc 100644
--- a/spec/factories/packages/packages.rb
+++ b/spec/factories/packages/packages.rb
@@ -253,7 +253,7 @@ FactoryBot.define do
end
trait(:without_loaded_metadatum) do
- conan_metadatum { build(:conan_metadatum, package: nil) } # rubocop:disable FactoryBot/InlineAssociation
+ conan_metadatum { build(:conan_metadatum, package: nil) } # rubocop:disable RSpec/FactoryBot/InlineAssociation
end
end
diff --git a/spec/factories/packages/rpm/rpm_repository_files.rb b/spec/factories/packages/rpm/rpm_repository_files.rb
new file mode 100644
index 00000000000..079d32b3995
--- /dev/null
+++ b/spec/factories/packages/rpm/rpm_repository_files.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :rpm_repository_file, class: 'Packages::Rpm::RepositoryFile' do
+ project
+
+ file_name { 'repomd.xml' }
+ file_sha1 { 'efae869b4e95d54796a46481f3a211d6a88d0323' }
+ file_md5 { 'ddf8a75330c896a8d7709e75f8b5982a' }
+ size { 3127.kilobytes }
+ status { :default }
+
+ transient do
+ file_metadatum_trait { :xml }
+ end
+
+ transient do
+ file_fixture { 'spec/fixtures/packages/rpm/repodata/repomd.xml' }
+ end
+
+ after(:build) do |package_file, evaluator|
+ package_file.file = fixture_file_upload(evaluator.file_fixture)
+ end
+
+ trait(:object_storage) do
+ file_store { Packages::Rpm::RepositoryFileUploader::Store::REMOTE }
+ end
+
+ trait :pending_destruction do
+ status { :pending_destruction }
+ end
+ end
+end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 93ed68a4573..b62995dce42 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -54,7 +54,8 @@ FactoryBot.define do
import_last_error { nil }
forward_deployment_enabled { nil }
restrict_user_defined_variables { nil }
- ci_job_token_scope_enabled { nil }
+ ci_outbound_job_token_scope_enabled { nil }
+ ci_inbound_job_token_scope_enabled { nil }
runner_token_expiration_interval { nil }
runner_token_expiration_interval_human_readable { nil }
end
@@ -112,7 +113,8 @@ FactoryBot.define do
project.merge_trains_enabled = evaluator.merge_trains_enabled unless evaluator.merge_trains_enabled.nil?
project.keep_latest_artifact = evaluator.keep_latest_artifact unless evaluator.keep_latest_artifact.nil?
project.restrict_user_defined_variables = evaluator.restrict_user_defined_variables unless evaluator.restrict_user_defined_variables.nil?
- project.ci_job_token_scope_enabled = evaluator.ci_job_token_scope_enabled unless evaluator.ci_job_token_scope_enabled.nil?
+ project.ci_outbound_job_token_scope_enabled = evaluator.ci_outbound_job_token_scope_enabled unless evaluator.ci_outbound_job_token_scope_enabled.nil?
+ project.ci_inbound_job_token_scope_enabled = evaluator.ci_inbound_job_token_scope_enabled unless evaluator.ci_inbound_job_token_scope_enabled.nil?
project.runner_token_expiration_interval = evaluator.runner_token_expiration_interval unless evaluator.runner_token_expiration_interval.nil?
project.runner_token_expiration_interval_human_readable = evaluator.runner_token_expiration_interval_human_readable unless evaluator.runner_token_expiration_interval_human_readable.nil?
@@ -330,12 +332,6 @@ FactoryBot.define do
repository_read_only { true }
end
- trait :broken_repo do
- after(:create) do |project|
- TestEnv.rm_storage_dir(project.repository_storage, "#{project.disk_path}.git/refs")
- end
- end
-
trait :test_repo do
after :create do |project|
# There are various tests that rely on there being no repository cache.
@@ -427,12 +423,24 @@ FactoryBot.define do
error_tracking_setting { association :project_error_tracking_setting }
end
+ trait :with_redmine_integration do
+ has_external_issue_tracker { true }
+
+ redmine_integration
+ end
+
trait :with_jira_integration do
has_external_issue_tracker { true }
jira_integration
end
+ trait :with_prometheus_integration do
+ after :create do |project|
+ create(:prometheus_integration, project: project)
+ end
+ end
+
# Project with empty repository
#
# This is a case when you just created a project
@@ -441,13 +449,6 @@ FactoryBot.define do
empty_repo
end
- # Project with broken repository
- #
- # Project with an invalid repository state
- factory :project_broken_repo, parent: :project do
- broken_repo
- end
-
factory :forked_project_with_submodules, parent: :project do
path { 'forked-gitlabhq' }
@@ -459,42 +460,6 @@ FactoryBot.define do
end
end
- factory :redmine_project, parent: :project do
- has_external_issue_tracker { true }
-
- redmine_integration
- end
-
- factory :youtrack_project, parent: :project do
- has_external_issue_tracker { true }
-
- youtrack_integration
- end
-
- factory :jira_project, parent: :project do
- has_external_issue_tracker { true }
-
- jira_integration
- end
-
- factory :prometheus_project, parent: :project do
- after :create do |project|
- project.create_prometheus_integration(
- active: true,
- properties: {
- api_url: 'https://prometheus.example.com/',
- manual_configuration: true
- }
- )
- end
- end
-
- factory :ewm_project, parent: :project do
- has_external_issue_tracker { true }
-
- ewm_integration
- end
-
factory :project_with_design, parent: :project do
after(:create) do |project|
issue = create(:issue, project: project)
diff --git a/spec/factories/users/namespace_user_callouts.rb b/spec/factories/users/namespace_user_callouts.rb
deleted file mode 100644
index fded63d0cce..00000000000
--- a/spec/factories/users/namespace_user_callouts.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :namespace_callout, class: 'Users::NamespaceCallout' do
- feature_name { :invite_members_banner }
-
- user
- namespace
- end
-end
diff --git a/spec/factories/users/phone_number_validations.rb b/spec/factories/users/phone_number_validations.rb
new file mode 100644
index 00000000000..da53dda89b4
--- /dev/null
+++ b/spec/factories/users/phone_number_validations.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :phone_number_validation, class: 'Users::PhoneNumberValidation' do
+ user
+ country { 'US' }
+ international_dial_code { 1 }
+ phone_number { '555' }
+ end
+end
diff --git a/spec/fast_spec_helper.rb b/spec/fast_spec_helper.rb
index db4d9125e6e..393cd6f6a21 100644
--- a/spec/fast_spec_helper.rb
+++ b/spec/fast_spec_helper.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-if $".include?(File.expand_path('spec_helper.rb', __dir__))
+if $LOADED_FEATURES.include?(File.expand_path('spec_helper.rb', __dir__))
# There's no need to load anything here if spec_helper is already loaded
# because spec_helper is more extensive than fast_spec_helper
return
diff --git a/spec/features/admin/admin_broadcast_messages_spec.rb b/spec/features/admin/admin_broadcast_messages_spec.rb
index 875eb9dd0ce..b5416f539f1 100644
--- a/spec/features/admin/admin_broadcast_messages_spec.rb
+++ b/spec/features/admin/admin_broadcast_messages_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Admin Broadcast Messages' do
before do
admin = create(:admin)
sign_in(admin)
+ stub_feature_flags(vue_broadcast_messages: false)
gitlab_enable_admin_mode_sign_in(admin)
create(
:broadcast_message,
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index 040c6a65b7c..657dd52228e 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -207,31 +207,6 @@ RSpec.describe 'Admin Groups' do
end
describe 'add user into a group', :js do
- shared_examples 'adds user into a group' do
- it do
- visit admin_group_path(group)
-
- select2(user_selector, from: '#user_id', multiple: true)
- page.within '#new_project_member' do
- select2(Gitlab::Access::REPORTER, from: '#access_level')
- end
- click_button "Add users to group"
-
- page.within ".group-users-list" do
- expect(page).to have_content(user.name)
- expect(page).to have_content('Reporter')
- end
- end
- end
-
- it_behaves_like 'adds user into a group' do
- let(:user_selector) { user.id }
- end
-
- it_behaves_like 'adds user into a group' do
- let(:user_selector) { user.email }
- end
-
context 'when membership is set to expire' do
it 'renders relative time' do
expire_time = Time.current + 2.days
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index fe9fd01d3d5..35e57213bdb 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -66,10 +66,26 @@ RSpec.describe "Admin Runners" do
it 'has all necessary texts' do
expect(page).to have_text "Register an instance runner"
+ expect(page).to have_text "#{s_('Runners|All')} 3"
expect(page).to have_text "#{s_('Runners|Online')} 1"
expect(page).to have_text "#{s_('Runners|Offline')} 2"
expect(page).to have_text "#{s_('Runners|Stale')} 1"
end
+
+ describe 'delete all runners in bulk' do
+ before do
+ check s_('Runners|Select all')
+ click_button s_('Runners|Delete selected')
+
+ within_modal do
+ click_on 'Permanently delete 3 runners'
+ end
+
+ wait_for_requests
+ end
+
+ it_behaves_like 'shows no runners registered'
+ end
end
it 'shows a job count' do
@@ -351,14 +367,15 @@ RSpec.describe "Admin Runners" do
end
describe 'filter by tag' do
- before_all do
- create(:ci_runner, :instance, description: 'runner-blue', tag_list: ['blue'])
- create(:ci_runner, :instance, description: 'runner-red', tag_list: ['red'])
- end
+ let_it_be(:runner_1) { create(:ci_runner, :instance, description: 'runner-blue', tag_list: ['blue']) }
+ let_it_be(:runner_2) { create(:ci_runner, :instance, description: 'runner-2-blue', tag_list: ['blue']) }
+ let_it_be(:runner_3) { create(:ci_runner, :instance, description: 'runner-red', tag_list: ['red']) }
- it 'shows tags suggestions' do
+ before do
visit admin_runners_path
+ end
+ it 'shows tags suggestions' do
open_filtered_search_suggestions('Tags')
page.within(search_bar_selector) do
@@ -367,23 +384,25 @@ RSpec.describe "Admin Runners" do
end
end
- it 'shows correct runner when tag matches' do
- visit admin_runners_path
+ it_behaves_like 'filters by tag' do
+ let(:tag) { 'blue' }
+ let(:found_runner) { runner_1.description }
+ let(:missing_runner) { runner_3.description }
+ end
- expect(page).to have_content 'runner-blue'
- expect(page).to have_content 'runner-red'
+ context 'when tag does not match' do
+ before do
+ input_filtered_search_filter_is_only('Tags', 'green')
+ end
- input_filtered_search_filter_is_only('Tags', 'blue')
+ it_behaves_like 'shows no runners found'
- expect(page).to have_content 'runner-blue'
- expect(page).not_to have_content 'runner-red'
+ it 'shows no runner' do
+ expect(page).not_to have_content 'runner-blue'
+ end
end
it 'shows correct runner when tag is selected and search term is entered' do
- create(:ci_runner, :instance, description: 'runner-2-blue', tag_list: ['blue'])
-
- visit admin_runners_path
-
input_filtered_search_filter_is_only('Tags', 'blue')
expect(page).to have_content 'runner-blue'
@@ -396,19 +415,6 @@ RSpec.describe "Admin Runners" do
expect(page).not_to have_content 'runner-blue'
expect(page).not_to have_content 'runner-red'
end
-
- context 'when tag does not match' do
- before do
- visit admin_runners_path
- input_filtered_search_filter_is_only('Tags', 'green')
- end
-
- it_behaves_like 'shows no runners found'
-
- it 'shows no runner' do
- expect(page).not_to have_content 'runner-blue'
- end
- end
end
it 'sorts by last contact date' do
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index a5df142d188..94c5f397670 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Admin updates settings' do
include TermsHelper
include UsageDataHelpers
- let(:admin) { create(:admin) }
+ let_it_be(:admin) { create(:admin) }
let(:dot_com?) { false }
context 'application setting :admin_mode is enabled', :request_store do
@@ -177,10 +177,10 @@ RSpec.describe 'Admin updates settings' do
end
it 'change Dormant users period' do
- expect(page).to have_field _('Period of inactivity (days)')
+ expect(page).to have_field _('Days of inactivity before deactivation')
page.within(find('[data-testid="account-limit"]')) do
- fill_in _('application_setting_deactivate_dormant_users_period'), with: '35'
+ fill_in _('application_setting_deactivate_dormant_users_period'), with: '90'
click_button 'Save changes'
end
@@ -188,7 +188,7 @@ RSpec.describe 'Admin updates settings' do
page.refresh
- expect(page).to have_field _('Period of inactivity (days)'), with: '35'
+ expect(page).to have_field _('Days of inactivity before deactivation'), with: '90'
end
end
end
@@ -400,39 +400,19 @@ RSpec.describe 'Admin updates settings' do
end
context 'Runner Registration' do
- context 'when feature is enabled' do
- before do
- stub_feature_flags(runner_registration_control: true)
- end
-
- it 'allows admins to control who has access to register runners' do
- visit ci_cd_admin_application_settings_path
-
- expect(current_settings.valid_runner_registrars).to eq(ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
+ it 'allows admins to control who has access to register runners' do
+ visit ci_cd_admin_application_settings_path
- page.within('.as-runner') do
- find_all('input[type="checkbox"]').each(&:click)
+ expect(current_settings.valid_runner_registrars).to eq(ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
- click_button 'Save changes'
- end
-
- expect(current_settings.valid_runner_registrars).to eq([])
- expect(page).to have_content "Application settings saved successfully"
- end
- end
+ page.within('.as-runner') do
+ find_all('input[type="checkbox"]').each(&:click)
- context 'when feature is disabled' do
- before do
- stub_feature_flags(runner_registration_control: false)
+ click_button 'Save changes'
end
- it 'does not allow admins to control who has access to register runners' do
- visit ci_cd_admin_application_settings_path
-
- expect(current_settings.valid_runner_registrars).to eq(ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
-
- expect(page).not_to have_css('.as-runner')
- end
+ expect(current_settings.valid_runner_registrars).to eq([])
+ expect(page).to have_content "Application settings saved successfully"
end
end
diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
index 7e57cffc791..45dccf9921f 100644
--- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb
+++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
@@ -9,15 +9,11 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
let!(:user) { create(:user) }
def active_impersonation_tokens
- find(".table.active-tokens")
- end
-
- def no_personal_access_tokens_message
- find(".settings-message")
+ find("[data-testid='active-tokens']")
end
def created_impersonation_token
- find("#created-personal-access-token").value
+ find_field('new-access-token').value
end
before do
@@ -80,8 +76,7 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
accept_gl_confirm(button_text: 'Revoke') { click_on "Revoke" }
- expect(page).to have_selector(".settings-message")
- expect(no_personal_access_tokens_message).to have_text("This user has no active impersonation tokens.")
+ expect(active_impersonation_tokens).to have_text("This user has no active impersonation tokens.")
end
it "removes expired tokens from 'active' section" do
@@ -89,8 +84,7 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
visit admin_user_impersonation_tokens_path(user_id: user.username)
- expect(page).to have_selector(".settings-message")
- expect(no_personal_access_tokens_message).to have_text("This user has no active impersonation tokens.")
+ expect(active_impersonation_tokens).to have_text("This user has no active impersonation tokens.")
end
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index 236327ea687..9c59f0226e0 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -366,7 +366,7 @@ RSpec.describe 'Admin::Users' do
expect(user.projects_limit)
.to eq(Gitlab.config.gitlab.default_projects_limit)
expect(user.can_create_group)
- .to eq(Gitlab.config.gitlab.default_can_create_group)
+ .to eq(Gitlab::CurrentSettings.can_create_group)
end
it 'creates user with valid data' do
@@ -481,14 +481,14 @@ RSpec.describe 'Admin::Users' do
end
it 'lists groups' do
- within(:css, '.gl-mb-3 + .card') do
+ within(:css, '.gl-mb-3 + .gl-card') do
expect(page).to have_content 'Groups'
expect(page).to have_link group.name, href: admin_group_path(group)
end
end
it 'allows navigation to the group details' do
- within(:css, '.gl-mb-3 + .card') do
+ within(:css, '.gl-mb-3 + .gl-card') do
click_link group.name
end
expect(page).to have_content "Group: #{group.name}"
@@ -496,7 +496,7 @@ RSpec.describe 'Admin::Users' do
end
it 'shows the group access level' do
- within(:css, '.gl-mb-3 + .card') do
+ within(:css, '.gl-mb-3 + .gl-card') do
expect(page).to have_content 'Developer'
end
end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index f279af90aa3..a09c9d258dc 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -23,7 +23,6 @@ RSpec.describe 'Project issue boards', :js do
project.add_maintainer(user2)
sign_in(user)
- stub_feature_flags(gl_avatar_for_all_user_avatars: false)
set_cookie('sidebar_collapsed', 'true')
end
diff --git a/spec/features/dashboard/datetime_on_tooltips_spec.rb b/spec/features/dashboard/datetime_on_tooltips_spec.rb
index 48a6976f263..875ae41c55d 100644
--- a/spec/features/dashboard/datetime_on_tooltips_spec.rb
+++ b/spec/features/dashboard/datetime_on_tooltips_spec.rb
@@ -41,9 +41,9 @@ RSpec.describe 'Tooltips on .timeago dates', :js do
end
it 'has the datetime formated correctly' do
- expect(page).to have_selector('.js-timeago.snippet-created-ago', text: '1 day ago')
+ expect(page).to have_selector('[data-testid=snippet-created-at] .js-timeago', text: '1 day ago')
- page.find('.js-timeago.snippet-created-ago').hover
+ page.find('[data-testid=snippet-created-at] .js-timeago').hover
expect(datetime_in_tooltip).to eq(created_date)
end
diff --git a/spec/features/dashboard/issues_filter_spec.rb b/spec/features/dashboard/issues_filter_spec.rb
index 3c774f8b269..0d10aed955a 100644
--- a/spec/features/dashboard/issues_filter_spec.rb
+++ b/spec/features/dashboard/issues_filter_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe 'Dashboard Issues filtering', :js do
it 'updates atom feed link' do
visit_issues(milestone_title: '', assignee_username: user.username)
- link = find('.nav-controls a[title="Subscribe to RSS feed"]')
+ link = find('[data-testid="rss-feed-link"]')
params = CGI.parse(URI.parse(link[:href]).query)
auto_discovery_link = find('link[type="application/atom+xml"]', visible: false)
auto_discovery_params = CGI.parse(URI.parse(auto_discovery_link[:href]).query)
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 847d0faf60d..0b468854322 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -112,13 +112,12 @@ RSpec.describe 'Dashboard Projects' do
end
context 'when on Starred projects tab', :js do
- it 'shows the empty state when there are no starred projects', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/222357' do
+ it 'shows the empty state when there are no starred projects' do
visit(starred_dashboard_projects_path)
element = page.find('.row.empty-state')
expect(element).to have_content("You don't have starred projects yet.")
- expect(element.find('.svg-content img')['src']).to have_content('illustrations/starred_empty')
end
it 'shows only starred projects' do
diff --git a/spec/features/dashboard/user_filters_projects_spec.rb b/spec/features/dashboard/user_filters_projects_spec.rb
index 2cf56f93cf9..e25da5854ab 100644
--- a/spec/features/dashboard/user_filters_projects_spec.rb
+++ b/spec/features/dashboard/user_filters_projects_spec.rb
@@ -145,7 +145,14 @@ RSpec.describe 'Dashboard > User filters projects' do
end
it 'filters any project' do
+ # Selecting the same option in the `GlListbox` does not emit `select` event
+ # and that is why URL update won't be triggered. Given that `Any` is a default option
+ # we need to explicitly switch from some other option (e.g. `Internal`) to `Any`
+ # to trigger the page update
+ select_dropdown_option '#filtered-search-visibility-dropdown > .dropdown', 'Internal', '.dropdown-item'
+
select_dropdown_option '#filtered-search-visibility-dropdown > .dropdown', 'Any', '.dropdown-item'
+
list = page.all('.projects-list .project-name').map(&:text)
expect(list).to contain_exactly("Internal project", "Private project", "Treasure", "Victorialand")
diff --git a/spec/features/discussion_comments/issue_spec.rb b/spec/features/discussion_comments/issue_spec.rb
index 0bb43343ecd..ebb57b37918 100644
--- a/spec/features/discussion_comments/issue_spec.rb
+++ b/spec/features/discussion_comments/issue_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'Thread Comments Issue', :js do
let(:issue) { create(:issue, project: project) }
before do
- stub_feature_flags(remove_user_attributes_projects: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/discussion_comments/merge_request_spec.rb b/spec/features/discussion_comments/merge_request_spec.rb
index 4fa82de3b4b..a90ff3721d3 100644
--- a/spec/features/discussion_comments/merge_request_spec.rb
+++ b/spec/features/discussion_comments/merge_request_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'Thread Comments Merge Request', :js do
let(:merge_request) { create(:merge_request, source_project: project) }
before do
- stub_feature_flags(remove_user_attributes_projects: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index 98282e47488..c3096677a73 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -4,12 +4,13 @@ require 'spec_helper'
RSpec.describe 'Expand and collapse diffs', :js do
let(:branch) { 'expand-collapse-diffs' }
- let(:project) { create(:project, :repository) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:admin) { create(:admin) }
before do
allow(Gitlab::CurrentSettings).to receive(:diff_max_patch_bytes).and_return(100.kilobytes)
- admin = create(:admin)
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb
index baa691d244e..666bf3594de 100644
--- a/spec/features/global_search_spec.rb
+++ b/spec/features/global_search_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'Global search' do
let(:project) { create(:project, namespace: user.namespace) }
before do
+ stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)
end
diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb
index f11e5c56545..84882fc674e 100644
--- a/spec/features/groups/empty_states_spec.rb
+++ b/spec/features/groups/empty_states_spec.rb
@@ -94,9 +94,7 @@ RSpec.describe 'Group empty states' do
end
it "shows a new #{issuable_name} button" do
- within '.empty-state' do
- expect(page).to have_content("create #{issuable_name}")
- end
+ expect(page).to have_content("create #{issuable_name}")
end
it "the new #{issuable_name} button opens a project dropdown" do
diff --git a/spec/features/groups/group_runners_spec.rb b/spec/features/groups/group_runners_spec.rb
index ada03726c97..e9807c487d5 100644
--- a/spec/features/groups/group_runners_spec.rb
+++ b/spec/features/groups/group_runners_spec.rb
@@ -114,6 +114,35 @@ RSpec.describe "Group Runners" do
end
end
+ context "with an online instance runner" do
+ let!(:instance_runner) do
+ create(:ci_runner, :instance, description: 'runner-baz', contacted_at: Time.zone.now)
+ end
+
+ before do
+ visit group_runners_path(group)
+ end
+
+ context "when selecting 'Show only inherited'" do
+ before do
+ find("[data-testid='runner-membership-toggle'] button").click
+
+ wait_for_requests
+ end
+
+ it_behaves_like 'shows runner in list' do
+ let(:runner) { instance_runner }
+ end
+
+ it 'shows runner details page' do
+ click_link("##{instance_runner.id} (#{instance_runner.short_sha})")
+
+ expect(current_url).to include(group_runner_path(group, instance_runner))
+ expect(page).to have_content "#{s_('Runners|Description')} runner-baz"
+ end
+ end
+ end
+
context 'with a multi-project runner' do
let(:project) { create(:project, group: group) }
let(:project_2) { create(:project, group: group) }
@@ -123,7 +152,7 @@ RSpec.describe "Group Runners" do
visit group_runners_path(group)
within_runner_row(runner.id) do
- expect(page).to have_button 'Delete runner', disabled: true
+ expect(page).not_to have_button 'Delete runner'
end
end
end
@@ -142,6 +171,21 @@ RSpec.describe "Group Runners" do
end
end
end
+
+ describe 'filter by tag' do
+ let!(:runner_1) { create(:ci_runner, :group, groups: [group], description: 'runner-blue', tag_list: ['blue']) }
+ let!(:runner_2) { create(:ci_runner, :group, groups: [group], description: 'runner-red', tag_list: ['red']) }
+
+ before do
+ visit group_runners_path(group)
+ end
+
+ it_behaves_like 'filters by tag' do
+ let(:tag) { 'blue' }
+ let(:found_runner) { runner_1.description }
+ let(:missing_runner) { runner_2.description }
+ end
+ end
end
describe "Group runner show page", :js do
diff --git a/spec/features/groups/labels/sort_labels_spec.rb b/spec/features/groups/labels/sort_labels_spec.rb
index fba166449f8..9d05703aae6 100644
--- a/spec/features/groups/labels/sort_labels_spec.rb
+++ b/spec/features/groups/labels/sort_labels_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Sort labels', :js do
it 'sorts by date' do
click_button 'Name'
- sort_options = find('ul.dropdown-menu').all('li').collect(&:text)
+ sort_options = find('ul[role="listbox"]').all('li').collect(&:text)
expect(sort_options[0]).to eq('Name')
expect(sort_options[1]).to eq('Name, descending')
@@ -37,7 +37,7 @@ RSpec.describe 'Sort labels', :js do
expect(sort_options[4]).to eq('Updated date')
expect(sort_options[5]).to eq('Oldest updated')
- click_button 'Name, descending'
+ find('li', text: 'Name, descending').click
# assert default sorting
within '.other-labels' do
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index be1db970e9d..296b839c8fc 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -57,6 +57,16 @@ RSpec.describe 'Group merge requests page' do
expect(find('#js-dropdown-assignee .filter-dropdown')).to have_content(user.name)
expect(find('#js-dropdown-assignee .filter-dropdown')).not_to have_content(user2.name)
end
+
+ it 'will still show the navbar with no results' do
+ search_term = 'some-search-term-that-produces-zero-results'
+
+ filtered_search.set(search_term)
+ filtered_search.send_keys(:enter)
+
+ expect(page).to have_content('filter produced no results')
+ expect(page).to have_link('Open', href: "/groups/#{group.name}/-/merge_requests?scope=all&search=#{search_term}&state=opened")
+ end
end
describe 'new merge request dropdown' do
diff --git a/spec/features/groups/milestones_sorting_spec.rb b/spec/features/groups/milestones_sorting_spec.rb
index 22d7ff91d41..125bf9ce3a7 100644
--- a/spec/features/groups/milestones_sorting_spec.rb
+++ b/spec/features/groups/milestones_sorting_spec.rb
@@ -29,9 +29,9 @@ RSpec.describe 'Milestones sorting', :js do
within '[data-testid=milestone_sort_by_dropdown]' do
click_button 'Due soon'
- expect(find('.gl-new-dropdown-contents').all('.gl-new-dropdown-item-text-wrapper p').map(&:text)).to eq(['Due soon', 'Due later', 'Start soon', 'Start later', 'Name, ascending', 'Name, descending'])
+ expect(find('ul[role="listbox"]').all('li').map(&:text)).to eq(['Due soon', 'Due later', 'Start soon', 'Start later', 'Name, ascending', 'Name, descending'])
- click_button 'Due later'
+ find('li', text: 'Due later').click
expect(page).to have_button('Due later')
end
diff --git a/spec/features/groups/new_group_page_spec.rb b/spec/features/groups/new_group_page_spec.rb
new file mode 100644
index 00000000000..6a8af9c31fd
--- /dev/null
+++ b/spec/features/groups/new_group_page_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'New group page', :js do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'new top level group alert' do
+ context 'when a user visits the new group page' do
+ it 'shows the new top level group alert' do
+ visit new_group_path(anchor: 'create-group-pane')
+
+ expect(page).to have_selector('[data-testid="new-top-level-alert"]')
+ end
+ end
+
+ context 'when a user visits the new sub group page' do
+ let(:parent_group) { create(:group) }
+
+ it 'does not show the new top level group alert' do
+ visit new_group_path(parent_id: parent_group.id, anchor: 'create-group-pane')
+
+ expect(page).not_to have_selector('[data-testid="new-top-level-alert"]')
+ end
+ end
+ end
+end
diff --git a/spec/features/groups/settings/repository_spec.rb b/spec/features/groups/settings/repository_spec.rb
index 159deb2a4e3..f6b8bbdd35f 100644
--- a/spec/features/groups/settings/repository_spec.rb
+++ b/spec/features/groups/settings/repository_spec.rb
@@ -2,14 +2,17 @@
require 'spec_helper'
-RSpec.describe 'Group Repository settings' do
+RSpec.describe 'Group Repository settings', :js do
include WaitForRequests
- let(:user) { create(:user) }
- let(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group, reload: true) { create(:group) }
- before do
+ before_all do
group.add_owner(user)
+ end
+
+ before do
sign_in(user)
end
@@ -20,9 +23,26 @@ RSpec.describe 'Group Repository settings' do
stub_container_registry_config(enabled: true)
end
- it_behaves_like 'a deploy token in settings' do
- let(:entity_type) { 'group' }
- let(:page_path) { group_settings_repository_path(group) }
+ context 'when ajax deploy tokens is enabled' do
+ before do
+ stub_feature_flags(ajax_new_deploy_token: true)
+ end
+
+ it_behaves_like 'a deploy token in settings' do
+ let(:entity_type) { 'group' }
+ let(:page_path) { group_settings_repository_path(group) }
+ end
+ end
+
+ context 'when ajax deploy tokens is disabled' do
+ before do
+ stub_feature_flags(ajax_new_deploy_token: false)
+ end
+
+ it_behaves_like 'a deploy token in settings' do
+ let(:entity_type) { 'group' }
+ let(:page_path) { group_settings_repository_path(group) }
+ end
end
end
diff --git a/spec/features/help_dropdown_spec.rb b/spec/features/help_dropdown_spec.rb
index e64c19d4708..a9c014a9408 100644
--- a/spec/features/help_dropdown_spec.rb
+++ b/spec/features/help_dropdown_spec.rb
@@ -59,6 +59,10 @@ RSpec.describe "Help Dropdown", :js do
expect(page).to have_text('Your GitLab Version')
expect(page).to have_text("#{Gitlab.version_info.major}.#{Gitlab.version_info.minor}")
expect(page).to have_selector('.version-check-badge')
+ expect(page).to have_selector(
+ 'a[data-testid="gitlab-version-container"][href="/help/update/index"]'
+ )
+ expect(page).to have_selector('.version-check-badge[href="/help/update/index"]')
expect(page).to have_text(ui_text)
end
end
diff --git a/spec/features/ide/user_commits_changes_spec.rb b/spec/features/ide/user_commits_changes_spec.rb
deleted file mode 100644
index 04b215710b3..00000000000
--- a/spec/features/ide/user_commits_changes_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'IDE user commits changes', :js do
- include WebIdeSpecHelpers
-
- let(:project) { create(:project, :public, :repository) }
- let(:user) { project.first_owner }
-
- before do
- stub_feature_flags(vscode_web_ide: false)
-
- sign_in(user)
-
- ide_visit(project)
- end
-
- it 'user updates nested files' do
- content = <<~HEREDOC
- Lorem ipsum
- Dolar sit
- Amit
- HEREDOC
-
- ide_create_new_file('foo/bar/lorem_ipsum.md', content: content)
- ide_delete_file('foo/bar/.gitkeep')
-
- ide_commit
-
- expect(page).to have_content('All changes are committed')
- expect(project.repository.blob_at('master', 'foo/bar/.gitkeep')).to be_nil
- expect(project.repository.blob_at('master', 'foo/bar/lorem_ipsum.md').data).to eql(content)
- end
-end
diff --git a/spec/features/incidents/incident_timeline_events_spec.rb b/spec/features/incidents/incident_timeline_events_spec.rb
index 6db9f87d6f2..ef0eb27d310 100644
--- a/spec/features/incidents/incident_timeline_events_spec.rb
+++ b/spec/features/incidents/incident_timeline_events_spec.rb
@@ -12,7 +12,6 @@ RSpec.describe 'Incident timeline events', :js do
end
before do
- stub_feature_flags(incident_timeline: true)
sign_in(developer)
visit project_issues_incident_path(project, incident)
diff --git a/spec/features/issuables/markdown_references/jira_spec.rb b/spec/features/issuables/markdown_references/jira_spec.rb
index 9d46b3a274e..66d0022f7e9 100644
--- a/spec/features/issuables/markdown_references/jira_spec.rb
+++ b/spec/features/issuables/markdown_references/jira_spec.rb
@@ -15,8 +15,6 @@ RSpec.describe "Jira", :js do
before do
remotelink = double(:remotelink, all: [], build: double(save!: true))
- stub_feature_flags(remove_user_attributes_projects: false)
-
stub_request(:get, "https://jira.example.com/rest/api/2/issue/JIRA-5")
stub_request(:post, "https://jira.example.com/rest/api/2/issue/JIRA-5/comment")
allow_next_instance_of(JIRA::Resource::Issue) do |instance|
@@ -59,7 +57,7 @@ RSpec.describe "Jira", :js do
visit(issue_path(issue_actual_project))
page.within("#notes") do
- expect(page).to have_content("#{user.to_reference} mentioned in merge request #{merge_request.to_reference}")
+ expect(page).to have_content("#{user.name} mentioned in merge request #{merge_request.to_reference}")
end
end
@@ -67,7 +65,7 @@ RSpec.describe "Jira", :js do
visit(issue_path(issue_other_project))
page.within("#notes") do
- expect(page).to have_content("#{user.to_reference} mentioned in merge request #{merge_request.to_reference(other_project)}")
+ expect(page).to have_content("#{user.name} mentioned in merge request #{merge_request.to_reference(other_project)}")
end
end
end
diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb
index 56be1493ed2..d6cde466d1b 100644
--- a/spec/features/issues/incident_issue_spec.rb
+++ b/spec/features/issues/incident_issue_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe 'Incident Detail', :js do
context 'when user displays the incident' do
before do
- stub_feature_flags(incident_timeline: project)
project.add_developer(user)
sign_in(user)
@@ -52,8 +51,8 @@ RSpec.describe 'Incident Detail', :js do
aggregate_failures 'when on summary tab (default tab)' do
hidden_items = find_all('.js-issue-widgets')
- # Linked Issues/MRs and comment box
- expect(hidden_items.count).to eq(2)
+ # Linked Issues/MRs and comment box and emoji block
+ expect(hidden_items.count).to eq(3)
expect(hidden_items).to all(be_visible)
edit_button = find_all('[aria-label="Edit title and description"]')
@@ -67,13 +66,13 @@ RSpec.describe 'Incident Detail', :js do
expect(incident_tabs).to have_content('"yet.another": 73')
# does not show the linked issues and notes/comment components' do
- hidden_items = find_all('.js-issue-widgets')
+ hidden_items = find_all('.js-issue-widgets', wait: false)
# Linked Issues/MRs and comment box are hidden on page
expect(hidden_items.count).to eq(0)
# does not show the edit title and description button
- edit_button = find_all('[aria-label="Edit title and description"]')
+ edit_button = find_all('[aria-label="Edit title and description"]', wait: false)
expect(edit_button.count).to eq(0)
end
end
@@ -83,32 +82,19 @@ RSpec.describe 'Incident Detail', :js do
before do
visit project_issue_path(project, incident)
wait_for_requests
+
click_link 'Timeline'
+ wait_for_requests
end
it 'does not show the linked issues and notes/comment components' do
page.within('.issuable-details') do
- hidden_items = find_all('.js-issue-widgets')
+ hidden_items = find_all('.js-issue-widgets', wait: false)
# Linked Issues/MRs and comment box are hidden on page
expect(hidden_items.count).to eq(0)
end
end
end
-
- context 'when incident_timeline feature flag is disabled' do
- before do
- stub_feature_flags(incident_timeline: false)
-
- visit project_issues_incident_path(project, incident)
- wait_for_requests
- end
-
- it 'does not show Timeline tab' do
- tabs = find('[data-testid="incident-tabs"]')
-
- expect(tabs).not_to have_content('Timeline')
- end
- end
end
end
diff --git a/spec/features/issues/related_issues_spec.rb b/spec/features/issues/related_issues_spec.rb
index 818e99f2ec9..62127295a7c 100644
--- a/spec/features/issues/related_issues_spec.rb
+++ b/spec/features/issues/related_issues_spec.rb
@@ -3,42 +3,41 @@
require 'spec_helper'
RSpec.describe 'Related issues', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project_empty_repo, :public) }
- let(:project_b) { create(:project_empty_repo, :public) }
- let(:project_unauthorized) { create(:project_empty_repo, :public) }
- let(:issue_a) { create(:issue, project: project) }
- let(:issue_b) { create(:issue, project: project) }
- let(:issue_c) { create(:issue, project: project) }
- let(:issue_d) { create(:issue, project: project) }
- let(:issue_project_b_a) { create(:issue, project: project_b) }
- let(:issue_project_unauthorized_a) { create(:issue, project: project_unauthorized) }
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:project_b) { create(:project_empty_repo, :public) }
+ let_it_be(:project_unauthorized) { create(:project_empty_repo, :public) }
+ let_it_be(:internal_project) { create(:project_empty_repo, :internal) }
+ let_it_be(:private_project) { create(:project_empty_repo, :private) }
+ let_it_be(:public_project) { create(:project_empty_repo, :public) }
+
+ let_it_be(:issue_a) { create(:issue, project: project) }
+ let_it_be(:issue_b) { create(:issue, project: project) }
+ let_it_be(:issue_c) { create(:issue, project: project) }
+ let_it_be(:issue_d) { create(:issue, project: project) }
+ let_it_be(:issue_project_b_a) { create(:issue, project: project_b) }
+ let_it_be(:issue_project_unauthorized_a) { create(:issue, project: project_unauthorized) }
+ let_it_be(:internal_issue) { create(:issue, project: internal_project) }
+ let_it_be(:private_issue) { create(:issue, project: private_project) }
+ let_it_be(:public_issue) { create(:issue, project: public_project) }
context 'widget visibility' do
context 'when not logged in' do
it 'does not show widget when internal project' do
- project = create :project_empty_repo, :internal
- issue = create :issue, project: project
-
- visit project_issue_path(project, issue)
+ visit project_issue_path(internal_project, internal_issue)
expect(page).not_to have_css('.related-issues-block')
end
it 'does not show widget when private project' do
- project = create :project_empty_repo, :private
- issue = create :issue, project: project
-
- visit project_issue_path(project, issue)
+ visit project_issue_path(private_project, private_issue)
expect(page).not_to have_css('.related-issues-block')
end
it 'shows widget when public project' do
- project = create :project_empty_repo, :public
- issue = create :issue, project: project
-
- visit project_issue_path(project, issue)
+ visit project_issue_path(public_project, public_issue)
expect(page).to have_css('.related-issues-block')
expect(page).not_to have_button 'Add a related issue'
@@ -51,39 +50,29 @@ RSpec.describe 'Related issues', :js do
end
it 'shows widget when internal project' do
- project = create :project_empty_repo, :internal
- issue = create :issue, project: project
-
- visit project_issue_path(project, issue)
+ visit project_issue_path(internal_project, internal_issue)
expect(page).to have_css('.related-issues-block')
expect(page).not_to have_button 'Add a related issue'
end
it 'does not show widget when private project' do
- project = create :project_empty_repo, :private
- issue = create :issue, project: project
-
- visit project_issue_path(project, issue)
+ visit project_issue_path(private_project, private_issue)
expect(page).not_to have_css('.related-issues-block')
end
it 'shows widget when public project' do
- project = create :project_empty_repo, :public
- issue = create :issue, project: project
-
- visit project_issue_path(project, issue)
+ visit project_issue_path(public_project, public_issue)
expect(page).to have_css('.related-issues-block')
expect(page).not_to have_button 'Add a related issue'
end
it 'shows widget on their own public issue' do
- project = create :project_empty_repo, :public
- issue = create :issue, project: project, author: user
+ issue = create :issue, project: public_project, author: user
- visit project_issue_path(project, issue)
+ visit project_issue_path(public_project, issue)
expect(page).to have_css('.related-issues-block')
expect(page).not_to have_button 'Add a related issue'
@@ -96,33 +85,27 @@ RSpec.describe 'Related issues', :js do
end
it 'shows widget when internal project' do
- project = create :project_empty_repo, :internal
- issue = create :issue, project: project
- project.add_guest(user)
+ internal_project.add_guest(user)
- visit project_issue_path(project, issue)
+ visit project_issue_path(internal_project, internal_issue)
expect(page).to have_css('.related-issues-block')
expect(page).not_to have_button 'Add a related issue'
end
it 'shows widget when private project' do
- project = create :project_empty_repo, :private
- issue = create :issue, project: project
- project.add_guest(user)
+ private_project.add_guest(user)
- visit project_issue_path(project, issue)
+ visit project_issue_path(private_project, private_issue)
expect(page).to have_css('.related-issues-block')
expect(page).not_to have_button 'Add a related issue'
end
it 'shows widget when public project' do
- project = create :project_empty_repo, :public
- issue = create :issue, project: project
- project.add_guest(user)
+ public_project.add_guest(user)
- visit project_issue_path(project, issue)
+ visit project_issue_path(public_project, public_issue)
expect(page).to have_css('.related-issues-block')
expect(page).not_to have_button 'Add a related issue'
@@ -135,44 +118,37 @@ RSpec.describe 'Related issues', :js do
end
it 'shows widget when internal project' do
- project = create :project_empty_repo, :internal
- issue = create :issue, project: project
- project.add_reporter(user)
+ internal_project.add_reporter(user)
- visit project_issue_path(project, issue)
+ visit project_issue_path(internal_project, internal_issue)
expect(page).to have_css('.related-issues-block')
expect(page).to have_button 'Add a related issue'
end
it 'shows widget when private project' do
- project = create :project_empty_repo, :private
- issue = create :issue, project: project
- project.add_reporter(user)
+ private_project.add_reporter(user)
- visit project_issue_path(project, issue)
+ visit project_issue_path(private_project, private_issue)
expect(page).to have_css('.related-issues-block')
expect(page).to have_button 'Add a related issue'
end
it 'shows widget when public project' do
- project = create :project_empty_repo, :public
- issue = create :issue, project: project
- project.add_reporter(user)
+ public_project.add_reporter(user)
- visit project_issue_path(project, issue)
+ visit project_issue_path(public_project, public_issue)
expect(page).to have_css('.related-issues-block')
expect(page).to have_button 'Add a related issue'
end
it 'shows widget on their own public issue' do
- project = create :project_empty_repo, :public
- issue = create :issue, project: project, author: user
- project.add_reporter(user)
+ issue = create :issue, project: public_project, author: user
+ public_project.add_reporter(user)
- visit project_issue_path(project, issue)
+ visit project_issue_path(public_project, issue)
expect(page).to have_css('.related-issues-block')
expect(page).to have_button 'Add a related issue'
@@ -184,8 +160,11 @@ RSpec.describe 'Related issues', :js do
let!(:issue_link_b) { create :issue_link, source: issue_a, target: issue_b }
let!(:issue_link_c) { create :issue_link, source: issue_a, target: issue_c }
- before do
+ before_all do
project.add_guest(user)
+ end
+
+ before do
sign_in(user)
end
@@ -213,9 +192,12 @@ RSpec.describe 'Related issues', :js do
end
context 'when user has permission to manage related issues' do
- before do
+ before_all do
project.add_maintainer(user)
project_b.add_maintainer(user)
+ end
+
+ before do
sign_in(user)
end
diff --git a/spec/features/issues/resource_label_events_spec.rb b/spec/features/issues/resource_label_events_spec.rb
index e4da2f67516..e8158b3e2aa 100644
--- a/spec/features/issues/resource_label_events_spec.rb
+++ b/spec/features/issues/resource_label_events_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe 'List issue resource label events', :js do
let!(:event) { create(:resource_label_event, user: user, issue: issue, label: label) }
before do
- stub_feature_flags(remove_user_attributes_projects: false)
visit project_issue_path(project, issue)
wait_for_requests
end
@@ -22,13 +21,7 @@ RSpec.describe 'List issue resource label events', :js do
it 'shows both notes and resource label events' do
page.within('#notes') do
expect(find("#note_#{note.id}")).to have_content 'some note'
- expect(find("#note_#{event.discussion_id}")).to have_content 'added foo label'
- end
- end
-
- it 'shows the user status on the system note for the label' do
- page.within("#note_#{event.discussion_id}") do
- expect(page).to show_user_status user_status
+ expect(find("#note_#{event.reload.discussion_id}")).to have_content 'added foo label'
end
end
end
diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb
index cc0d35afd60..87cd00fac6b 100644
--- a/spec/features/issues/service_desk_spec.rb
+++ b/spec/features/issues/service_desk_spec.rb
@@ -90,7 +90,6 @@ RSpec.describe 'Service Desk Issue Tracker', :js do
it 'displays the small info box, documentation, a button to configure service desk, and the address' do
aggregate_failures do
- expect(page).to have_css('.non-empty-state')
expect(page).to have_link('Learn more.', href: help_page_path('user/project/service_desk'))
expect(page).not_to have_link('Enable Service Desk')
expect(page).to have_content(project.service_desk_address)
diff --git a/spec/features/issues/user_interacts_with_awards_spec.rb b/spec/features/issues/user_interacts_with_awards_spec.rb
index 8ed56108f00..47b28b88108 100644
--- a/spec/features/issues/user_interacts_with_awards_spec.rb
+++ b/spec/features/issues/user_interacts_with_awards_spec.rb
@@ -218,7 +218,7 @@ RSpec.describe 'User interacts with awards' do
expect(first('[data-testid="award-button"]')).to have_content '1'
end
- it 'removes award from issue' do
+ it 'removes award from issue', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/375241' do
first('[data-testid="award-button"]').click
find('[data-testid="award-button"].selected').click
diff --git a/spec/features/issues/user_scrolls_to_deeplinked_note_spec.rb b/spec/features/issues/user_scrolls_to_deeplinked_note_spec.rb
index 5aae5abaf10..1fa8f533869 100644
--- a/spec/features/issues/user_scrolls_to_deeplinked_note_spec.rb
+++ b/spec/features/issues/user_scrolls_to_deeplinked_note_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'User scrolls to deep-linked note' do
context 'on issue page', :js do
it 'on comment' do
- stub_feature_flags(gl_avatar_for_all_user_avatars: false)
visit project_issue_path(project, issue, anchor: "note_#{comment_1.id}")
wait_for_requests
diff --git a/spec/features/markdown/gitlab_flavored_markdown_spec.rb b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
index e831d1be608..17fe2dab8f7 100644
--- a/spec/features/markdown/gitlab_flavored_markdown_spec.rb
+++ b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe "GitLab Flavored Markdown" do
- include CycleAnalyticsHelpers
-
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
@@ -24,7 +22,12 @@ RSpec.describe "GitLab Flavored Markdown" do
let(:commit) { project.commit }
before do
- create_commit("fix #{issue.to_reference}\n\nask #{fred.to_reference} for details", project, user, 'master')
+ project.repository.commit_files(
+ user,
+ branch_name: 'master',
+ message: "fix #{issue.to_reference}\n\nask #{fred.to_reference} for details",
+ actions: [{ action: :create, file_path: 'a/new.file', content: 'This is a file' }]
+ )
end
it "renders title in commits#index" do
diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb
index 44354c9df47..61dd41204f8 100644
--- a/spec/features/markdown/metrics_spec.rb
+++ b/spec/features/markdown/metrics_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Metrics rendering', :js, :kubeclient, :use_clean_rails_memory_st
include MetricsDashboardUrlHelpers
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:prometheus_project) }
+ let_it_be(:project) { create(:project, :with_prometheus_integration) }
let_it_be(:environment) { create(:environment, project: project) }
let(:issue) { create(:issue, project: project, description: description) }
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index bccdc3c4c62..f01217df8c5 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -74,6 +74,24 @@ RSpec.describe 'Merge request > Batch comments', :js do
expect(page).to have_selector('.draft-note-component', text: 'Testing update')
end
+ context 'multiple times on the same diff line' do
+ it 'shows both drafts at once' do
+ write_diff_comment
+
+ # All of the Diff helpers like click_diff_line (or write_diff_comment)
+ # fail very badly when run a second time.
+ # This recreates the relevant logic.
+ line = find_by_scrolling("[id='#{sample_compare.changes[0][:line_code]}']")
+ line.hover
+ line.find('.js-add-diff-note-button').click
+
+ write_comment(text: 'A second draft!', button_text: 'Add to review')
+
+ expect(page).to have_text('Line is wrong')
+ expect(page).to have_text('A second draft!')
+ end
+ end
+
context 'with image and file draft note' do
let(:merge_request) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project) }
let!(:draft_on_text) { create(:draft_note_on_text_diff, merge_request: merge_request, author: user, path: 'README.md', note: 'Lorem ipsum on text...') }
diff --git a/spec/features/merge_request/close_reopen_report_toggle_spec.rb b/spec/features/merge_request/close_reopen_report_toggle_spec.rb
index dea9a10a4ec..5e9400935c3 100644
--- a/spec/features/merge_request/close_reopen_report_toggle_spec.rb
+++ b/spec/features/merge_request/close_reopen_report_toggle_spec.rb
@@ -24,14 +24,14 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
context 'close/reopen/report toggle' do
it 'opens a dropdown when toggle is clicked' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
expect(container).to have_link("Close merge request")
expect(container).to have_link('Report abuse')
end
it 'links to Report Abuse' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
click_link 'Report abuse'
expect(page).to have_content('Report abuse to admin')
@@ -42,7 +42,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
let(:issuable) { create(:merge_request, :opened, source_project: project) }
it 'shows the `Edit` and `Mark as draft` buttons' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
expect(container).to have_link('Edit')
expect(container).to have_link('Mark as draft')
@@ -56,7 +56,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
let(:issuable) { create(:merge_request, :closed, source_project: project) }
it 'shows both the `Edit` and `Reopen` button' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
expect(container).to have_link('Edit')
expect(container).to have_link('Report abuse')
@@ -68,7 +68,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
let(:issuable) { create(:merge_request, :closed, source_project: project, author: user) }
it 'shows both the `Edit` and `Reopen` button' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
expect(container).to have_link('Edit')
expect(container).to have_link('Reopen merge request')
diff --git a/spec/features/merge_request/merge_request_discussion_lock_spec.rb b/spec/features/merge_request/merge_request_discussion_lock_spec.rb
index a7bc2a062af..d69295744f7 100644
--- a/spec/features/merge_request/merge_request_discussion_lock_spec.rb
+++ b/spec/features/merge_request/merge_request_discussion_lock_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Merge Request Discussion Lock', :js do
end
it 'the user can lock the merge_request' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
expect(page).to have_content('Lock merge request')
end
@@ -103,7 +103,7 @@ RSpec.describe 'Merge Request Discussion Lock', :js do
end
it 'the user can unlock the merge_request' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
expect(page).to have_content('Unlock merge request')
end
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index fd33731cb7b..ffaf403e873 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe 'User comments on a diff', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(remove_user_attributes_projects: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/merge_request/user_comments_on_merge_request_spec.rb b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
index ec1e2fea851..dbcfc2b968f 100644
--- a/spec/features/merge_request/user_comments_on_merge_request_spec.rb
+++ b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe 'User comments on a merge request', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(remove_user_attributes_projects: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
index f5b5460769e..07d99a786ba 100644
--- a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
+++ b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'Batch diffs', :js do
include MergeRequestDiffHelpers
include RepoHelpers
- let(:project) { create(:project, :repository) }
- let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'empty-branch') }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'empty-branch') }
before do
sign_in(project.first_owner)
diff --git a/spec/features/merge_request/user_jumps_to_discussion_spec.rb b/spec/features/merge_request/user_jumps_to_discussion_spec.rb
deleted file mode 100644
index 9bded1c5572..00000000000
--- a/spec/features/merge_request/user_jumps_to_discussion_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User jumps to the next unresolved discussion', :js do
- let(:project) { create(:project, :repository) }
- let(:merge_request) do
- create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
- end
-
- let(:user) { create(:user) }
-
- before do
- create(:discussion_note, noteable: merge_request, project: project, author: user)
-
- project.add_maintainer(user)
- sign_in(user)
-
- visit(diffs_project_merge_request_path(project, merge_request))
-
- wait_for_requests
- end
-
- it 'jumps to overview tab' do
- find('.discussion-next-btn').click
-
- expect(page).to have_css('.notes-tab.active')
- end
-end
diff --git a/spec/features/merge_request/user_manages_subscription_spec.rb b/spec/features/merge_request/user_manages_subscription_spec.rb
index 9fb85957979..a8d59a6ffb5 100644
--- a/spec/features/merge_request/user_manages_subscription_spec.rb
+++ b/spec/features/merge_request/user_manages_subscription_spec.rb
@@ -43,22 +43,18 @@ RSpec.describe 'User manages subscription', :js do
it 'toggles subscription' do
wait_for_requests
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
expect(page).to have_selector('.gl-toggle:not(.is-checked)')
find('[data-testid="notifications-toggle"] .gl-toggle').click
wait_for_requests
- click_button 'Toggle dropdown'
-
expect(page).to have_selector('.gl-toggle.is-checked')
find('[data-testid="notifications-toggle"] .gl-toggle').click
wait_for_requests
- click_button 'Toggle dropdown'
-
expect(page).to have_selector('.gl-toggle:not(.is-checked)')
end
end
diff --git a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
index c3a61476442..d85f275b724 100644
--- a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
+++ b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
@@ -16,12 +16,12 @@ RSpec.describe 'Merge request > User marks merge request as draft', :js do
end
it 'toggles draft status' do
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
click_link 'Mark as draft'
expect(page).to have_content("Draft: #{merge_request.title}")
- click_button 'Toggle dropdown'
+ find('[data-testid="merge-request-actions"]').click
page.within('.detail-page-header-actions') do
click_link 'Mark as ready'
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 1eebb6c2e28..8af0e957c14 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
include MergeRequestDiffHelpers
include Spec::Support::Helpers::ModalHelpers
- let(:merge_request) { create(:merge_request) }
+ let_it_be(:merge_request) { create(:merge_request) }
let(:project) { merge_request.source_project }
let(:user) { project.creator }
let(:comment_button_class) { '.add-diff-note' }
@@ -219,7 +219,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
end
context 'with a match line' do
- it 'does not allow commenting' do
+ it 'does not allow commenting', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/375024' do
match_should_not_allow_commenting(find_by_scrolling('.match', match: :first))
end
end
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 8225fcbfd89..a6c024be698 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -8,10 +8,11 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do
include Spec::Support::Helpers::ModalHelpers
include MergeRequestDiffHelpers
- let(:project) { create(:project, :public, :repository) }
- let(:user) { project.creator }
- let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: 'Bug NS-04') }
- let(:path) { 'files/ruby/popen.rb' }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { project.creator }
+ let_it_be(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: 'Bug NS-04') }
+
+ let(:path) { 'files/ruby/popen.rb' }
let(:position) do
build(:text_diff_position, :added,
file: path,
@@ -22,10 +23,12 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do
let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) }
- before do
+ before_all do
project.add_maintainer(user)
+ end
+
+ before do
sign_in user
- stub_feature_flags(gl_avatar_for_all_user_avatars: false)
set_cookie('sidebar_collapsed', 'true')
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index c2a0e528ea7..77ac6fac22f 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -13,13 +13,15 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
let(:merge_request) { create(:merge_request, source_project: project) }
let(:merge_request_in_only_mwps_project) { create(:merge_request, source_project: project_only_mwps) }
+ def click_expand_button
+ find('[data-testid="report-section-expand-button"]').click
+ end
+
before do
project.add_maintainer(user)
project_only_mwps.add_maintainer(user)
sign_in(user)
- stub_feature_flags(refactor_mr_widgets_extensions: false)
- stub_feature_flags(refactor_mr_widgets_extensions_user: false)
stub_feature_flags(refactor_mr_widget_test_summary: false)
end
@@ -606,7 +608,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
it 'shows test reports summary which includes the new failure' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
expect(page).to have_content('Test summary contained 1 failed out of 2 total tests')
within(".js-report-section-container") do
@@ -621,7 +623,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when user clicks the new failure' do
it 'shows the test report detail' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
within(".js-report-section-container") do
click_button 'addTest'
@@ -654,7 +656,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
it 'shows test reports summary which includes the existing failure' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
expect(page).to have_content('Test summary contained 1 failed out of 2 total tests')
within(".js-report-section-container") do
@@ -668,7 +670,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when user clicks the existing failure' do
it 'shows test report detail of it' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
within(".js-report-section-container") do
click_button 'Test#sum when a is 1 and b is 3 returns summary'
@@ -701,7 +703,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
it 'shows test reports summary which includes the resolved failure' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
expect(page).to have_content('Test summary contained 1 fixed test result out of 2 total tests')
within(".js-report-section-container") do
@@ -715,7 +717,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when user clicks the resolved failure' do
it 'shows test report detail of it' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
within(".js-report-section-container") do
click_button 'addTest'
@@ -747,7 +749,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
it 'shows test reports summary which includes the new error' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
expect(page).to have_content('Test summary contained 1 error out of 2 total tests')
within(".js-report-section-container") do
@@ -762,7 +764,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when user clicks the new error' do
it 'shows the test report detail' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
within(".js-report-section-container") do
click_button 'addTest'
@@ -794,7 +796,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
it 'shows test reports summary which includes the existing error' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
expect(page).to have_content('Test summary contained 1 error out of 2 total tests')
within(".js-report-section-container") do
@@ -808,7 +810,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when user clicks the existing error' do
it 'shows test report detail of it' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
within(".js-report-section-container") do
click_button 'Test#sum when a is 4 and b is 4 returns summary'
@@ -840,7 +842,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
it 'shows test reports summary which includes the resolved error' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
expect(page).to have_content('Test summary contained 1 fixed test result out of 2 total tests')
within(".js-report-section-container") do
@@ -854,7 +856,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'when user clicks the resolved error' do
it 'shows test report detail of it' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
within(".js-report-section-container") do
click_button 'addTest'
@@ -894,7 +896,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
it 'shows test reports summary which includes the resolved failure' do
within(".js-reports-container") do
- click_button 'Expand'
+ click_expand_button
expect(page).to have_content('Test summary contained 20 failed out of 20 total tests')
within(".js-report-section-container") do
diff --git a/spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb b/spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb
deleted file mode 100644
index d3ea8b955f2..00000000000
--- a/spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Project > Merge request > View user status' do
- let(:project) { create(:project, :public, :repository) }
- let(:merge_request) do
- create(:merge_request, source_project: project, target_project: project, author: create(:user))
- end
-
- before do
- stub_feature_flags(remove_user_attributes_projects: false)
- end
-
- subject { visit merge_request_path(merge_request) }
-
- context 'for notes', :js do
- describe 'the status of the author of a note on a merge request' do
- let(:note) { create(:note, noteable: merge_request, project: project, author: create(:user)) }
-
- it_behaves_like 'showing user status' do
- let(:user_with_status) { note.author }
- end
- end
-
- describe 'the status of the author of a diff note on a merge request' do
- let(:note) { create(:diff_note_on_merge_request, noteable: merge_request, project: project, author: create(:user)) }
-
- it_behaves_like 'showing user status' do
- let(:user_with_status) { note.author }
- end
- end
- end
-end
diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb
index 2a1ea1a4e73..98d623902a5 100644
--- a/spec/features/milestone_spec.rb
+++ b/spec/features/milestone_spec.rb
@@ -23,11 +23,17 @@ RSpec.describe 'Milestone' do
fill_in "milestone_due_date", with: '2016-12-16'
end
- find('input[name="commit"]').click
+ click_button 'Create milestone'
expect(find('[data-testid="no-issues-alert"]')).to have_content('Assign some issues to this milestone.')
expect(page).to have_content('Nov 16, 2016–Dec 16, 2016')
end
+
+ it 'passes redirect_path through to form' do
+ visit new_project_milestone_path(project, redirect_path: 'new_release')
+
+ expect(find('#redirect_path', visible: :all)[:value]).to eq('new_release')
+ end
end
describe 'Open a milestone with closed issues' do
@@ -49,7 +55,7 @@ RSpec.describe 'Milestone' do
page.within '.milestone-form' do
fill_in "milestone_title", with: milestone.title
end
- find('input[name="commit"]').click
+ click_button 'Create milestone'
expect(find('.gl-alert-danger')).to have_content('already being used for another group or project milestone.')
end
@@ -62,7 +68,7 @@ RSpec.describe 'Milestone' do
page.within '.milestone-form' do
fill_in "milestone_title", with: milestone.title
end
- find('input[name="commit"]').click
+ click_button 'Create milestone'
expect(find('.gl-alert-danger')).to have_content('already being used for another group or project milestone.')
end
diff --git a/spec/features/milestones/user_creates_milestone_spec.rb b/spec/features/milestones/user_creates_milestone_spec.rb
index dd377aa4a26..1ab231632fb 100644
--- a/spec/features/milestones/user_creates_milestone_spec.rb
+++ b/spec/features/milestones/user_creates_milestone_spec.rb
@@ -3,29 +3,100 @@
require 'spec_helper'
RSpec.describe "User creates milestone", :js do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:inherited_guest) { create(:user) }
+ let_it_be(:inherited_developer) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+
+ shared_examples 'creates milestone' do
+ specify do
+ title = "v2.3"
+
+ fill_in("Title", with: title)
+ fill_in("Description", with: "# Description header")
+ click_button("Create milestone")
+
+ expect(page).to have_content(title)
+ .and have_content("Issues")
+ .and have_header_with_correct_id_and_link(1, "Description header", "description-header")
+
+ visit(activity_project_path(project))
+
+ expect(page).to have_content("#{user.name} #{user.to_reference} opened milestone")
+ end
+ end
+
+ shared_examples 'renders not found' do
+ specify do
+ expect(page).to have_title('Not Found')
+ expect(page).to have_content('Page Not Found')
+ end
+ end
+
+ before_all do
+ group.add_guest(inherited_guest)
+ group.add_developer(inherited_developer)
+ end
before do
- project.add_developer(user)
sign_in(user)
-
visit(new_project_milestone_path(project))
end
- it "creates milestone" do
- title = "v2.3"
+ context 'when project is public' do
+ let_it_be(:project) { create(:project, :public, group: group) }
+
+ context 'and issues and merge requests are private' do
+ before_all do
+ project.project_feature.update!(
+ issues_access_level: ProjectFeature::PRIVATE,
+ merge_requests_access_level: ProjectFeature::PRIVATE
+ )
+ end
+
+ context 'when user is an inherited member from the group' do
+ context 'and user is a guest' do
+ let(:user) { inherited_guest }
+
+ it_behaves_like 'renders not found'
+ end
+
+ context 'and user is a developer' do
+ let(:user) { inherited_developer }
+
+ it_behaves_like 'creates milestone'
+ end
+ end
+ end
+ end
+
+ context 'when project is private' do
+ let_it_be(:project) { create(:project, :private, group: group) }
+
+ context 'and user is a direct project member' do
+ before_all do
+ project.add_developer(developer)
+ end
+
+ context 'when user is a developer' do
+ let(:user) { developer }
+
+ it_behaves_like 'creates milestone'
+ end
+ end
- fill_in("Title", with: title)
- fill_in("Description", with: "# Description header")
- click_button("Create milestone")
+ context 'and user is an inherited member from the group' do
+ context 'when user is a guest' do
+ let(:user) { inherited_guest }
- expect(page).to have_content(title)
- .and have_content("Issues")
- .and have_header_with_correct_id_and_link(1, "Description header", "description-header")
+ it_behaves_like 'renders not found'
+ end
- visit(activity_project_path(project))
+ context 'when user is a developer' do
+ let(:user) { inherited_developer }
- expect(page).to have_content("#{user.name} #{user.to_reference} opened milestone")
+ it_behaves_like 'creates milestone'
+ end
+ end
end
end
diff --git a/spec/features/oauth_registration_spec.rb b/spec/features/oauth_registration_spec.rb
index cb8343b8065..0a35b5a7e42 100644
--- a/spec/features/oauth_registration_spec.rb
+++ b/spec/features/oauth_registration_spec.rb
@@ -3,126 +3,139 @@
require 'spec_helper'
RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection do
- include DeviseHelpers
include LoginHelpers
include TermsHelper
using RSpec::Parameterized::TableSyntax
+ let(:uid) { 'my-uid' }
+ let(:email) { 'user@example.com' }
+
around do |example|
with_omniauth_full_host { example.run }
end
- context 'when the user registers using single-sign on provider' do
- let(:uid) { 'my-uid' }
- let(:email) { 'user@example.com' }
-
- where(:provider, :additional_info) do
- :github | {}
- :twitter | {}
- :bitbucket | {}
- :gitlab | {}
- :google_oauth2 | {}
- :facebook | {}
- :cas3 | {}
- :auth0 | {}
- :authentiq | {}
- :salesforce | { extra: { email_verified: true } }
- :dingtalk | {}
- :alicloud | {}
+ where(:provider, :additional_info) do
+ :github | {}
+ :twitter | {}
+ :bitbucket | {}
+ :gitlab | {}
+ :google_oauth2 | {}
+ :facebook | {}
+ :cas3 | {}
+ :auth0 | {}
+ :authentiq | {}
+ :salesforce | { extra: { email_verified: true } }
+ :dingtalk | {}
+ :alicloud | {}
+ end
+
+ with_them do
+ before do
+ stub_omniauth_provider(provider)
+ stub_feature_flags(update_oauth_registration_flow: true)
end
- with_them do
+ context 'when block_auto_created_users is true' do
before do
- stub_omniauth_provider(provider)
- stub_feature_flags(update_oauth_registration_flow: true)
+ stub_omniauth_setting(block_auto_created_users: true)
end
- context 'when block_auto_created_users is true' do
- before do
- stub_omniauth_setting(block_auto_created_users: true)
- end
+ it 'redirects back to the sign-in page' do
+ register_via(provider, uid, email, additional_info: additional_info)
- it 'redirects back to the sign-in page' do
- register_via(provider, uid, email, additional_info: additional_info)
+ expect(page).to have_current_path new_user_session_path
+ expect(page).to have_content('Your account is pending approval')
+ end
+ end
- expect(page).to have_current_path new_user_session_path
- expect(page).to have_content('Your account is pending approval')
- end
+ context 'when block_auto_created_users is false' do
+ before do
+ stub_omniauth_setting(block_auto_created_users: false)
+ end
+
+ it 'redirects to the initial welcome path' do
+ register_via(provider, uid, email, additional_info: additional_info)
+
+ expect(page).to have_current_path users_sign_up_welcome_path
+ expect(page).to have_content('Welcome to GitLab, mockuser!')
end
- context 'when block_auto_created_users is false' do
+ context 'when terms are enforced' do
before do
- stub_omniauth_setting(block_auto_created_users: false)
+ enforce_terms
end
- it 'redirects to the initial welcome path' do
+ it 'auto accepts terms and redirects to the initial welcome path' do
register_via(provider, uid, email, additional_info: additional_info)
expect(page).to have_current_path users_sign_up_welcome_path
expect(page).to have_content('Welcome to GitLab, mockuser!')
end
+ end
- context 'when terms are enforced' do
- before do
- enforce_terms
- end
+ context 'when provider does not send a verified email address' do
+ let(:email) { 'temp-email-for-oauth@email.com' }
- it 'auto accepts terms and redirects to the initial welcome path' do
- register_via(provider, uid, email, additional_info: additional_info)
+ it 'redirects to the profile path' do
+ register_via(provider, uid, email, additional_info: additional_info)
- expect(page).to have_current_path users_sign_up_welcome_path
- expect(page).to have_content('Welcome to GitLab, mockuser!')
- end
+ expect(page).to have_current_path profile_path
+ expect(page).to have_content('Please complete your profile with email address')
end
+ end
- context 'when provider does not send a verified email address' do
- let(:email) { 'temp-email-for-oauth@email.com' }
+ context 'when registering via an invitation email' do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) { create(:group, name: 'Owned') }
+ let_it_be(:project) { create(:project, :repository, namespace: group) }
+
+ let(:invite_email) { generate(:email) }
+ let(:extra_params) { { invite_type: Emails::Members::INITIAL_INVITE } }
+ let(:group_invite) do
+ create(
+ :group_member, :invited,
+ group: group,
+ invite_email: invite_email,
+ created_by: owner
+ )
+ end
- it 'redirects to the profile path' do
- register_via(provider, uid, email, additional_info: additional_info)
+ before do
+ project.add_maintainer(owner)
+ group.add_owner(owner)
+ group_invite.generate_invite_token!
- expect(page).to have_current_path profile_path
- expect(page).to have_content('Please complete your profile with email address')
- end
+ mock_auth_hash(provider, uid, invite_email, additional_info: additional_info)
end
- context 'when registering via an invitation email' do
- let_it_be(:owner) { create(:user) }
- let_it_be(:group) { create(:group, name: 'Owned') }
- let_it_be(:project) { create(:project, :repository, namespace: group) }
-
- let(:invite_email) { generate(:email) }
- let(:extra_params) { { invite_type: Emails::Members::INITIAL_INVITE } }
- let(:group_invite) do
- create(
- :group_member, :invited,
- group: group,
- invite_email: invite_email,
- created_by: owner
- )
- end
-
- before do
- project.add_maintainer(owner)
- group.add_owner(owner)
- group_invite.generate_invite_token!
-
- mock_auth_hash(provider, uid, invite_email, additional_info: additional_info)
- end
-
- it 'redirects to the activity page with all the projects/groups invitations accepted' do
- visit invite_path(group_invite.raw_invite_token, extra_params)
- click_link_or_button "oauth-login-#{provider}"
- fill_in_welcome_form
-
- expect(page).to have_content('You have been granted Owner access to group Owned.')
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
- end
+ it 'redirects to the activity page with all the projects/groups invitations accepted' do
+ visit invite_path(group_invite.raw_invite_token, extra_params)
+ click_link_or_button "oauth-login-#{provider}"
+ fill_in_welcome_form
+
+ expect(page).to have_content('You have been granted Owner access to group Owned.')
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
end
end
end
end
+ context 'when update_oauth_registration_flow is disabled' do
+ before do
+ stub_omniauth_provider(:github)
+ stub_omniauth_setting(block_auto_created_users: false)
+ stub_feature_flags(update_oauth_registration_flow: false)
+
+ enforce_terms
+ end
+
+ it 'presents the terms page' do
+ register_via(:github, uid, email)
+
+ expect(page).to have_content('These are the terms')
+ end
+ end
+
def fill_in_welcome_form
select 'Software Developer', from: 'user_role'
click_button 'Get started!'
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 1d0db488751..6c860740354 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -51,11 +51,11 @@ RSpec.describe 'Profile > Password' do
end
context 'Password authentication unavailable' do
- before do
- gitlab_sign_in(user)
- end
-
context 'Regular user' do
+ before do
+ gitlab_sign_in(user)
+ end
+
let(:user) { create(:user) }
it 'renders 404 when password authentication is disabled for the web interface and Git' do
@@ -69,7 +69,22 @@ RSpec.describe 'Profile > Password' do
end
context 'LDAP user' do
+ include LdapHelpers
+
+ let(:ldap_settings) { { enabled: true } }
let(:user) { create(:omniauth_user, provider: 'ldapmain') }
+ let(:provider) { 'ldapmain' }
+ let(:provider_label) { 'Main LDAP' }
+
+ before do
+ stub_ldap_setting(ldap_settings)
+ stub_ldap_access(user, provider, provider_label)
+ sign_in_using_ldap!(user, provider_label, provider)
+ end
+
+ after(:all) do
+ Rails.application.reload_routes!
+ end
it 'renders 404' do
visit edit_profile_password_path
diff --git a/spec/features/profiles/two_factor_auths_spec.rb b/spec/features/profiles/two_factor_auths_spec.rb
index 9a58950b8f3..b4355f2d669 100644
--- a/spec/features/profiles/two_factor_auths_spec.rb
+++ b/spec/features/profiles/two_factor_auths_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe 'Two factor auths' do
include Spec::Support::Helpers::ModalHelpers
context 'when signed in' do
+ let(:invalid_current_pwd_msg) { 'You must provide a valid current password' }
+
before do
sign_in(user)
end
@@ -18,7 +20,7 @@ RSpec.describe 'Two factor auths' do
register_2fa(user.current_otp, '123')
- expect(page).to have_content('You must provide a valid current password')
+ expect(page).to have_selector('.gl-alert-title', text: invalid_current_pwd_msg, count: 1)
register_2fa(user.reload.current_otp, user.password)
@@ -76,7 +78,7 @@ RSpec.describe 'Two factor auths' do
click_button 'Disable'
end
- expect(page).to have_content('You must provide a valid current password')
+ expect(page).to have_selector('.gl-alert-title', text: invalid_current_pwd_msg, count: 1)
fill_in 'current_password', with: user.password
@@ -97,7 +99,7 @@ RSpec.describe 'Two factor auths' do
click_button 'Regenerate recovery codes'
- expect(page).to have_content('You must provide a valid current password')
+ expect(page).to have_selector('.gl-alert-title', text: invalid_current_pwd_msg, count: 1)
fill_in 'current_password', with: user.password
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index d887a367fcb..56a70f37779 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -5,10 +5,9 @@ require 'spec_helper'
RSpec.describe 'User edit profile' do
include Spec::Support::Helpers::Features::NotesHelpers
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
- stub_feature_flags(remove_user_attributes_projects: false)
sign_in(user)
visit(profile_path)
end
@@ -456,6 +455,8 @@ RSpec.describe 'User edit profile' do
end
context 'Remove status button' do
+ let(:user) { create(:user) }
+
before do
user.status = UserStatus.new(message: 'Eating bread', emoji: 'stuffed_flatbread')
@@ -495,45 +496,6 @@ RSpec.describe 'User edit profile' do
expect(page).to have_emoji('speech_balloon')
end
-
- context 'note header' do
- let(:project) { create(:project_empty_repo, :public) }
- let(:issue) { create(:issue, project: project) }
- let(:emoji) { "stuffed_flatbread" }
-
- before do
- project.add_guest(user)
- create(:user_status, user: user, message: 'Taking notes', emoji: emoji)
-
- visit(project_issue_path(project, issue))
-
- add_note("This is a comment")
- visit(project_issue_path(project, issue))
-
- wait_for_requests
- end
-
- it 'displays the status emoji' do
- first_note = page.find_all(".main-notes-list .timeline-entry").first
-
- expect(first_note).to have_emoji(emoji)
- end
-
- it 'clears the status emoji' do
- open_edit_status_modal
-
- page.within "#set-user-status-modal" do
- click_button 'Remove status'
- end
-
- visit(project_issue_path(project, issue))
- wait_for_requests
-
- first_note = page.find_all(".main-notes-list .timeline-entry").first
-
- expect(first_note).not_to have_css('.user-status-emoji')
- end
- end
end
context 'User time preferences', :js do
@@ -551,13 +513,13 @@ RSpec.describe 'User edit profile' do
it 'allows the user to select a time zone from a dropdown list of options' do
expect(page.find('.user-time-preferences .dropdown')).not_to have_css('.show')
- page.find('.user-time-preferences .js-timezone-dropdown').click
+ page.find('.user-time-preferences .dropdown').click
expect(page.find('.user-time-preferences .dropdown')).to have_css('.show')
- page.find("a", text: "Nuku'alofa").click
+ page.find("button", text: "Arizona").click
- expect(page).to have_field(:user_timezone, with: 'Pacific/Tongatapu', type: :hidden)
+ expect(page).to have_field(:user_timezone, with: 'America/Phoenix', type: :hidden)
end
it 'timezone defaults to empty' do
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 13a4c1b5912..93e5be18229 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -675,7 +675,7 @@ RSpec.describe 'File blob', :js do
expect(page).to have_content('This project is licensed under the MIT License.')
# shows a learn more link
- expect(page).to have_link('Learn more', href: 'http://choosealicense.com/licenses/mit/')
+ expect(page).to have_link('Learn more', href: 'https://opensource.org/licenses/MIT')
end
end
end
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index f198a1f42e2..5587b8abab3 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe 'Editing file blob', :js do
include TreeHelper
include BlobSpecHelpers
- let(:project) { create(:project, :public, :repository) }
- let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
let(:branch) { 'master' }
let(:file_path) { project.repository.ls_files(project.repository.root_ref)[1] }
let(:readme_file_path) { 'README.md' }
@@ -82,6 +82,34 @@ RSpec.describe 'Editing file blob', :js do
end
end
+ context 'blob edit toolbar' do
+ toolbar_buttons = [
+ "Add bold text",
+ "Add italic text",
+ "Add strikethrough text",
+ "Insert a quote",
+ "Insert code",
+ "Add a link",
+ "Add a bullet list",
+ "Add a numbered list",
+ "Add a checklist",
+ "Add a collapsible section",
+ "Add a table"
+ ]
+
+ before do
+ visit project_edit_blob_path(project, tree_join(branch, readme_file_path))
+ end
+
+ it "has defined set of toolbar buttons" do
+ buttons = page.all('.file-buttons .md-header-toolbar button[type="button"]')
+ expect(buttons.length).to eq(toolbar_buttons.length)
+ toolbar_buttons.each_with_index do |button_title, i|
+ expect(buttons[i]['title']).to include(button_title)
+ end
+ end
+ end
+
context 'from blob file path' do
before do
visit project_blob_path(project, tree_join(branch, file_path))
@@ -156,11 +184,14 @@ RSpec.describe 'Editing file blob', :js do
end
context 'as developer' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
let(:protected_branch) { 'protected-branch' }
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
project.repository.add_branch(user, protected_branch, 'master')
create(:protected_branch, project: project, name: protected_branch)
sign_in(user)
diff --git a/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb b/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
deleted file mode 100644
index d2774aa74c9..00000000000
--- a/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User creates new blob', :js do
- include WebIdeSpecHelpers
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :empty_repo) }
-
- before do
- stub_feature_flags(vscode_web_ide: false)
- end
-
- shared_examples 'creating a file' do
- it 'allows the user to add a new file in Web IDE' do
- visit project_path(project)
-
- click_link 'New file'
-
- wait_for_requests
-
- ide_create_new_file('dummy-file', content: "Hello world\n")
-
- ide_commit
-
- expect(page).to have_content('All changes are committed')
- expect(project.repository.blob_at('master', 'dummy-file').data).to eql("Hello world\n")
- end
- end
-
- describe 'as a maintainer' do
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- it_behaves_like 'creating a file'
- end
-
- describe 'as an admin' do
- let(:user) { create(:user, :admin) }
-
- before do
- sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
- end
-
- it_behaves_like 'creating a file'
- end
-
- describe 'as a developer' do
- before do
- project.add_developer(user)
- sign_in(user)
- visit project_path(project)
- end
-
- it 'does not allow pushing to the default branch' do
- expect(page).not_to have_content('New file')
- end
- end
-end
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 727f9aa486e..361a07ebd0b 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'Branches' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
let(:repository) { project.repository }
context 'logged in as developer' do
@@ -175,7 +175,7 @@ RSpec.describe 'Branches' do
search_for_branch('fix')
expect(page).not_to have_content('fix')
- expect(all('.all-branches').last).to have_selector('li', count: 0)
+ expect(all('.all-branches', wait: false).last).to have_selector('li', count: 0)
end
end
@@ -233,7 +233,7 @@ RSpec.describe 'Branches' do
end
context 'with one or more pipeline', :js do
- let(:project) { create(:project, :public, :empty_repo) }
+ let_it_be(:project) { create(:project, :public, :empty_repo) }
before do
sha = create_file(branch_name: "branch")
diff --git a/spec/features/projects/ci/editor_spec.rb b/spec/features/projects/ci/editor_spec.rb
index 8197fe46c7b..c96d5f5823f 100644
--- a/spec/features/projects/ci/editor_spec.rb
+++ b/spec/features/projects/ci/editor_spec.rb
@@ -135,6 +135,19 @@ RSpec.describe 'Pipeline Editor', :js do
end
end
+ describe 'Commit Form' do
+ it 'is preserved when changing tabs' do
+ find('#commit-message').set('message', clear: :backspace)
+ find('#source-branch-field').set('new_branch', clear: :backspace)
+
+ click_link 'Validate'
+ click_link 'Edit'
+
+ expect(find('#commit-message').value).to eq('message')
+ expect(find('#source-branch-field').value).to eq('new_branch')
+ end
+ end
+
describe 'Editor content' do
it 'user can reset their CI configuration' do
page.within('#source-editor-') do
diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index 608511ae5a5..8d5f62d8a06 100644
--- a/spec/features/projects/ci/lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -5,13 +5,16 @@ require 'spec_helper'
RSpec.describe 'CI Lint', :js do
include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:content_selector) { '.content .view-lines' }
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
sign_in(user)
visit project_ci_lint_path(project)
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
index bf0949443de..d486d8cf551 100644
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ b/spec/features/projects/environments/environment_metrics_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Environment > Metrics' do
include PrometheusHelpers
let(:user) { create(:user) }
- let(:project) { create(:prometheus_project, :repository) }
+ let(:project) { create(:project, :with_prometheus_integration, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:environment) { create(:environment, project: project) }
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index be4b21dfff4..dc6e496d081 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -97,6 +97,10 @@ RSpec.describe 'Environment' do
it 'does show deployments' do
expect(page).to have_link("#{build.name} (##{build.id})")
end
+
+ it 'shows a tooltip on the job name' do
+ expect(page).to have_css("[title=\"#{build.name} (##{build.id})\"].has-tooltip")
+ end
end
context 'when there is a failed deployment' do
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index b8c127f0078..24943e7dd0f 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -118,17 +118,28 @@ RSpec.describe 'Project fork' do
end
end
+ shared_examples "increments the fork counter on the source project's page" do
+ specify :sidekiq_might_not_need_inline do
+ create_forks
+
+ visit project_path(project)
+
+ expect(page).to have_css('.fork-count', text: 2)
+ end
+ end
+
it_behaves_like 'fork button on project page'
it_behaves_like 'create fork page', 'Fork project'
context 'fork form', :js do
let(:group) { create(:group) }
+ let(:group2) { create(:group) }
let(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
- def submit_form
+ def submit_form(group_obj = group)
find('[data-testid="select_namespace_dropdown"]').click
- find('[data-testid="select_namespace_dropdown_search_field"]').fill_in(with: group.name)
- click_button group.name
+ find('[data-testid="select_namespace_dropdown_search_field"]').fill_in(with: group_obj.name)
+ click_button group_obj.name
click_button 'Fork project'
end
@@ -166,5 +177,41 @@ RSpec.describe 'Project fork' do
expect(page).to have_content("#{group.name} / #{fork_name}")
end
end
+
+ context 'with cache_home_panel feature flag' do
+ before do
+ create(:group_member, :maintainer, user: user, group: group2 )
+ end
+
+ context 'when caching is enabled' do
+ before do
+ stub_feature_flags(cache_home_panel: project)
+ end
+
+ it_behaves_like "increments the fork counter on the source project's page"
+ end
+
+ context 'when caching is disabled' do
+ before do
+ stub_feature_flags(cache_home_panel: false)
+ end
+
+ it_behaves_like "increments the fork counter on the source project's page"
+ end
+ end
end
end
+
+private
+
+def create_fork(group_obj = group)
+ visit project_path(project)
+ find('.fork-btn').click
+ submit_form(group_obj)
+ wait_for_requests
+end
+
+def create_forks
+ create_fork
+ create_fork(group2)
+end
diff --git a/spec/features/projects/infrastructure_registry_spec.rb b/spec/features/projects/infrastructure_registry_spec.rb
index 27d0866bc69..aab1cec8762 100644
--- a/spec/features/projects/infrastructure_registry_spec.rb
+++ b/spec/features/projects/infrastructure_registry_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe 'Infrastructure Registry' do
it 'allows you to delete a module', :aggregate_failures do
# this is still using the package copy in the UI too
click_button('Remove package')
- click_button('Delete package')
+ click_button('Permanently delete')
expect(page).to have_content 'Package deleted successfully'
expect(page).not_to have_content(terraform_module.name)
diff --git a/spec/features/projects/labels/sort_labels_spec.rb b/spec/features/projects/labels/sort_labels_spec.rb
index ecbc4b524dc..f2f1acd2348 100644
--- a/spec/features/projects/labels/sort_labels_spec.rb
+++ b/spec/features/projects/labels/sort_labels_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Sort labels', :js do
it 'sorts by date' do
click_button 'Name'
- sort_options = find('ul.dropdown-menu').all('li').collect(&:text)
+ sort_options = find('ul[role="listbox"]').all('li').collect(&:text)
expect(sort_options[0]).to eq('Name')
expect(sort_options[1]).to eq('Name, descending')
@@ -37,7 +37,7 @@ RSpec.describe 'Sort labels', :js do
expect(sort_options[4]).to eq('Updated date')
expect(sort_options[5]).to eq('Oldest updated')
- click_button 'Name, descending'
+ find('li', text: 'Name, descending').click
# assert default sorting
within '.other-labels' do
diff --git a/spec/features/projects/milestones/milestones_sorting_spec.rb b/spec/features/projects/milestones/milestones_sorting_spec.rb
index c47350fb663..5ba4289fd11 100644
--- a/spec/features/projects/milestones/milestones_sorting_spec.rb
+++ b/spec/features/projects/milestones/milestones_sorting_spec.rb
@@ -42,10 +42,10 @@ RSpec.describe 'Milestones sorting', :js do
milestones_for_sort_by.each do |sort_by, expected_milestones|
within '[data-testid=milestone_sort_by_dropdown]' do
click_button selected_sort_order
- milestones = find('.gl-new-dropdown-contents').all('.gl-new-dropdown-item-text-wrapper p').map(&:text)
+ milestones = find('ul[role="listbox"]').all('li').map(&:text)
expect(milestones).to eq(ordered_milestones)
- click_button sort_by
+ find('li', text: sort_by).click
expect(page).to have_button(sort_by)
end
diff --git a/spec/features/projects/packages_spec.rb b/spec/features/projects/packages_spec.rb
index f518cc1fc63..bbe913cf1e5 100644
--- a/spec/features/projects/packages_spec.rb
+++ b/spec/features/projects/packages_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe 'Packages' do
it 'allows you to delete a package' do
find('[data-testid="delete-dropdown"]').click
find('[data-testid="action-delete"]').click
- click_button('Delete package')
+ click_button('Permanently delete')
expect(page).to have_content 'Package deleted successfully'
expect(page).not_to have_content(package.name)
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index dcc46f5d223..4ed0a11da38 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -11,6 +11,10 @@ RSpec.describe 'Pipeline Schedules', :js do
let(:scope) { nil }
let!(:user) { create(:user) }
+ before do
+ stub_feature_flags(pipeline_schedules_vue: false)
+ end
+
context 'logged in as the pipeline schedule owner' do
before do
project.add_developer(user)
diff --git a/spec/features/projects/pipelines/legacy_pipeline_spec.rb b/spec/features/projects/pipelines/legacy_pipeline_spec.rb
index 250a336469c..d93c951791d 100644
--- a/spec/features/projects/pipelines/legacy_pipeline_spec.rb
+++ b/spec/features/projects/pipelines/legacy_pipeline_spec.rb
@@ -735,6 +735,8 @@ RSpec.describe 'Pipeline', :js do
end
it 'displays the PipelineSchedule in an inactive state' do
+ stub_feature_flags(pipeline_schedules_vue: false)
+
visit project_pipeline_schedules_path(project)
page.click_link('Inactive')
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 51a6fbc4d36..0b43e13996f 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -860,6 +860,8 @@ RSpec.describe 'Pipeline', :js do
end
it 'displays the PipelineSchedule in an inactive state' do
+ stub_feature_flags(pipeline_schedules_vue: false)
+
visit project_pipeline_schedules_path(project)
page.click_link('Inactive')
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 404e51048bc..1190b0f3558 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -673,7 +673,7 @@ RSpec.describe 'Pipelines', :js do
end
context 'when variables are specified' do
- it 'creates a new pipeline with variables' do
+ it 'creates a new pipeline with variables', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/375552' do
page.within(find("[data-testid='ci-variable-row']")) do
find("[data-testid='pipeline-form-ci-variable-key']").set('key_name')
find("[data-testid='pipeline-form-ci-variable-value']").set('value')
@@ -701,7 +701,7 @@ RSpec.describe 'Pipelines', :js do
it { expect(page).to have_content('Missing CI config file') }
- it 'creates a pipeline after first request failed and a valid gitlab-ci.yml file is available when trying again' do
+ it 'creates a pipeline after first request failed and a valid gitlab-ci.yml file is available when trying again', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/375552' do
stub_ci_pipeline_to_return_yaml_file
expect do
diff --git a/spec/features/projects/releases/user_creates_release_spec.rb b/spec/features/projects/releases/user_creates_release_spec.rb
index d82c4229b71..4eb7581222e 100644
--- a/spec/features/projects/releases/user_creates_release_spec.rb
+++ b/spec/features/projects/releases/user_creates_release_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'User creates release', :js do
let_it_be(:user) { create(:user) }
let(:new_page_url) { new_project_release_path(project) }
+ let(:tag_name) { 'new-tag' }
before do
project.add_developer(user)
@@ -33,6 +34,8 @@ RSpec.describe 'User creates release', :js do
end
it 'defaults the "Create from" dropdown to the project\'s default branch' do
+ select_new_tag_name(tag_name)
+
expect(page.find('[data-testid="create-from-field"] .ref-selector button')).to have_content(project.default_branch)
end
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 63e0ee4a251..d9bdbf7aa1a 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
context 'for maintainer' do
let(:role) { :maintainer }
- context 'Deploy tokens' do
+ context 'Deploy tokens', :js do
let!(:deploy_token) { create(:deploy_token, projects: [project]) }
before do
diff --git a/spec/features/projects/settings/user_changes_default_branch_spec.rb b/spec/features/projects/settings/user_changes_default_branch_spec.rb
index 84e6c50cf61..508bbcc5327 100644
--- a/spec/features/projects/settings/user_changes_default_branch_spec.rb
+++ b/spec/features/projects/settings/user_changes_default_branch_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Projects > Settings > User changes default branch' do
- include Select2Helper
-
let(:user) { create(:user) }
before do
@@ -17,16 +15,21 @@ RSpec.describe 'Projects > Settings > User changes default branch' do
let(:project) { create(:project, :repository, namespace: user.namespace) }
it 'allows to change the default branch', :js do
+ dropdown_selector = '[data-testid="default-branch-dropdown"]'
# Otherwise, running JS may overwrite our change to project_default_branch
wait_for_requests
- select2('fix', from: '#project_default_branch')
+ expect(page).to have_selector(dropdown_selector)
+ find(dropdown_selector).click
+
+ fill_in 'Search branch', with: 'fix'
+ click_button 'fix'
page.within '#default-branch-settings' do
click_button 'Save changes'
end
- expect(find('#project_default_branch', visible: false).value).to eq 'fix'
+ expect(find("#{dropdown_selector} input", visible: false).value).to eq 'fix'
end
end
@@ -34,7 +37,7 @@ RSpec.describe 'Projects > Settings > User changes default branch' do
let(:project) { create(:project_empty_repo, namespace: user.namespace) }
it 'does not show default branch selector' do
- expect(page).not_to have_selector('#project_default_branch')
+ expect(page).not_to have_selector('[data-testid="default-branch-dropdown"]')
end
end
end
diff --git a/spec/features/projects/settings/user_transfers_a_project_spec.rb b/spec/features/projects/settings/user_transfers_a_project_spec.rb
index 6041dca305b..23e10a36cee 100644
--- a/spec/features/projects/settings/user_transfers_a_project_spec.rb
+++ b/spec/features/projects/settings/user_transfers_a_project_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Projects > Settings > User transfers a project', :js do
let(:group) { create(:group) }
before do
- stub_const('Gitlab::QueryLimiting::Transaction::THRESHOLD', 120)
+ allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(120)
group.add_owner(user)
sign_in(user)
diff --git a/spec/features/projects/settings/webhooks_settings_spec.rb b/spec/features/projects/settings/webhooks_settings_spec.rb
index d525544ac15..25752bcaf45 100644
--- a/spec/features/projects/settings/webhooks_settings_spec.rb
+++ b/spec/features/projects/settings/webhooks_settings_spec.rb
@@ -48,10 +48,10 @@ RSpec.describe 'Projects > Settings > Webhook Settings' do
expect(page).to have_content('Releases events')
end
- it 'create webhook' do
+ it 'create webhook', :js do
visit webhooks_path
- fill_in 'hook_url', with: url
+ fill_in 'URL', with: url
check 'Tag push events'
fill_in 'hook_push_events_branch_filter', with: 'master'
check 'Enable SSL verification'
@@ -66,12 +66,12 @@ RSpec.describe 'Projects > Settings > Webhook Settings' do
expect(page).to have_content('Job events')
end
- it 'edit existing webhook' do
+ it 'edit existing webhook', :js do
hook
visit webhooks_path
click_link 'Edit'
- fill_in 'hook_url', with: url
+ fill_in 'URL', with: url
check 'Enable SSL verification'
click_button 'Save changes'
diff --git a/spec/features/projects/show/user_interacts_with_stars_spec.rb b/spec/features/projects/show/user_interacts_with_stars_spec.rb
index e0dd4f65010..158b6aa9b46 100644
--- a/spec/features/projects/show/user_interacts_with_stars_spec.rb
+++ b/spec/features/projects/show/user_interacts_with_stars_spec.rb
@@ -13,6 +13,14 @@ RSpec.describe 'Projects > Show > User interacts with project stars' do
visit(project_path(project))
end
+ it 'retains the star count even after a page reload' do
+ star_project
+
+ reload_page
+
+ expect(page).to have_css('.star-count', text: 1)
+ end
+
it 'toggles the star' do
star_project
@@ -63,6 +71,10 @@ end
private
+def reload_page
+ visit current_path
+end
+
def star_project
click_button(_('Star'))
wait_for_requests
diff --git a/spec/features/projects/show/user_sees_collaboration_links_spec.rb b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
index 1440db141a6..c63427e56e6 100644
--- a/spec/features/projects/show/user_sees_collaboration_links_spec.rb
+++ b/spec/features/projects/show/user_sees_collaboration_links_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'Projects > Show > Collaboration links', :js do
using RSpec::Parameterized::TableSyntax
- let(:project) { create(:project, :repository, :public) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:user) { create(:user) }
before do
sign_in(user)
@@ -17,7 +17,7 @@ RSpec.describe 'Projects > Show > Collaboration links', :js do
end
context 'with developer user' do
- before do
+ before_all do
project.add_developer(user)
end
diff --git a/spec/features/projects/user_sorts_projects_spec.rb b/spec/features/projects/user_sorts_projects_spec.rb
index b9b28398279..c40f01f3aa1 100644
--- a/spec/features/projects/user_sorts_projects_spec.rb
+++ b/spec/features/projects/user_sorts_projects_spec.rb
@@ -24,7 +24,6 @@ RSpec.describe 'User sorts projects and order persists' do
end
it "is set on the group_canonical_path" do
- stub_feature_flags(group_overview_tabs_vue: false)
visit(group_canonical_path(group))
within '[data-testid=group_sort_by_dropdown]' do
@@ -33,7 +32,6 @@ RSpec.describe 'User sorts projects and order persists' do
end
it "is set on the details_group_path" do
- stub_feature_flags(group_overview_tabs_vue: false)
visit(details_group_path(group))
within '[data-testid=group_sort_by_dropdown]' do
@@ -42,7 +40,7 @@ RSpec.describe 'User sorts projects and order persists' do
end
end
- context "from explore projects" do
+ context "from explore projects", :js do
before do
sign_in(user)
visit(explore_projects_path)
@@ -50,10 +48,10 @@ RSpec.describe 'User sorts projects and order persists' do
first(:link, 'Updated date').click
end
- it_behaves_like "sort order persists across all views", 'Updated date', 'Updated date'
+ it_behaves_like "sort order persists across all views", 'Updated date', 'Updated'
end
- context 'from dashboard projects' do
+ context 'from dashboard projects', :js do
before do
sign_in(user)
visit(dashboard_projects_path)
@@ -66,29 +64,29 @@ RSpec.describe 'User sorts projects and order persists' do
context 'from group homepage', :js do
before do
- stub_feature_flags(group_overview_tabs_vue: false)
sign_in(user)
visit(group_canonical_path(group))
within '[data-testid=group_sort_by_dropdown]' do
find('button.gl-dropdown-toggle').click
- first(:button, 'Last created').click
+ first(:button, 'Created').click
+ wait_for_requests
end
end
- it_behaves_like "sort order persists across all views", "Created date", "Last created"
+ it_behaves_like "sort order persists across all views", "Created date", "Created"
end
context 'from group details', :js do
before do
- stub_feature_flags(group_overview_tabs_vue: false)
sign_in(user)
visit(details_group_path(group))
within '[data-testid=group_sort_by_dropdown]' do
find('button.gl-dropdown-toggle').click
- first(:button, 'Most stars').click
+ first(:button, 'Stars').click
+ wait_for_requests
end
end
- it_behaves_like "sort order persists across all views", "Stars", "Most stars"
+ it_behaves_like "sort order persists across all views", "Stars", "Stars"
end
end
diff --git a/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb
index fbb5c24f6e1..db2b3fc2f4b 100644
--- a/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Projects > Wiki > User views wiki in project page' do
end
context 'when repository is disabled for project' do
- let_it_be(:project) do
+ let(:project) do
create(:project,
:wiki_repo,
:repository_disabled,
@@ -17,16 +17,31 @@ RSpec.describe 'Projects > Wiki > User views wiki in project page' do
end
context 'when wiki homepage contains a link' do
- before do
- create(:wiki_page, wiki: project.wiki, title: 'home', content: '[some link](other-page)')
+ shared_examples 'wiki homepage contains a link' do
+ it 'displays the correct URL for the link' do
+ visit project_path(project)
+ expect(page).to have_link(
+ 'some link',
+ href: project_wiki_path(project, 'other-page')
+ )
+ end
end
- it 'displays the correct URL for the link' do
- visit project_path(project)
- expect(page).to have_link(
- 'some link',
- href: project_wiki_path(project, 'other-page')
- )
+ context 'when using markdown' do
+ before do
+ create(:wiki_page, wiki: project.wiki, title: 'home', content: '[some link](other-page)')
+ end
+
+ it_behaves_like 'wiki homepage contains a link'
+ end
+
+ context 'when using asciidoc' do
+ before do
+ create(:wiki_page, wiki: project.wiki, title: 'home', content: 'link:other-page[some link]',
+ format: :asciidoc)
+ end
+
+ it_behaves_like 'wiki homepage contains a link'
end
end
end
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index 389a51a10e0..174716d646d 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe 'Protected Branches', :js do
end
include_examples 'Deploy keys with protected branches' do
- let(:all_dropdown_sections) { %w(Roles Deploy\ Keys) }
+ let(:all_dropdown_sections) { ['Roles', 'Deploy Keys'] }
end
end
end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 482f3d62f36..cee0910aef7 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -160,16 +160,6 @@ RSpec.describe 'Runners' do
end
context 'shared runner text' do
- context 'when application settings have no shared_runners_text' do
- it 'user sees default shared runners description' do
- visit project_runners_path(project)
-
- page.within("[data-testid='shared-runners-description']") do
- expect(page).to have_content('The same shared runner executes code from multiple projects')
- end
- end
- end
-
context 'when application settings have shared_runners_text' do
let(:shared_runners_text) { 'custom **shared** runners description' }
let(:shared_runners_html) { 'custom shared runners description' }
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index e2c8708be78..50e6eb66466 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'User searches for code' do
context 'when signed in' do
before do
+ stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)
end
@@ -214,6 +215,7 @@ RSpec.describe 'User searches for code' do
let(:project) { create(:project, :public, :repository) }
before do
+ stub_feature_flags(search_page_vertical_nav: false)
visit(project_path(project))
end
diff --git a/spec/features/search/user_searches_for_comments_spec.rb b/spec/features/search/user_searches_for_comments_spec.rb
index 5185a2460dc..a6793bc3aa7 100644
--- a/spec/features/search/user_searches_for_comments_spec.rb
+++ b/spec/features/search/user_searches_for_comments_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'User searches for comments' do
let(:user) { create(:user) }
before do
+ stub_feature_flags(search_page_vertical_nav: false)
project.add_reporter(user)
sign_in(user)
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
index 2dceda09d7c..4ec2a9e6cff 100644
--- a/spec/features/search/user_searches_for_commits_spec.rb
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -3,13 +3,12 @@
require 'spec_helper'
RSpec.describe 'User searches for commits', :js do
- include CycleAnalyticsHelpers
-
let(:project) { create(:project, :repository) }
let(:sha) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
let(:user) { create(:user) }
before do
+ stub_feature_flags(search_page_vertical_nav: false)
project.add_reporter(user)
sign_in(user)
@@ -34,7 +33,12 @@ RSpec.describe 'User searches for commits', :js do
context 'when searching by message' do
it 'finds a commit and holds on /search page' do
- create_commit('Message referencing another sha: "deadbeef"', project, user, 'master')
+ project.repository.commit_files(
+ user,
+ message: 'Message referencing another sha: "deadbeef"',
+ branch_name: 'master',
+ actions: [{ action: :create, file_path: 'a/new.file', contents: 'new file' }]
+ )
submit_search('deadbeef')
diff --git a/spec/features/search/user_searches_for_issues_spec.rb b/spec/features/search/user_searches_for_issues_spec.rb
index c23a54594d4..51d2f355848 100644
--- a/spec/features/search/user_searches_for_issues_spec.rb
+++ b/spec/features/search/user_searches_for_issues_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'User searches for issues', :js do
before do
project.add_maintainer(user)
sign_in(user)
+ stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end
@@ -110,6 +111,7 @@ RSpec.describe 'User searches for issues', :js do
before do
stub_feature_flags(block_anonymous_global_searches: false)
+ stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end
@@ -127,6 +129,7 @@ RSpec.describe 'User searches for issues', :js do
context 'when block_anonymous_global_searches is enabled' do
before do
+ stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end
diff --git a/spec/features/search/user_searches_for_merge_requests_spec.rb b/spec/features/search/user_searches_for_merge_requests_spec.rb
index 61c61d793db..a4fbe3a6e59 100644
--- a/spec/features/search/user_searches_for_merge_requests_spec.rb
+++ b/spec/features/search/user_searches_for_merge_requests_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'User searches for merge requests', :js do
end
before do
+ stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/search/user_searches_for_milestones_spec.rb b/spec/features/search/user_searches_for_milestones_spec.rb
index 61f2e8e0c8f..6773059830c 100644
--- a/spec/features/search/user_searches_for_milestones_spec.rb
+++ b/spec/features/search/user_searches_for_milestones_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'User searches for milestones', :js do
before do
project.add_maintainer(user)
sign_in(user)
+ stub_feature_flags(search_page_vertical_nav: false)
visit(search_path)
end
diff --git a/spec/features/search/user_searches_for_projects_spec.rb b/spec/features/search/user_searches_for_projects_spec.rb
index 562da56275c..5902859d1f5 100644
--- a/spec/features/search/user_searches_for_projects_spec.rb
+++ b/spec/features/search/user_searches_for_projects_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'User searches for projects', :js do
context 'when signed out' do
context 'when block_anonymous_global_searches is disabled' do
before do
+ stub_feature_flags(search_page_vertical_nav: false)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit).and_return(1000)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit_unauthenticated).and_return(1000)
stub_feature_flags(block_anonymous_global_searches: false)
diff --git a/spec/features/search/user_searches_for_users_spec.rb b/spec/features/search/user_searches_for_users_spec.rb
index a5cf12fa068..e21a66fed92 100644
--- a/spec/features/search/user_searches_for_users_spec.rb
+++ b/spec/features/search/user_searches_for_users_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'User searches for users' do
let(:user3) { create(:user, username: 'gob_2018', name: 'George Oscar Bluth') }
before do
+ stub_feature_flags(search_page_vertical_nav: false)
sign_in(user1)
end
diff --git a/spec/features/search/user_searches_for_wiki_pages_spec.rb b/spec/features/search/user_searches_for_wiki_pages_spec.rb
index 9808383adb7..2e390309022 100644
--- a/spec/features/search/user_searches_for_wiki_pages_spec.rb
+++ b/spec/features/search/user_searches_for_wiki_pages_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'User searches for wiki pages', :js do
let!(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'directory/title', content: 'Some Wiki content') }
before do
+ stub_feature_flags(search_page_vertical_nav: false)
project.add_maintainer(user)
sign_in(user)
@@ -18,6 +19,10 @@ RSpec.describe 'User searches for wiki pages', :js do
include_examples 'search timeouts', 'wiki_blobs'
shared_examples 'search wiki blobs' do
+ before do
+ stub_feature_flags(search_page_vertical_nav: false)
+ end
+
it 'finds a page' do
find('[data-testid="project-filter"]').click
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 41288a34fb2..827e3984896 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe 'User uses header search field', :js do
end
before do
+ stub_feature_flags(search_page_vertical_nav: false)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).and_return(0)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit).and_return(1000)
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(:search_rate_limit_unauthenticated).and_return(1000)
diff --git a/spec/features/snippets/search_snippets_spec.rb b/spec/features/snippets/search_snippets_spec.rb
index 46bc3b7caad..69b9a0aa64d 100644
--- a/spec/features/snippets/search_snippets_spec.rb
+++ b/spec/features/snippets/search_snippets_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe 'Search Snippets' do
+ before do
+ stub_feature_flags(search_page_vertical_nav: false)
+ end
+
it 'user searches for snippets by title' do
public_snippet = create(:personal_snippet, :public, title: 'Beginning and Middle')
private_snippet = create(:personal_snippet, :private, title: 'Middle and End')
diff --git a/spec/features/tags/developer_creates_tag_spec.rb b/spec/features/tags/developer_creates_tag_spec.rb
index ca76a94092e..5657115fb3c 100644
--- a/spec/features/tags/developer_creates_tag_spec.rb
+++ b/spec/features/tags/developer_creates_tag_spec.rb
@@ -46,18 +46,6 @@ RSpec.describe 'Developer creates tag' do
end
end
- it 'with multiline release notes parses the release note as Markdown' do
- create_tag_in_form(tag: 'v4.0', ref: 'master', desc: "Awesome release notes\n\n- hello\n- world")
-
- expect(page).to have_current_path(
- project_tag_path(project, 'v4.0'), ignore_query: true)
- expect(page).to have_content 'v4.0'
- page.within '.description' do
- expect(page).to have_content 'Awesome release notes'
- expect(page).to have_selector('ul li', count: 2)
- end
- end
-
it 'opens dropdown for ref', :js do
click_link 'New tag'
ref_row = find('.form-group:nth-of-type(2) .col-sm-12')
@@ -73,19 +61,6 @@ RSpec.describe 'Developer creates tag' do
end
end
- context 'from new tag page' do
- before do
- visit new_project_tag_path(project)
- end
-
- it 'description has emoji autocomplete', :js do
- find('#release_description').native.send_keys('')
- fill_in 'release_description', with: ':'
-
- expect(page).to have_selector('.atwho-view')
- end
- end
-
def create_tag_in_form(tag:, ref:, message: nil, desc: nil)
click_link 'New tag'
fill_in 'tag_name', with: tag
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index 5317f586390..12d2f0a9bb6 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -2,14 +2,15 @@
require 'spec_helper'
-RSpec.describe 'Unsubscribe links', :sidekiq_might_not_need_inline do
+RSpec.describe 'Unsubscribe links', :sidekiq_inline do
include Warden::Test::Helpers
- let(:recipient) { create(:user) }
- let(:author) { create(:user) }
- let(:project) { create(:project, :public) }
- let(:params) { { title: 'A bug!', description: 'Fix it!', assignees: [recipient] } }
- let(:issue) { Issues::CreateService.new(project: project, current_user: author, params: params, spam_params: nil).execute }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:author) { create(:user).tap { |u| project.add_reporter(u) } }
+ let_it_be(:recipient) { create(:user) }
+
+ let(:params) { { title: 'A bug!', description: 'Fix it!', assignee_ids: [recipient.id] } }
+ let(:issue) { Issues::CreateService.new(project: project, current_user: author, params: params, spam_params: nil).execute[:issue] }
let(:mail) { ActionMailer::Base.deliveries.last }
let(:body) { Capybara::Node::Simple.new(mail.default_part_body.to_s) }
diff --git a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
index cbd2d30d726..02f9d57fcfe 100644
--- a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
+++ b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe 'User uploads avatar to profile' do
wait_for_all_requests
- data_uri = find('.avatar-image .avatar')['src']
+ data_uri = find('.avatar-image .gl-avatar')['src']
expect(page.find('.header-user-avatar')['src']).to eq data_uri
expect(page.find('[data-testid="sidebar-user-avatar"]')['src']).to eq data_uri
end
diff --git a/spec/features/user_opens_link_to_comment_spec.rb b/spec/features/user_opens_link_to_comment_spec.rb
index 3fb1505ff5b..59dea91c666 100644
--- a/spec/features/user_opens_link_to_comment_spec.rb
+++ b/spec/features/user_opens_link_to_comment_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'User opens link to comment', :js do
wait_for_requests
- expect(find('#discussion-preferences-dropdown')).to have_content('Sort or filter')
+ expect(find('#discussion-preferences-dropdown')).to have_content(_('Sort or filter'))
expect(page).not_to have_content('Something went wrong while fetching comments')
# Auto-switching to show all notes shouldn't be persisted
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index de53e722603..9b1a102f07b 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -349,7 +349,6 @@ RSpec.describe 'Signup' do
end
it 'redirects to step 2 of the signup process, sets the role and redirects back' do
- stub_feature_flags(about_your_company_registration_flow: false)
visit new_user_registration_path
fill_in_signup_form
diff --git a/spec/features/work_items/work_item_spec.rb b/spec/features/work_items/work_item_spec.rb
new file mode 100644
index 00000000000..686b82de868
--- /dev/null
+++ b/spec/features/work_items/work_item_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Work item', :js do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
+
+ context 'for signed in user' do
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+
+ visit project_work_items_path(project, work_items_path: work_item.id)
+ end
+
+ context 'in work item description' do
+ it 'shows GFM autocomplete', :aggregate_failures do
+ click_button "Edit description"
+
+ find('[aria-label="Description"]').send_keys("@#{user.username}")
+
+ wait_for_requests
+
+ page.within('.atwho-container') do
+ expect(page).to have_text(user.name)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index 8d3c375385a..18eecd0f073 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -319,6 +319,17 @@ RSpec.describe Ci::RunnersFinder do
end
end
+ context 'with :all_available membership' do
+ let(:membership) { :all_available }
+
+ it 'returns runners available to group' do
+ expect(subject).to match_array([runner_project_7, runner_project_6, runner_project_5,
+ runner_project_4, runner_project_3, runner_project_2,
+ runner_project_1, runner_sub_group_4, runner_sub_group_3,
+ runner_sub_group_2, runner_sub_group_1, runner_group, runner_instance])
+ end
+ end
+
context 'with unknown membership' do
let(:membership) { :unsupported }
@@ -400,11 +411,31 @@ RSpec.describe Ci::RunnersFinder do
with_them do
before do
- create(:group_member, user_permission, group: group, user: user)
+ create(:group_member, user_permission, group: sub_group_1, user: user)
+ end
+
+ context 'with :sub_group_1 as target group' do
+ let(:target_group) { sub_group_1 }
+
+ it 'returns no runners' do
+ is_expected.to be_empty
+ end
end
- it 'returns no runners' do
- expect(subject).to be_empty
+ context 'with :group as target group' do
+ let(:target_group) { group }
+
+ it 'returns no runners' do
+ is_expected.to be_empty
+ end
+
+ context 'with :all_available membership' do
+ let(:membership) { :all_available }
+
+ it 'returns no runners' do
+ expect(subject).to be_empty
+ end
+ end
end
end
end
diff --git a/spec/finders/clusters/agent_authorizations_finder_spec.rb b/spec/finders/clusters/agent_authorizations_finder_spec.rb
index 687906db0d7..2d90f32adc5 100644
--- a/spec/finders/clusters/agent_authorizations_finder_spec.rb
+++ b/spec/finders/clusters/agent_authorizations_finder_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe Clusters::AgentAuthorizationsFinder do
let_it_be(:subgroup2) { create(:group, parent: subgroup1) }
let_it_be(:bottom_level_group) { create(:group, parent: subgroup2) }
+ let_it_be(:non_ancestor_group) { create(:group, parent: top_level_group) }
+ let_it_be(:non_ancestor_project) { create(:project, namespace: non_ancestor_group) }
+ let_it_be(:non_ancestor_agent) { create(:cluster_agent, project: non_ancestor_project) }
+
let_it_be(:agent_configuration_project) { create(:project, namespace: subgroup1) }
let_it_be(:requesting_project, reload: true) { create(:project, namespace: bottom_level_group) }
@@ -56,6 +60,20 @@ RSpec.describe Clusters::AgentAuthorizationsFinder do
it { is_expected.to be_empty }
end
+ context 'agent configuration project shares a root namespace, but does not belong to an ancestor of the given project' do
+ let!(:project_authorization) { create(:agent_project_authorization, agent: non_ancestor_agent, project: requesting_project) }
+
+ it { is_expected.to match_array([project_authorization]) }
+
+ context 'agent_authorization_include_descendants feature flag is disabled' do
+ before do
+ stub_feature_flags(agent_authorization_include_descendants: false)
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
context 'with project authorizations present' do
let!(:authorization) { create(:agent_project_authorization, agent: production_agent, project: requesting_project) }
@@ -116,6 +134,20 @@ RSpec.describe Clusters::AgentAuthorizationsFinder do
end
end
+ context 'agent configuration project does not belong to an ancestor of the authorized group' do
+ let!(:group_authorization) { create(:agent_group_authorization, agent: non_ancestor_agent, group: bottom_level_group) }
+
+ it { is_expected.to match_array([group_authorization]) }
+
+ context 'agent_authorization_include_descendants feature flag is disabled' do
+ before do
+ stub_feature_flags(agent_authorization_include_descendants: false)
+ end
+
+ it { is_expected.to be_empty }
+ end
+ end
+
it_behaves_like 'access_as' do
let!(:authorization) { create(:agent_group_authorization, agent: production_agent, group: top_level_group, config: config) }
end
diff --git a/spec/finders/groups/accepting_group_transfers_finder_spec.rb b/spec/finders/groups/accepting_group_transfers_finder_spec.rb
index 1a6c6f9243b..06e6fa05892 100644
--- a/spec/finders/groups/accepting_group_transfers_finder_spec.rb
+++ b/spec/finders/groups/accepting_group_transfers_finder_spec.rb
@@ -117,19 +117,6 @@ RSpec.describe Groups::AcceptingGroupTransfersFinder do
expect(result).to contain_exactly(great_grandparent_group)
end
end
-
- context 'when the feature flag `include_groups_from_group_shares_in_group_transfer_locations` is turned off' do
- before do
- stub_feature_flags(include_groups_from_group_shares_in_group_transfer_locations: false)
- end
-
- it 'excludes the groups where the user has OWNER access through group shares' do
- expect(result).not_to include(
- shared_with_group_where_direct_owner_as_owner,
- subgroup_of_shared_with_group_where_direct_owner_as_owner
- )
- end
- end
end
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index deeca6132e0..349ffd09324 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -228,9 +228,9 @@ RSpec.describe MergeRequestsFinder do
end
describe ':label_name parameter' do
- let(:common_labels) { create_list(:label, 3) }
- let(:distinct_labels) { create_list(:label, 3) }
- let(:merge_requests) do
+ let_it_be(:common_labels) { create_list(:label, 3) }
+ let_it_be(:distinct_labels) { create_list(:label, 3) }
+ let_it_be(:merge_requests) do
common_attrs = {
source_project: project1, target_project: project1, author: user
}
@@ -496,20 +496,15 @@ RSpec.describe MergeRequestsFinder do
context 'filtering by approved by username' do
let(:params) { { approved_by_usernames: user2.username } }
+ where(:sort) { [nil] + %w(milestone merged_at merged_at_desc closed_at closed_at_desc) }
+
before do
create(:approval, merge_request: merge_request3, user: user2)
end
- it 'returns merge requests approved by that user' do
- merge_requests = described_class.new(user, params).execute
-
- expect(merge_requests).to contain_exactly(merge_request3)
- end
-
- context 'with sorting by milestone' do
- let(:params) { { approved_by_usernames: user2.username, sort: 'milestone' } }
-
+ with_them do
it 'returns merge requests approved by that user' do
+ params = { approved_by_usernames: user2.username, sort: sort }
merge_requests = described_class.new(user, params).execute
expect(merge_requests).to contain_exactly(merge_request3)
@@ -566,7 +561,7 @@ RSpec.describe MergeRequestsFinder do
end
context 'filtering by created_at/updated_at' do
- let(:new_project) { create(:project, forked_from_project: project1) }
+ let_it_be(:new_project) { create(:project, forked_from_project: project1) }
let!(:new_merge_request) do
create(:merge_request,
@@ -589,7 +584,7 @@ RSpec.describe MergeRequestsFinder do
target_project: new_project)
end
- before do
+ before_all do
new_project.add_maintainer(user)
end
@@ -651,10 +646,10 @@ RSpec.describe MergeRequestsFinder do
end
context 'filtering by the merge request deployments' do
- let(:gstg) { create(:environment, project: project4, name: 'gstg') }
- let(:gprd) { create(:environment, project: project4, name: 'gprd') }
+ let_it_be(:gstg) { create(:environment, project: project4, name: 'gstg') }
+ let_it_be(:gprd) { create(:environment, project: project4, name: 'gprd') }
- let(:mr1) do
+ let_it_be(:mr1) do
create(
:merge_request,
:simple,
@@ -665,7 +660,7 @@ RSpec.describe MergeRequestsFinder do
)
end
- let(:mr2) do
+ let_it_be(:mr2) do
create(
:merge_request,
:simple,
@@ -676,7 +671,7 @@ RSpec.describe MergeRequestsFinder do
)
end
- let(:deploy1) do
+ let_it_be(:deploy1) do
create(
:deployment,
:success,
@@ -688,7 +683,7 @@ RSpec.describe MergeRequestsFinder do
)
end
- let(:deploy2) do
+ let_it_be(:deploy2) do
create(
:deployment,
:success,
@@ -700,7 +695,7 @@ RSpec.describe MergeRequestsFinder do
)
end
- before do
+ before_all do
deploy1.link_merge_requests(MergeRequest.where(id: mr1.id))
deploy2.link_merge_requests(MergeRequest.where(id: mr2.id))
end
@@ -838,13 +833,13 @@ RSpec.describe MergeRequestsFinder do
end
context 'when projects require different access levels for merge requests' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
- let(:public_project) { create(:project, :public) }
- let(:internal) { create(:project, :internal) }
- let(:private_project) { create(:project, :private) }
- let(:public_with_private_repo) { create(:project, :public, :repository, :repository_private) }
- let(:internal_with_private_repo) { create(:project, :internal, :repository, :repository_private) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:internal) { create(:project, :internal) }
+ let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:public_with_private_repo) { create(:project, :public, :repository, :repository_private) }
+ let_it_be(:internal_with_private_repo) { create(:project, :internal, :repository, :repository_private) }
let(:merge_requests) { described_class.new(user, {}).execute }
@@ -855,7 +850,7 @@ RSpec.describe MergeRequestsFinder do
let!(:mr_internal_private_repo_access) { create(:merge_request, source_project: internal_with_private_repo) }
context 'with admin user' do
- let(:user) { create(:user, :admin) }
+ let_it_be(:user) { create(:user, :admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it 'returns all merge requests' do
@@ -973,7 +968,7 @@ RSpec.describe MergeRequestsFinder do
let_it_be(:labels) { create_list(:label, 2, project: project) }
let_it_be(:merge_requests) { create_list(:merge_request, 4, :unique_branches, author: user, target_project: project, source_project: project, labels: labels) }
- before do
+ before_all do
project.add_developer(user)
end
diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb
index 415bf796a72..6a6eebca778 100644
--- a/spec/finders/packages/nuget/package_finder_spec.rb
+++ b/spec/finders/packages/nuget/package_finder_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Packages::Nuget::PackageFinder do
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:project) { create(:project, namespace: subgroup) }
let_it_be_with_refind(:package1) { create(:nuget_package, project: project) }
- let_it_be(:package2) { create(:nuget_package, name: package1.name, version: '2.0.0', project: project) }
+ let_it_be(:package2) { create(:nuget_package, name: package1.name, version: '2.0.0-ABC', project: project) }
let_it_be(:package3) { create(:nuget_package, name: 'Another.Dummy.Package', project: project) }
let_it_be(:other_package_1) { create(:nuget_package, name: package1.name, version: package1.version) }
let_it_be(:other_package_2) { create(:nuget_package, name: package1.name, version: package2.version) }
@@ -43,7 +43,13 @@ RSpec.describe Packages::Nuget::PackageFinder do
end
context 'with valid version' do
- let(:package_version) { '2.0.0' }
+ let(:package_version) { '2.0.0-ABC' }
+
+ it { is_expected.to match_array([package2]) }
+ end
+
+ context 'with varying case version' do
+ let(:package_version) { '2.0.0-abC' }
it { is_expected.to match_array([package2]) }
end
diff --git a/spec/finders/personal_access_tokens_finder_spec.rb b/spec/finders/personal_access_tokens_finder_spec.rb
index f22bff62082..21380cb6632 100644
--- a/spec/finders/personal_access_tokens_finder_spec.rb
+++ b/spec/finders/personal_access_tokens_finder_spec.rb
@@ -7,6 +7,50 @@ RSpec.describe PersonalAccessTokensFinder do
described_class.new(options, current_user)
end
+ describe '# searches PATs' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:time_token) do
+ create(:personal_access_token, created_at: DateTime.new(2022, 01, 02),
+ last_used_at: DateTime.new(2022, 01, 02))
+ end
+
+ let_it_be(:name_token) { create(:personal_access_token, name: 'test_1') }
+
+ let_it_be(:impersonated_token) do
+ create(:personal_access_token, :impersonation,
+ created_at: DateTime.new(2022, 01, 02),
+ last_used_at: DateTime.new(2022, 01, 02),
+ name: 'imp_token'
+ )
+ end
+
+ shared_examples 'finding tokens by user and options' do
+ subject { finder(option, user).execute }
+
+ it 'finds exactly' do
+ subject
+
+ is_expected.to contain_exactly(*result)
+ end
+ end
+
+ context 'by' do
+ where(:option, :user, :result) do
+ { created_before: DateTime.new(2022, 01, 03) } | create(:admin) | lazy { [time_token, impersonated_token] }
+ { created_after: DateTime.new(2022, 01, 01) } | create(:admin) | lazy { [time_token, name_token, impersonated_token] }
+ { last_used_before: DateTime.new(2022, 01, 03) } | create(:admin) | lazy { [time_token, impersonated_token] }
+ { last_used_before: DateTime.new(2022, 01, 03) } | create(:admin) | lazy { [time_token, impersonated_token] }
+ { impersonation: true } | create(:admin) | lazy { [impersonated_token] }
+ { search: 'test' } | create(:admin) | lazy { [name_token] }
+ end
+
+ with_them do
+ it_behaves_like 'finding tokens by user and options'
+ end
+ end
+ end
+
describe '#execute' do
let(:user) { create(:user) }
let(:params) { {} }
diff --git a/spec/fixtures/api/schemas/board.json b/spec/fixtures/api/schemas/board.json
deleted file mode 100644
index 7c146647948..00000000000
--- a/spec/fixtures/api/schemas/board.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "type": "object",
- "required" : [
- "id"
- ],
- "properties" : {
- "id": { "type": "integer" },
- "name": { "type": "string" }
- }
-}
diff --git a/spec/fixtures/api/schemas/boards.json b/spec/fixtures/api/schemas/boards.json
deleted file mode 100644
index 117564ef77a..00000000000
--- a/spec/fixtures/api/schemas/boards.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "type": "array",
- "items": { "$ref": "board.json" }
-}
diff --git a/spec/fixtures/api/schemas/current-board.json b/spec/fixtures/api/schemas/current-board.json
deleted file mode 100644
index 2ddc038e908..00000000000
--- a/spec/fixtures/api/schemas/current-board.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "type": "object",
- "allOf": [
- { "$ref": "board.json" },
- {
- "required" : [
- "id",
- "name"
- ],
- "properties": {
- "id": { "type": "integer" },
- "name": { "type": "string" }
- }
- }
- ]
-}
diff --git a/spec/fixtures/api/schemas/ml/get_experiment.json b/spec/fixtures/api/schemas/ml/get_experiment.json
index cf8da7f999f..482455a89e1 100644
--- a/spec/fixtures/api/schemas/ml/get_experiment.json
+++ b/spec/fixtures/api/schemas/ml/get_experiment.json
@@ -6,18 +6,31 @@
"properties": {
"experiment": {
"type": "object",
- "required" : [
+ "required": [
"experiment_id",
"name",
"artifact_location",
"lifecycle_stage"
],
- "properties" : {
- "experiment_id": { "type": "string" },
- "name": { "type": "string" },
- "artifact_location": { "type": "string" },
- "lifecycle_stage": { "type": { "enum" : ["active", "deleted"] } }
+ "properties": {
+ "experiment_id": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "artifact_location": {
+ "type": "string"
+ },
+ "lifecycle_stage": {
+ "type": {
+ "enum": [
+ "active",
+ "deleted"
+ ]
+ }
+ }
}
}
}
-}
+} \ No newline at end of file
diff --git a/spec/fixtures/api/schemas/ml/list_experiments.json b/spec/fixtures/api/schemas/ml/list_experiments.json
new file mode 100644
index 00000000000..4c3e834abc6
--- /dev/null
+++ b/spec/fixtures/api/schemas/ml/list_experiments.json
@@ -0,0 +1,39 @@
+{
+ "type": "object",
+ "required": [
+ "experiments"
+ ],
+ "properties": {
+ "experiments": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+ "experiment_id",
+ "name",
+ "artifact_location",
+ "lifecycle_stage"
+ ],
+ "properties": {
+ "experiment_id": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "artifact_location": {
+ "type": "string"
+ },
+ "lifecycle_stage": {
+ "type": {
+ "enum": [
+ "active",
+ "deleted"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/spec/fixtures/api/schemas/public_api/v4/user/admin.json b/spec/fixtures/api/schemas/public_api/v4/user/admin.json
index 8d06e16848f..f0d3cf3ba0e 100644
--- a/spec/fixtures/api/schemas/public_api/v4/user/admin.json
+++ b/spec/fixtures/api/schemas/public_api/v4/user/admin.json
@@ -4,6 +4,7 @@
"id",
"username",
"email",
+ "commit_email",
"name",
"state",
"avatar_url",
@@ -12,6 +13,7 @@
"is_admin",
"bio",
"location",
+ "pronouns",
"skype",
"linkedin",
"twitter",
diff --git a/spec/fixtures/ci_secure_files/sample.cer b/spec/fixtures/ci_secure_files/sample.cer
new file mode 100644
index 00000000000..9cca06d53c9
--- /dev/null
+++ b/spec/fixtures/ci_secure_files/sample.cer
Binary files differ
diff --git a/spec/fixtures/ci_secure_files/sample.mobileprovision b/spec/fixtures/ci_secure_files/sample.mobileprovision
new file mode 100644
index 00000000000..89bf7246b75
--- /dev/null
+++ b/spec/fixtures/ci_secure_files/sample.mobileprovision
Binary files differ
diff --git a/spec/fixtures/ci_secure_files/sample.p12 b/spec/fixtures/ci_secure_files/sample.p12
new file mode 100644
index 00000000000..c74df26a8d4
--- /dev/null
+++ b/spec/fixtures/ci_secure_files/sample.p12
Binary files differ
diff --git a/spec/fixtures/git-cheat-sheet.pdf b/spec/fixtures/git-cheat-sheet.pdf
deleted file mode 100644
index 5ef905c0889..00000000000
--- a/spec/fixtures/git-cheat-sheet.pdf
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:35967a21e5d856eaea89d2e5dd55a5e3b5f4e1e4efe3b000ef6d60b31600f1d2
-size 7352215
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index f3fc69e4936..a03177ba85e 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -6957,6 +6957,12 @@
"duration": null,
"source": "push",
"merge_request_id": null,
+ "pipeline_metadata": {
+ "id": 2,
+ "pipeline_id": 36,
+ "project_id": 5,
+ "title": "Build pipeline"
+ },
"notes": [
{
"id": 2147483547,
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/ci_pipelines.ndjson b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/ci_pipelines.ndjson
index a9d04ec5d6d..0c19f23cc24 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/ci_pipelines.ndjson
+++ b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/ci_pipelines.ndjson
@@ -1,7 +1,7 @@
{"id":19,"project_id":5,"ref":"master","sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.763Z","updated_at":"2016-03-22T15:20:35.763Z","tag":null,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"duration":null,"stages":[{"id":24,"project_id":5,"pipeline_id":40,"name":"test","status":1,"created_at":"2016-03-22T15:44:44.772Z","updated_at":"2016-03-29T06:44:44.634Z","statuses":[{"id":79,"project_id":5,"status":"failed","finished_at":"2016-03-29T06:28:12.695Z","trace":"Sed culpa est et facere saepe vel id ab. Quas temporibus aut similique dolorem consequatur corporis aut praesentium. Cum officia molestiae sit earum excepturi.\n\nSint possimus aut ratione quia. Quis nesciunt ratione itaque illo. Tenetur est dolor assumenda possimus voluptatem quia minima. Accusamus reprehenderit ut et itaque non reiciendis incidunt.\n\nRerum suscipit quibusdam dolore nam omnis. Consequatur ipsa nihil ut enim blanditiis delectus. Nulla quis hic occaecati mollitia qui placeat. Quo rerum sed perferendis a accusantium consequatur commodi ut. Sit quae et cumque vel eius tempora nostrum.\n\nUllam dolorem et itaque sint est. Ea molestias quia provident dolorem vitae error et et. Ea expedita officiis iste non. Qui vitae odit saepe illum. Dolores enim ratione deserunt tempore expedita amet non neque.\n\nEligendi asperiores voluptatibus omnis repudiandae expedita distinctio qui aliquid. Autem aut doloremque distinctio ab. Nostrum sapiente repudiandae aspernatur ea et quae voluptas. Officiis perspiciatis nisi laudantium asperiores error eligendi ab. Eius quia amet magni omnis exercitationem voluptatum et.\n\nVoluptatem ullam labore quas dicta est ex voluptas. Pariatur ea modi voluptas consequatur dolores perspiciatis similique. Numquam in distinctio perspiciatis ut qui earum. Quidem omnis mollitia facere aut beatae. 
Ea est iure et voluptatem.","created_at":"2016-03-22T15:20:35.950Z","updated_at":"2016-03-29T06:28:12.696Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":40,"commands":"$ build command","job_id":null,"name":"test build 1","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null},{"id":80,"project_id":5,"status":"success","finished_at":null,"trace":"Impedit et optio nemo ipsa. Non ad non quis ut sequi laudantium omnis velit. Corporis a enim illo eos. Quia totam tempore inventore ad est.\n\nNihil recusandae cupiditate eaque voluptatem molestias sint. Consequatur id voluptatem cupiditate harum. Consequuntur iusto quaerat reiciendis aut autem libero est. Quisquam dolores veritatis rerum et sint maxime ullam libero. Id quas porro ut perspiciatis rem amet vitae.\n\nNemo inventore minus blanditiis magnam. Modi consequuntur nostrum aut voluptatem ex. Sunt rerum rem optio mollitia qui aliquam officiis officia. Aliquid eos et id aut minus beatae reiciendis.\n\nDolores non in temporibus dicta. Fugiat voluptatem est aspernatur expedita voluptatum nam qui. Quia et eligendi sit quae sint tempore exercitationem eos. Est sapiente corrupti quidem at. Qui magni odio repudiandae saepe tenetur optio dolore.\n\nEos placeat soluta at dolorem adipisci provident. Quo commodi id reprehenderit possimus quo tenetur. Ipsum et quae eligendi laborum. Et qui nesciunt at quasi quidem voluptatem cum rerum. Excepturi non facilis aut sunt vero sed.\n\nQui explicabo ratione ut eligendi recusandae. Quis quasi quas molestiae consequatur voluptatem et voluptatem. 
Ex repellat saepe occaecati aperiam ea eveniet dignissimos facilis.","created_at":"2016-03-22T15:20:35.966Z","updated_at":"2016-03-22T15:20:35.966Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":40,"commands":"$ build command","job_id":null,"name":"test build 2","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null}]}]}
{"id":20,"project_id":5,"ref":"master","sha":"ce84140e8b878ce6e7c4d298c7202ff38170e3ac","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.763Z","updated_at":"2016-03-22T15:20:35.763Z","tag":false,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"duration":null,"stages":[],"source":"external_pull_request_event","external_pull_request":{"id":3,"pull_request_iid":4,"source_branch":"feature","target_branch":"master","source_repository":"the-repository","target_repository":"the-repository","source_sha":"ce84140e8b878ce6e7c4d298c7202ff38170e3ac","target_sha":"a09386439ca39abe575675ffd4b89ae824fec22f","status":"open","created_at":"2016-03-22T15:20:35.763Z","updated_at":"2016-03-22T15:20:35.763Z"}}
{"id":26,"project_id":5,"ref":"master","sha":"048721d90c449b244b7b4c53a9186b04330174ec","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.757Z","updated_at":"2016-03-22T15:20:35.757Z","tag":false,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"duration":null,"source":"merge_request_event","merge_request_id":27,"stages":[{"id":21,"project_id":5,"pipeline_id":37,"name":"test","status":1,"created_at":"2016-03-22T15:44:44.772Z","updated_at":"2016-03-29T06:44:44.634Z","statuses":[{"id":74,"project_id":5,"status":"success","finished_at":null,"trace":"Ad ut quod repudiandae iste dolor doloribus. Adipisci consequuntur deserunt omnis quasi eveniet et sed fugit. Aut nemo omnis molestiae impedit ex consequatur ducimus. Voluptatum exercitationem quia aut est et hic dolorem.\n\nQuasi repellendus et eaque magni eum facilis. Dolorem aperiam nam nihil pariatur praesentium ad aliquam. Commodi enim et eos tenetur. Odio voluptatibus laboriosam mollitia rerum exercitationem magnam consequuntur. Tenetur ea vel eum corporis.\n\nVoluptatibus optio in aliquid est voluptates. Ad a ut ab placeat vero blanditiis. Earum aspernatur quia beatae expedita voluptatem dignissimos provident. Quis minima id nemo ut aut est veritatis provident.\n\nRerum voluptatem quidem eius maiores magnam veniam. Voluptatem aperiam aut voluptate et nulla deserunt voluptas. Quaerat aut accusantium laborum est dolorem architecto reiciendis. Aliquam asperiores doloribus omnis maxime enim nesciunt. Eum aut rerum repellendus debitis et ut eius.\n\nQuaerat assumenda ea sit consequatur autem in. Cum eligendi voluptatem quo sed. Ut fuga iusto cupiditate autem sint.\n\nOfficia totam officiis architecto corporis molestiae amet ut. Tempora sed dolorum rerum omnis voluptatem accusantium sit eum. 
Quia debitis ipsum quidem aliquam inventore sunt consequatur qui.","created_at":"2016-03-22T15:20:35.846Z","updated_at":"2016-03-22T15:20:35.846Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":37,"commands":"$ build command","job_id":null,"name":"test build 2","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null},{"id":73,"project_id":5,"status":"canceled","finished_at":null,"trace":null,"created_at":"2016-03-22T15:20:35.842Z","updated_at":"2016-03-22T15:20:35.842Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":37,"commands":"$ build command","job_id":null,"name":"test build 1","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null}]}],"merge_request":{"id":27,"target_branch":"feature","source_branch":"feature_conflict","source_project_id":2147483547,"author_id":1,"assignee_id":null,"title":"MR1","created_at":"2016-06-14T15:02:36.568Z","updated_at":"2016-06-14T15:02:56.815Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":9,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"diff_head_sha":"HEAD","source_branch_sha":"ABCD","target_branch_sha":"DCBA","merge_params":{"force_remove_source_branch":null}}}
-{"id":36,"project_id":5,"ref":null,"sha":"sha-notes","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.755Z","updated_at":"2016-03-22T15:20:35.755Z","tag":null,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"user_id":2147483547,"duration":null,"source":"push","merge_request_id":null,"notes":[{"id":2147483547,"note":"Natus rerum qui dolorem dolorum voluptas.","noteable_type":"Commit","author_id":1,"created_at":"2016-03-22T15:19:59.469Z","updated_at":"2016-03-22T15:19:59.469Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":"be93687618e4b132087f430a4d8fc3a609c9b77c","noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"}}],"stages":[{"id":11,"project_id":5,"pipeline_id":36,"name":"test","status":1,"created_at":"2016-03-22T15:44:44.772Z","updated_at":"2016-03-29T06:44:44.634Z","statuses":[{"id":71,"project_id":5,"status":"failed","finished_at":"2016-03-29T06:28:12.630Z","trace":null,"created_at":"2016-03-22T15:20:35.772Z","updated_at":"2016-03-29T06:28:12.634Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":36,"commands":"$ build command","job_id":null,"name":"test build 1","deploy":false,"options":{"image":"busybox:latest"},"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"stage_id":11,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null,"type":"Ci::Build","token":"abcd","artifacts_file_store":1,"artifacts_metadata_store":1,"artifacts_size":10},{"id":72,"project_id":5,"status":"success","finished_at":null,"trace":"Porro ea qui ut dolores. Labore ab nemo explicabo aspernatur quis voluptates corporis. Et quasi delectus est sit aperiam perspiciatis asperiores. Repudiandae cum aut consectetur accusantium officia sunt.\n\nQuidem dolore iusto quaerat ut aut inventore et molestiae. Libero voluptates atque nemo qui. 
Nulla temporibus ipsa similique facere.\n\nAliquam ipsam perferendis qui fugit accusantium omnis id voluptatum. Dignissimos aliquid dicta eos voluptatem assumenda quia. Sed autem natus unde dolor et non nisi et. Consequuntur nihil consequatur rerum est.\n\nSimilique neque est iste ducimus qui fuga cupiditate. Libero autem est aut fuga. Consectetur natus quis non ducimus ut dolore. Magni voluptatibus eius et maxime aut.\n\nAd officiis tempore voluptate vitae corrupti explicabo labore est. Consequatur expedita et sunt nihil aut. Deleniti porro iusto molestiae et beatae.\n\nDeleniti modi nulla qui et labore sequi corrupti. Qui voluptatem assumenda eum cupiditate et. Nesciunt ipsam ut ea possimus eum. Consectetur quidem suscipit atque dolore itaque voluptatibus et cupiditate.","created_at":"2016-03-22T15:20:35.777Z","updated_at":"2016-03-22T15:20:35.777Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":36,"commands":"$ deploy command","job_id":null,"name":"test build 2","deploy":false,"options":null,"allow_failure":false,"stage":"deploy","trigger_request_id":null,"stage_idx":1,"stage_id":12,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null}]},{"id":12,"project_id":5,"pipeline_id":36,"name":"deploy","status":2,"created_at":"2016-03-22T15:45:45.772Z","updated_at":"2016-03-29T06:45:45.634Z"}]}
+{"id":36,"project_id":5,"ref":null,"sha":"sha-notes","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.755Z","updated_at":"2016-03-22T15:20:35.755Z","tag":null,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"user_id":2147483547,"duration":null,"source":"push","merge_request_id":null,"pipeline_metadata": {"id": 2, "project_id": 5, "pipeline_id": 36, "title": "Build pipeline"},"notes":[{"id":2147483547,"note":"Natus rerum qui dolorem dolorum voluptas.","noteable_type":"Commit","author_id":1,"created_at":"2016-03-22T15:19:59.469Z","updated_at":"2016-03-22T15:19:59.469Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":"be93687618e4b132087f430a4d8fc3a609c9b77c","noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"}}],"stages":[{"id":11,"project_id":5,"pipeline_id":36,"name":"test","status":1,"created_at":"2016-03-22T15:44:44.772Z","updated_at":"2016-03-29T06:44:44.634Z","statuses":[{"id":71,"project_id":5,"status":"failed","finished_at":"2016-03-29T06:28:12.630Z","trace":null,"created_at":"2016-03-22T15:20:35.772Z","updated_at":"2016-03-29T06:28:12.634Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":36,"commands":"$ build command","job_id":null,"name":"test build 1","deploy":false,"options":{"image":"busybox:latest"},"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"stage_id":11,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null,"type":"Ci::Build","token":"abcd","artifacts_file_store":1,"artifacts_metadata_store":1,"artifacts_size":10},{"id":72,"project_id":5,"status":"success","finished_at":null,"trace":"Porro ea qui ut dolores. Labore ab nemo explicabo aspernatur quis voluptates corporis. Et quasi delectus est sit aperiam perspiciatis asperiores. 
Repudiandae cum aut consectetur accusantium officia sunt.\n\nQuidem dolore iusto quaerat ut aut inventore et molestiae. Libero voluptates atque nemo qui. Nulla temporibus ipsa similique facere.\n\nAliquam ipsam perferendis qui fugit accusantium omnis id voluptatum. Dignissimos aliquid dicta eos voluptatem assumenda quia. Sed autem natus unde dolor et non nisi et. Consequuntur nihil consequatur rerum est.\n\nSimilique neque est iste ducimus qui fuga cupiditate. Libero autem est aut fuga. Consectetur natus quis non ducimus ut dolore. Magni voluptatibus eius et maxime aut.\n\nAd officiis tempore voluptate vitae corrupti explicabo labore est. Consequatur expedita et sunt nihil aut. Deleniti porro iusto molestiae et beatae.\n\nDeleniti modi nulla qui et labore sequi corrupti. Qui voluptatem assumenda eum cupiditate et. Nesciunt ipsam ut ea possimus eum. Consectetur quidem suscipit atque dolore itaque voluptatibus et cupiditate.","created_at":"2016-03-22T15:20:35.777Z","updated_at":"2016-03-22T15:20:35.777Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":36,"commands":"$ deploy command","job_id":null,"name":"test build 2","deploy":false,"options":null,"allow_failure":false,"stage":"deploy","trigger_request_id":null,"stage_idx":1,"stage_id":12,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null}]},{"id":12,"project_id":5,"pipeline_id":36,"name":"deploy","status":2,"created_at":"2016-03-22T15:45:45.772Z","updated_at":"2016-03-29T06:45:45.634Z"}]}
{"id":38,"iid":1,"project_id":5,"ref":"master","sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.759Z","updated_at":"2016-03-22T15:20:35.759Z","tag":null,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"duration":null,"stages":[{"id":22,"project_id":5,"pipeline_id":38,"name":"test","status":1,"created_at":"2016-03-22T15:44:44.772Z","updated_at":"2016-03-29T06:44:44.634Z","statuses":[{"id":76,"project_id":5,"status":"success","finished_at":null,"trace":"Et rerum quia ea cumque ut modi non. Libero eaque ipsam architecto maiores expedita deleniti. Ratione quia qui est id.\n\nQuod sit officiis sed unde inventore veniam quisquam velit. Ea harum cum quibusdam quisquam minima quo possimus non. Temporibus itaque aliquam aut rerum veritatis at.\n\nMagnam ipsum eius recusandae qui quis sit maiores eum. Et animi iusto aut itaque. Doloribus harum deleniti nobis accusantium et libero.\n\nRerum fuga perferendis magni commodi officiis id repudiandae. Consequatur ratione consequatur suscipit facilis sunt iure est dicta. Qui unde quasi facilis et quae nesciunt. Magnam iste et nobis officiis tenetur. Aspernatur quo et temporibus non in.\n\nNisi rerum velit est ad enim sint molestiae consequuntur. Quaerat nisi nesciunt quasi officiis. Possimus non blanditiis laborum quos.\n\nRerum laudantium facere animi qui. Ipsa est iusto magnam nihil. Enim omnis occaecati non dignissimos ut recusandae eum quasi. 
Qui maxime dolor et nemo voluptates incidunt quia.","created_at":"2016-03-22T15:20:35.882Z","updated_at":"2016-03-22T15:20:35.882Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":38,"commands":"$ build command","job_id":null,"name":"test build 2","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null},{"id":75,"project_id":5,"status":"failed","finished_at":null,"trace":"Sed et iste recusandae dicta corporis. Sunt alias porro fugit sunt. Fugiat omnis nihil dignissimos aperiam explicabo doloremque sit aut. Harum fugit expedita quia rerum ut consequatur laboriosam aliquam.\n\nNatus libero ut ut tenetur earum. Tempora omnis autem omnis et libero dolores illum autem. Deleniti eos sunt mollitia ipsam. Cum dolor repellendus dolorum sequi officia. Ullam sunt in aut pariatur excepturi.\n\nDolor nihil debitis et est eos. Cumque eos eum saepe ducimus autem. Alias architecto consequatur aut pariatur possimus. Aut quos aut incidunt quam velit et. Quas voluptatum ad dolorum dignissimos.\n\nUt voluptates consectetur illo et. Est commodi accusantium vel quo. Eos qui fugiat soluta porro.\n\nRatione possimus alias vel maxime sint totam est repellat. Ipsum corporis eos sint voluptatem eos odit. Temporibus libero nulla harum eligendi labore similique ratione magnam. Suscipit sequi in omnis neque.\n\nLaudantium dolor amet omnis placeat mollitia aut molestiae. Aut rerum similique ipsum quod illo quas unde. Sunt aut veritatis eos omnis porro. Rem veritatis mollitia praesentium dolorem. 
Consequatur sequi ad cumque earum omnis quia necessitatibus.","created_at":"2016-03-22T15:20:35.864Z","updated_at":"2016-03-22T15:20:35.864Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":38,"commands":"$ build command","job_id":null,"name":"test build 1","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null}]}]}
{"id":39,"project_id":5,"ref":"master","sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.761Z","updated_at":"2016-03-22T15:20:35.761Z","tag":null,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"duration":null,"stages":[{"id":23,"project_id":5,"pipeline_id":39,"name":"test","status":1,"created_at":"2016-03-22T15:44:44.772Z","updated_at":"2016-03-29T06:44:44.634Z","statuses":[{"id":78,"project_id":5,"status":"success","finished_at":null,"trace":"Dolorem deserunt quas quia error hic quo cum vel. Natus voluptatem cumque expedita numquam odit. Eos expedita nostrum corporis consequatur est recusandae.\n\nCulpa blanditiis rerum repudiandae alias voluptatem. Velit iusto est ullam consequatur doloribus porro. Corporis voluptas consectetur est veniam et quia quae.\n\nEt aut magni fuga nesciunt officiis molestias. Quaerat et nam necessitatibus qui rerum. Architecto quia officiis voluptatem laborum est recusandae. Quasi ducimus soluta odit necessitatibus labore numquam dignissimos. Quia facere sint temporibus inventore sunt nihil saepe dolorum.\n\nFacere dolores quis dolores a. Est minus nostrum nihil harum. Earum laborum et ipsum unde neque sit nemo. Corrupti est consequatur minima fugit. Illum voluptatem illo error ducimus officia qui debitis.\n\nDignissimos porro a autem harum aut. Aut id reprehenderit et exercitationem. Est et quisquam ipsa temporibus molestiae. Architecto natus dolore qui fugiat incidunt. Autem odit veniam excepturi et voluptatibus culpa ipsum eos.\n\nAmet quo quisquam dignissimos soluta modi dolores. Sint omnis eius optio corporis dolor. 
Eligendi animi porro quia placeat ut.","created_at":"2016-03-22T15:20:35.927Z","updated_at":"2016-03-22T15:20:35.927Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":39,"commands":"$ build command","job_id":null,"name":"test build 2","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null},{"id":77,"project_id":5,"status":"failed","finished_at":null,"trace":"Rerum ut et suscipit est perspiciatis. Inventore debitis cum eius vitae. Ex incidunt id velit aut quo nisi. Laboriosam repellat deserunt eius reiciendis architecto et. Est harum quos nesciunt nisi consectetur.\n\nAlias esse omnis sint officia est consequatur in nobis. Dignissimos dolorum vel eligendi nesciunt dolores sit. Veniam mollitia ducimus et exercitationem molestiae libero sed. Atque omnis debitis laudantium voluptatibus qui. Repellendus tempore est commodi pariatur.\n\nExpedita voluptate illum est alias non. Modi nesciunt ab assumenda laborum nulla consequatur molestias doloremque. Magnam quod officia vel explicabo accusamus ut voluptatem incidunt. Rerum ut aliquid ullam saepe. Est eligendi debitis beatae blanditiis reiciendis.\n\nQui fuga sit dolores libero maiores et suscipit. Consectetur asperiores omnis minima impedit eos fugiat. Similique omnis nisi sed vero inventore ipsum aliquam exercitationem.\n\nBlanditiis magni iure dolorum omnis ratione delectus molestiae. Atque officia dolor voluptatem culpa quod. Incidunt suscipit quidem possimus veritatis non vel. Iusto aliquid et id quia quasi.\n\nVel facere velit blanditiis incidunt cupiditate sed maiores consequuntur. Quasi quia dicta consequuntur et quia voluptatem iste id. 
Incidunt et rerum fuga esse sint.","created_at":"2016-03-22T15:20:35.905Z","updated_at":"2016-03-22T15:20:35.905Z","started_at":null,"runner_id":null,"coverage":null,"commit_id":39,"commands":"$ build command","job_id":null,"name":"test build 1","deploy":false,"options":null,"allow_failure":false,"stage":"test","trigger_request_id":null,"stage_idx":1,"tag":null,"ref":"master","user_id":null,"target_url":null,"description":null,"erased_by_id":null,"erased_at":null}]}]}
{"id":41,"project_id":5,"ref":"master","sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","before_sha":null,"push_data":null,"created_at":"2016-03-22T15:20:35.763Z","updated_at":"2016-03-22T15:20:35.763Z","tag":null,"yaml_errors":null,"committed_at":null,"status":"failed","started_at":null,"finished_at":null,"duration":null,"stages":[]}
diff --git a/spec/fixtures/markdown/markdown_golden_master_examples.yml b/spec/fixtures/markdown/markdown_golden_master_examples.yml
index 495d00026d7..6a1e75348cf 100644
--- a/spec/fixtures/markdown/markdown_golden_master_examples.yml
+++ b/spec/fixtures/markdown/markdown_golden_master_examples.yml
@@ -427,6 +427,14 @@
html: |-
<a class="no-attachment-icon" href="http://localhost:8080/png/U9nJK73CoKnELT2rKt3AJx9IS2mjoKZDAybCJYp9pCzJ24ejB4qjBk5I0Cagw09LWPLZKLTSa9zNdCe5L8bcO5u-K6MHGY8kWo7ARNHr2QY7MW00AeWxTG00" target="_blank" rel="noopener noreferrer" data-diagram="plantuml" data-diagram-src="data:text/plain;base64,ICBBbGljZSAtPiBCb2I6IEF1dGhlbnRpY2F0aW9uIFJlcXVlc3QKICBCb2IgLS0+IEFsaWNlOiBBdXRoZW50aWNhdGlvbiBSZXNwb25zZQoKICBBbGljZSAtPiBCb2I6IEFub3RoZXIgYXV0aGVudGljYXRpb24gUmVxdWVzdAogIEFsaWNlIDwtLSBCb2I6IEFub3RoZXIgYXV0aGVudGljYXRpb24gUmVzcG9uc2UK"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" decoding="async" class="lazy" data-src="http://localhost:8080/png/U9nJK73CoKnELT2rKt3AJx9IS2mjoKZDAybCJYp9pCzJ24ejB4qjBk5I0Cagw09LWPLZKLTSa9zNdCe5L8bcO5u-K6MHGY8kWo7ARNHr2QY7MW00AeWxTG00"></a>
+- name: diagram_plantuml_unicode
+ markdown: |-
+ ```plantuml
+ A -> B : Text with norwegian characters: æøå
+ ```
+ html: |-
+ <a class="no-attachment-icon" href="http://localhost:8080/png/U9npLD2rKt1Ii588IQqeKIZFBCbGoCilAazDpqpCKqZEI2nAJ2v9BIgsKZYyxF2Emqkv07hO4WG0" target="_blank" rel="noopener noreferrer" data-diagram="plantuml" data-diagram-src="data:text/plain;base64,QSAtPiBCIDogVGV4dCB3aXRoIG5vcndlZ2lhbiBjaGFyYWN0ZXJzOiDDpsO4w6UK"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" decoding="async" class="lazy" data-src="http://localhost:8080/png/U9npLD2rKt1Ii588IQqeKIZFBCbGoCilAazDpqpCKqZEI2nAJ2v9BIgsKZYyxF2Emqkv07hO4WG0"></a>
+
- name: div
markdown: |-
<div>plain text</div>
@@ -762,18 +770,18 @@
# responsibility of unit tests. These tests are about the structure of the HTML.
uri_substitution: *uri_substitution
data_attribute_id_substitution:
- - regex: '(data-user|data-project|data-issue|data-iid|data-merge-request|data-milestone)(=")(\d+?)(")'
+ - regex: '(data-user|data-project|data-issue|data-iid|data-merge-request|data-milestone|data-label)(=")(\d+?)(")'
replacement: '\1\2ID\4'
text_attribute_substitution:
- - regex: '(title)(=")(.+?)(")'
+ - regex: '(title)(=")([^"]*)(")'
replacement: '\1\2TEXT\4'
path_attribute_id_substitution:
- regex: '(group|project)(\d+)'
replacement: '\1ID'
markdown: |-
- Hi @gfm_user - thank you for reporting this bug (#1) we hope to fix it in %1.1 as part of !1
+ Hi @gfm_user - thank you for reporting this ~"UX bug" (#1) we hope to fix it in %1.1 as part of !1
html: |-
- <p data-sourcepos="1:1-1:92" dir="auto">Hi <a href="/gfm_user" data-reference-type="user" data-user="1" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="John Doe1">@gfm_user</a> - thank you for reporting this bug (<a href="/group1/project1/-/issues/1" data-reference-type="issue" data-original="#1" data-link="false" data-link-reference="false" data-project="11" data-issue="11" data-project-path="group1/project1" data-iid="1" data-issue-type="issue" data-container="body" data-placement="top" title="My title 1" class="gfm gfm-issue">#1</a>) we hope to fix it in <a href="/group1/project1/-/milestones/1" data-reference-type="milestone" data-original="%1.1" data-link="false" data-link-reference="false" data-project="11" data-milestone="11" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%1.1</a> as part of <a href="/group1/project1/-/merge_requests/1" data-reference-type="merge_request" data-original="!1" data-link="false" data-link-reference="false" data-project="11" data-merge-request="11" data-project-path="group1/project1" data-iid="1" data-container="body" data-placement="top" title="My title 2" class="gfm gfm-merge_request">!1</a></p>
+ <p data-sourcepos="1:1-1:98" dir="auto">Hi <a href="/gfm_user" data-reference-type="user" data-user="1" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="John Doe1">@gfm_user</a> - thank you for reporting this <span class="gl-label gl-label-sm"><a href="/groupID/projectID/-/issues?label_name=UX+bug" data-reference-type="label" data-original='~"UX bug"' data-link="false" data-link-reference="false" data-project="ID" data-label="2" data-container="body" data-placement="top" title="TEXT" class="gfm gfm-label has-tooltip gl-link gl-label-link"><span class="gl-label-text gl-label-text-light" data-container="body" data-html="true" style="background-color: #990000">UX bug</span></a></span> (<a href="/group1/project1/-/issues/1" data-reference-type="issue" data-original="#1" data-link="false" data-link-reference="false" data-project="11" data-issue="11" data-project-path="group1/project1" data-iid="1" data-issue-type="issue" data-container="body" data-placement="top" title="My title 1" class="gfm gfm-issue">#1</a>) we hope to fix it in <a href="/group1/project1/-/milestones/1" data-reference-type="milestone" data-original="%1.1" data-link="false" data-link-reference="false" data-project="11" data-milestone="11" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%1.1</a> as part of <a href="/group1/project1/-/merge_requests/1" data-reference-type="merge_request" data-original="!1" data-link="false" data-link-reference="false" data-project="11" data-merge-request="11" data-project-path="group1/project1" data-iid="1" data-container="body" data-placement="top" title="My title 2" class="gfm gfm-merge_request">!1</a></p>
- name: strike
markdown: |-
~~del~~
diff --git a/spec/fixtures/packages/rpm/hello-0.0.1-1.fc29.src.rpm b/spec/fixtures/packages/rpm/hello-0.0.1-1.fc29.src.rpm
new file mode 100644
index 00000000000..8284faab80a
--- /dev/null
+++ b/spec/fixtures/packages/rpm/hello-0.0.1-1.fc29.src.rpm
Binary files differ
diff --git a/spec/fixtures/packages/rpm/payload.json b/spec/fixtures/packages/rpm/payload.json
new file mode 100644
index 00000000000..0240dbaca21
--- /dev/null
+++ b/spec/fixtures/packages/rpm/payload.json
@@ -0,0 +1,47 @@
+{
+ "files": [
+ "/usr/bin/hello.sh"
+ ],
+ "changelogs": [
+ {
+ "changelogtext": "First build",
+ "changelogtime": 1662552000
+ }
+ ],
+ "requirements": [
+ {
+ "requirename": "bash",
+ "requireversion": "",
+ "requireflags": 0
+ },
+ {
+ "requirename": "rpmlib(CompressedFileNames)",
+ "requireversion": "3.0.4-1",
+ "requireflags": 16777226
+ }
+ ],
+ "provides": [
+ {
+ "providename": "hello",
+ "provideflags": 8,
+ "provideversion": "0.0.1-1.fc29"
+ },
+ {
+ "providename": "hello(x86-64)",
+ "provideflags": 8,
+ "provideversion": "0.0.1-1.fc29"
+ }
+ ],
+ "name": "hello",
+ "version": "0.0.1",
+ "release": "1.fc29",
+ "summary": "Simple RPM package",
+ "description": "Fake rpm package",
+ "arch": "x86_64",
+ "license": "MIT",
+ "sourcerpm": "hello-0.0.1-1.fc29.src.rpm",
+ "group": "Unspecified",
+ "buildhost": "localhost",
+ "packager": null,
+ "vendor": null
+} \ No newline at end of file
diff --git a/spec/fixtures/packages/rpm/repodata/repomd.xml b/spec/fixtures/packages/rpm/repodata/repomd.xml
new file mode 100644
index 00000000000..4554ee9a6d0
--- /dev/null
+++ b/spec/fixtures/packages/rpm/repodata/repomd.xml
@@ -0,0 +1,27 @@
+<repomd xmlns="http://gitlab.com/api/v4/projects/1/packages/rpm/repodata/repomd.xml" xmlns:rpm="http://gitlab.com/api/v4/projects/1/packages/rpm/repodata/repomd.xml">
+ <revision>1644602779</revision>
+ <data type="filelists">
+ <checksum type="sha256">6503673de76312406ff8ecb06d9733c32b546a65abae4d4170d9b51fb75bf253</checksum>
+ <open-checksum type="sha256">7652c7496daa2507f08675a5b4f59a5428aaba72997400ae3d5e7bab8e3d9cc1</open-checksum>
+ <location href="repodata/6503673de76312406ff8ecb06d9733c32b546a65abae4d4170d9b51fb75bf253-filelists.xml"/>
+ <timestamp>1644602784</timestamp>
+ <size>1144067</size>
+ <open-size>25734004</open-size>
+ </data>
+ <data type="primary">
+ <checksum type="sha256">80279a863b6236e60c3e63036b8a9a25e3764dfb3121292b91e9f583af9e7b7e</checksum>
+ <open-checksum type="sha256">f852f3bb39f89520434d97f6913716dc448077ad49f2e5200327367f98a89d55</open-checksum>
+ <location href="repodata/80279a863b6236e60c3e63036b8a9a25e3764dfb3121292b91e9f583af9e7b7e-primary.xml"/>
+ <timestamp>1644602784</timestamp>
+ <size>66996</size>
+ <open-size>1008586</open-size>
+ </data>
+ <data type="other">
+ <checksum type="sha256">34408890500ec72c0f181542a91f7ff9320d2ef32c8e613540a5b9e1b8763e02</checksum>
+ <open-checksum type="sha256">acac5033036264cd26100713b014242471ade45487c28c7793466a84af512624</open-checksum>
+ <location href="repodata/34408890500ec72c0f181542a91f7ff9320d2ef32c8e613540a5b9e1b8763e02-other.xml"/>
+ <timestamp>1644602784</timestamp>
+ <size>43329</size>
+ <open-size>730393</open-size>
+ </data>
+</repomd>
diff --git a/spec/fixtures/packages/rubygems/package.gemspec b/spec/fixtures/packages/rubygems/package.gemspec
index ea03414cc6f..60acd078fad 100644
--- a/spec/fixtures/packages/rubygems/package.gemspec
+++ b/spec/fixtures/packages/rubygems/package.gemspec
@@ -30,7 +30,7 @@ Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.post_install_message = 'Installed, thank you!'
s.rdoc_options = ['--main', 'README.md']
- s.required_ruby_version = '>= 2.7.0'
+ s.required_ruby_version = '>= 2.7.0' # rubocop:disable Gemspec/RequiredRubyVersion
s.required_rubygems_version = '>= 1.8.11'
s.requirements = 'A high powered server or calculator'
s.rubygems_version = '1.8.09'
diff --git a/spec/fixtures/sample.pdf b/spec/fixtures/sample.pdf
new file mode 100644
index 00000000000..81ea09d7d12
--- /dev/null
+++ b/spec/fixtures/sample.pdf
@@ -0,0 +1,13 @@
+%PDF-1.3
+%�����������
+4 0 obj
+<< /Length 5 0 R /Filter /FlateDecode >>
+stream
+xe���0 ��>ō�@���
+ba�d�1U�V��_8��n�e}�� fXU�`\F�d2�����S%,�Q]�;XC�9�+Qy���k>a2>31B4�;���d)!Md�M�-�B��F���N�[v��~��E�5���^�Z_�� ΢�o�l.�
+endstream
+endobj
+5 0 obj
+155
+endobj
+2 0 obj
diff --git a/spec/fixtures/security_reports/deprecated/gl-sast-report.json b/spec/fixtures/security_reports/deprecated/gl-sast-report.json
deleted file mode 100644
index c5b0148fe3e..00000000000
--- a/spec/fixtures/security_reports/deprecated/gl-sast-report.json
+++ /dev/null
@@ -1,964 +0,0 @@
-[
- {
- "category": "sast",
- "message": "Probable insecure usage of temp file/directory.",
- "cve": "python/hardcoded/hardcoded-tmp.py:52865813c884a507be1f152d654245af34aba8a391626d01f1ab6d3f52ec8779:B108",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-tmp.py",
- "start_line": 1,
- "end_line": 1
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B108",
- "value": "B108",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
- }
- ],
- "priority": "Medium",
- "file": "python/hardcoded/hardcoded-tmp.py",
- "line": 1,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "name": "Predictable pseudorandom number generator",
- "message": "Predictable pseudorandom number generator",
- "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:47:PREDICTABLE_RANDOM",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "find_sec_bugs",
- "name": "Find Security Bugs"
- },
- "location": {
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "start_line": 47,
- "end_line": 47,
- "class": "com.gitlab.security_products.tests.App",
- "method": "generateSecretToken2"
- },
- "identifiers": [
- {
- "type": "find_sec_bugs_type",
- "name": "Find Security Bugs-PREDICTABLE_RANDOM",
- "value": "PREDICTABLE_RANDOM",
- "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
- }
- ],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 47,
- "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM",
- "tool": "find_sec_bugs"
- },
- {
- "category": "sast",
- "name": "Predictable pseudorandom number generator",
- "message": "Predictable pseudorandom number generator",
- "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:41:PREDICTABLE_RANDOM",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "find_sec_bugs",
- "name": "Find Security Bugs"
- },
- "location": {
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "start_line": 41,
- "end_line": 41,
- "class": "com.gitlab.security_products.tests.App",
- "method": "generateSecretToken1"
- },
- "identifiers": [
- {
- "type": "find_sec_bugs_type",
- "name": "Find Security Bugs-PREDICTABLE_RANDOM",
- "value": "PREDICTABLE_RANDOM",
- "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
- }
- ],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 41,
- "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM",
- "tool": "find_sec_bugs"
- },
- {
- "category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:cb203b465dffb0cb3a8e8bd8910b84b93b0a5995a938e4b903dbb0cd6ffa1254:B303",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 11,
- "end_line": 11
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 11,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:a7173c43ae66bd07466632d819d450e0071e02dbf782763640d1092981f9631b:B303",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 12,
- "end_line": 12
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 12,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:017017b77deb0b8369b6065947833eeea752a92ec8a700db590fece3e934cf0d:B303",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 13,
- "end_line": 13
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 13,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:45fc8c53aea7b84f06bc4e590cc667678d6073c4c8a1d471177ca2146fb22db2:B303",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 14,
- "end_line": 14
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 14,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Pickle library appears to be in use, possible security issue.",
- "cve": "python/imports/imports-aliases.py:5f200d47291e7bbd8352db23019b85453ca048dd98ea0c291260fa7d009963a4:B301",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 15,
- "end_line": 15
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B301",
- "value": "B301"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 15,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "name": "ECB mode is insecure",
- "message": "ECB mode is insecure",
- "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:ECB_MODE",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "find_sec_bugs",
- "name": "Find Security Bugs"
- },
- "location": {
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "start_line": 29,
- "end_line": 29,
- "class": "com.gitlab.security_products.tests.App",
- "method": "insecureCypher"
- },
- "identifiers": [
- {
- "type": "find_sec_bugs_type",
- "name": "Find Security Bugs-ECB_MODE",
- "value": "ECB_MODE",
- "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
- }
- ],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 29,
- "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE",
- "tool": "find_sec_bugs"
- },
- {
- "category": "sast",
- "name": "Cipher with no integrity",
- "message": "Cipher with no integrity",
- "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:CIPHER_INTEGRITY",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "find_sec_bugs",
- "name": "Find Security Bugs"
- },
- "location": {
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "start_line": 29,
- "end_line": 29,
- "class": "com.gitlab.security_products.tests.App",
- "method": "insecureCypher"
- },
- "identifiers": [
- {
- "type": "find_sec_bugs_type",
- "name": "Find Security Bugs-CIPHER_INTEGRITY",
- "value": "CIPHER_INTEGRITY",
- "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY"
- }
- ],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 29,
- "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY",
- "tool": "find_sec_bugs"
- },
- {
- "category": "sast",
- "message": "Probable insecure usage of temp file/directory.",
- "cve": "python/hardcoded/hardcoded-tmp.py:63dd4d626855555b816985d82c4614a790462a0a3ada89dc58eb97f9c50f3077:B108",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-tmp.py",
- "start_line": 14,
- "end_line": 14
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B108",
- "value": "B108",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
- }
- ],
- "priority": "Medium",
- "file": "python/hardcoded/hardcoded-tmp.py",
- "line": 14,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Probable insecure usage of temp file/directory.",
- "cve": "python/hardcoded/hardcoded-tmp.py:4ad6d4c40a8c263fc265f3384724014e0a4f8dd6200af83e51ff120420038031:B108",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-tmp.py",
- "start_line": 10,
- "end_line": 10
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B108",
- "value": "B108",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
- }
- ],
- "priority": "Medium",
- "file": "python/hardcoded/hardcoded-tmp.py",
- "line": 10,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with Popen module.",
- "cve": "python/imports/imports-aliases.py:2c3e1fa1e54c3c6646e8bcfaee2518153c6799b77587ff8d9a7b0631f6d34785:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 1,
- "end_line": 1
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 1,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with pickle module.",
- "cve": "python/imports/imports.py:af58d07f6ad519ef5287fcae65bf1a6999448a1a3a8bc1ac2a11daa80d0b96bf:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports.py",
- "start_line": 2,
- "end_line": 2
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports.py",
- "line": 2,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with subprocess module.",
- "cve": "python/imports/imports.py:8de9bc98029d212db530785a5f6780cfa663548746ff228ab8fa96c5bb82f089:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports.py",
- "start_line": 4,
- "end_line": 4
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports.py",
- "line": 4,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'blerg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:97c30f1d76d2a88913e3ce9ae74087874d740f87de8af697a9c455f01119f633:B106",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 22,
- "end_line": 22
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B106",
- "value": "B106",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 22,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'root'",
- "cve": "python/hardcoded/hardcoded-passwords.py:7431c73a0bc16d94ece2a2e75ef38f302574d42c37ac0c3c38ad0b3bf8a59f10:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 5,
- "end_line": 5
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 5,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: ''",
- "cve": "python/hardcoded/hardcoded-passwords.py:d2d1857c27caedd49c57bfbcdc23afcc92bd66a22701fcdc632869aab4ca73ee:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 9,
- "end_line": 9
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 9,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'ajklawejrkl42348swfgkg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:fb3866215a61393a5c9c32a3b60e2058171a23219c353f722cbd3567acab21d2:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 13,
- "end_line": 13
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 13,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'blerg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:63c62a8b7e1e5224439bd26b28030585ac48741e28ca64561a6071080c560a5f:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 23,
- "end_line": 23
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 23,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'blerg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:4311b06d08df8fa58229b341c531da8e1a31ec4520597bdff920cd5c098d86f9:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 24,
- "end_line": 24
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 24,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with subprocess module.",
- "cve": "python/imports/imports-function.py:5858400c2f39047787702de44d03361ef8d954c9d14bd54ee1c2bef9e6a7df93:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-function.py",
- "start_line": 4,
- "end_line": 4
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-function.py",
- "line": 4,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with pickle module.",
- "cve": "python/imports/imports-function.py:dbda3cf4190279d30e0aad7dd137eca11272b0b225e8af4e8bf39682da67d956:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-function.py",
- "start_line": 2,
- "end_line": 2
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-function.py",
- "line": 2,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with Popen module.",
- "cve": "python/imports/imports-from.py:eb8a0db9cd1a8c1ab39a77e6025021b1261cc2a0b026b2f4a11fca4e0636d8dd:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-from.py",
- "start_line": 7,
- "end_line": 7
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-from.py",
- "line": 7,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "subprocess call with shell=True seems safe, but may be changed in the future, consider rewriting without shell",
- "cve": "python/imports/imports-aliases.py:f99f9721e27537fbcb6699a4cf39c6740d6234d2c6f06cfc2d9ea977313c483d:B602",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 9,
- "end_line": 9
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B602",
- "value": "B602",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 9,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with subprocess module.",
- "cve": "python/imports/imports-from.py:332a12ab1146698f614a905ce6a6a5401497a12281aef200e80522711c69dcf4:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-from.py",
- "start_line": 6,
- "end_line": 6
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-from.py",
- "line": 6,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with Popen module.",
- "cve": "python/imports/imports-from.py:0a48de4a3d5348853a03666cb574697e3982998355e7a095a798bd02a5947276:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-from.py",
- "start_line": 1,
- "end_line": 2
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-from.py",
- "line": 1,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with pickle module.",
- "cve": "python/imports/imports-aliases.py:51b71661dff994bde3529639a727a678c8f5c4c96f00d300913f6d5be1bbdf26:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 7,
- "end_line": 8
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 7,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with loads module.",
- "cve": "python/imports/imports-aliases.py:6ff02aeb3149c01ab68484d794a94f58d5d3e3bb0d58557ef4153644ea68ea54:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 6,
- "end_line": 6
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 6,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
- "cve": "c/subdir/utils.c:b466873101951fe96e1332f6728eb7010acbbd5dfc3b65d7d53571d091a06d9e:CWE-119!/CWE-120",
- "confidence": "Low",
- "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "c/subdir/utils.c",
- "start_line": 4
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - char",
- "value": "char"
- },
- {
- "type": "cwe",
- "name": "CWE-119",
- "value": "119",
- "url": "https://cwe.mitre.org/data/definitions/119.html"
- },
- {
- "type": "cwe",
- "name": "CWE-120",
- "value": "120",
- "url": "https://cwe.mitre.org/data/definitions/120.html"
- }
- ],
- "file": "c/subdir/utils.c",
- "line": 4,
- "url": "https://cwe.mitre.org/data/definitions/119.html",
- "tool": "flawfinder"
- },
- {
- "category": "sast",
- "message": "Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents? (CWE-362)",
- "cve": "c/subdir/utils.c:bab681140fcc8fc3085b6bba74081b44ea145c1c98b5e70cf19ace2417d30770:CWE-362",
- "confidence": "Low",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "c/subdir/utils.c",
- "start_line": 8
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - fopen",
- "value": "fopen"
- },
- {
- "type": "cwe",
- "name": "CWE-362",
- "value": "362",
- "url": "https://cwe.mitre.org/data/definitions/362.html"
- }
- ],
- "file": "c/subdir/utils.c",
- "line": 8,
- "url": "https://cwe.mitre.org/data/definitions/362.html",
- "tool": "flawfinder"
- },
- {
- "category": "sast",
- "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
- "cve": "cplusplus/src/hello.cpp:c8c6dd0afdae6814194cf0930b719f757ab7b379cf8f261e7f4f9f2f323a818a:CWE-119!/CWE-120",
- "confidence": "Low",
- "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "cplusplus/src/hello.cpp",
- "start_line": 6
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - char",
- "value": "char"
- },
- {
- "type": "cwe",
- "name": "CWE-119",
- "value": "119",
- "url": "https://cwe.mitre.org/data/definitions/119.html"
- },
- {
- "type": "cwe",
- "name": "CWE-120",
- "value": "120",
- "url": "https://cwe.mitre.org/data/definitions/120.html"
- }
- ],
- "file": "cplusplus/src/hello.cpp",
- "line": 6,
- "url": "https://cwe.mitre.org/data/definitions/119.html",
- "tool": "flawfinder"
- },
- {
- "category": "sast",
- "message": "Does not check for buffer overflows when copying to destination [MS-banned] (CWE-120)",
- "cve": "cplusplus/src/hello.cpp:331c04062c4fe0c7c486f66f59e82ad146ab33cdd76ae757ca41f392d568cbd0:CWE-120",
- "confidence": "Low",
- "solution": "Consider using snprintf, strcpy_s, or strlcpy (warning: strncpy easily misused)",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "cplusplus/src/hello.cpp",
- "start_line": 7
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - strcpy",
- "value": "strcpy"
- },
- {
- "type": "cwe",
- "name": "CWE-120",
- "value": "120",
- "url": "https://cwe.mitre.org/data/definitions/120.html"
- }
- ],
- "file": "cplusplus/src/hello.cpp",
- "line": 7,
- "url": "https://cwe.mitre.org/data/definitions/120.html",
- "tool": "flawfinder"
- }
-] \ No newline at end of file
diff --git a/spec/fixtures/security_reports/feature-branch/gl-sast-report.json b/spec/fixtures/security_reports/feature-branch/gl-sast-report.json
index 51761583c70..083042e19ff 100644
--- a/spec/fixtures/security_reports/feature-branch/gl-sast-report.json
+++ b/spec/fixtures/security_reports/feature-branch/gl-sast-report.json
@@ -171,7 +171,7 @@
},
"type": "sast",
"status": "success",
- "start_time": "placeholder-value",
- "end_time": "placeholder-value"
+ "start_time": "2022-08-10T22:37:00",
+ "end_time": "2022-08-10T22:38:00"
}
} \ No newline at end of file
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report-names.json b/spec/fixtures/security_reports/master/gl-common-scanning-report-names.json
index ef2ff7443d3..cdad960026c 100644
--- a/spec/fixtures/security_reports/master/gl-common-scanning-report-names.json
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report-names.json
@@ -12,8 +12,29 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "location": {},
- "identifiers": [],
+ "location": {
+ "file": "yarn/yarn.lock",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
+ "identifiers": [
+ {
+ "value": "2017-11429",
+ "type": "cwe",
+ "name": "CWE-2017-11429",
+ "url": "https://cve.mitre.org/cgi-bin/cwename.cgi?name=CWE-2017-11429"
+ },
+ {
+ "value": "2017-11429",
+ "type": "cve",
+ "name": "CVE-2017-11429",
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-11429"
+ }
+ ],
"links": [
{
"url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1020"
@@ -33,8 +54,29 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "location": {},
- "identifiers": [],
+ "location": {
+ "file": "yarn/yarn.lock",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
+ "identifiers": [
+ {
+ "value": "2017-11429",
+ "type": "cwe",
+ "name": "CWE-2017-11429",
+ "url": "https://cve.mitre.org/cgi-bin/cwename.cgi?name=CWE-2017-11429"
+ },
+ {
+ "value": "2017-11429",
+ "type": "cve",
+ "name": "CVE-2017-11429",
+ "url": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-11429"
+ }
+ ],
"links": [
{
"name": "CVE-1030",
@@ -161,8 +203,9 @@
"version": "2.18.0"
},
"type": "dependency_scanning",
- "start_time": "placeholder-value",
- "end_time": "placeholder-value",
+ "start_time": "2022-08-10T21:37:00",
+ "end_time": "2022-08-10T21:38:00",
"status": "success"
- }
-} \ No newline at end of file
+ },
+ "version": "14.0.6"
+}
diff --git a/spec/fixtures/security_reports/master/gl-common-scanning-report.json b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
index 1295b44d4df..4c494963a79 100644
--- a/spec/fixtures/security_reports/master/gl-common-scanning-report.json
+++ b/spec/fixtures/security_reports/master/gl-common-scanning-report.json
@@ -12,7 +12,15 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "location": {},
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
"identifiers": [
{
"type": "GitLab",
@@ -27,18 +35,8 @@
],
"details": {
"commit": {
- "name": [
- {
- "lang": "en",
- "value": "The Commit"
- }
- ],
- "description": [
- {
- "lang": "en",
- "value": "Commit where the vulnerability was identified"
- }
- ],
+ "name": "the commit",
+ "description": "description",
"type": "commit",
"value": "41df7b7eb3be2b5be2c406c2f6d28cd6631eeb19"
}
@@ -56,7 +54,15 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "location": {},
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
"identifiers": [
{
"type": "GitLab",
@@ -71,18 +77,8 @@
],
"details": {
"commit": {
- "name": [
- {
- "lang": "en",
- "value": "The Commit"
- }
- ],
- "description": [
- {
- "lang": "en",
- "value": "Commit where the vulnerability was identified"
- }
- ],
+ "name": "the commit",
+ "description": "description",
"type": "commit",
"value": "41df7b7eb3be2b5be2c406c2f6d28cd6631eeb19"
}
@@ -100,7 +96,15 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "location": {},
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
"identifiers": [
{
"type": "GitLab",
@@ -115,18 +119,8 @@
],
"details": {
"commit": {
- "name": [
- {
- "lang": "en",
- "value": "The Commit"
- }
- ],
- "description": [
- {
- "lang": "en",
- "value": "Commit where the vulnerability was identified"
- }
- ],
+ "name": "the commit",
+ "description": "description",
"type": "commit",
"value": "41df7b7eb3be2b5be2c406c2f6d28cd6631eeb19"
}
@@ -144,7 +138,15 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "location": {},
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
"identifiers": [
{
"type": "GitLab",
@@ -159,18 +161,8 @@
],
"details": {
"commit": {
- "name": [
- {
- "lang": "en",
- "value": "The Commit"
- }
- ],
- "description": [
- {
- "lang": "en",
- "value": "Commit where the vulnerability was identified"
- }
- ],
+ "name": "the commit",
+ "description": "description",
"type": "commit",
"value": "41df7b7eb3be2b5be2c406c2f6d28cd6631eeb19"
}
@@ -258,7 +250,15 @@
}
]
},
- "location": {},
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
"identifiers": [
{
"type": "GitLab",
@@ -273,18 +273,8 @@
],
"details": {
"commit": {
- "name": [
- {
- "lang": "en",
- "value": "The Commit"
- }
- ],
- "description": [
- {
- "lang": "en",
- "value": "Commit where the vulnerability was identified"
- }
- ],
+ "name": "the commit",
+ "description": "description",
"type": "commit",
"value": "41df7b7eb3be2b5be2c406c2f6d28cd6631eeb19"
}
@@ -373,7 +363,15 @@
}
]
},
- "location": {},
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
"identifiers": [
{
"type": "GitLab",
@@ -400,8 +398,22 @@
"id": "gemnasium",
"name": "Gemnasium"
},
- "location": {},
- "identifiers": [],
+ "location": {
+ "file": "some/kind/of/file.c",
+ "dependency": {
+ "package": {
+ "name": "io.netty/netty"
+ },
+ "version": "3.9.1.Final"
+ }
+ },
+ "identifiers": [
+ {
+ "type": "GitLab",
+ "name": "Foo vulnerability",
+ "value": "foo"
+ }
+ ],
"links": []
}
],
@@ -442,8 +454,8 @@
"cve": "CVE-1020"
}
],
- "summary": "",
- "diff": ""
+ "summary": "this fixes CVE-1020",
+ "diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
},
{
"fixes": [
@@ -452,8 +464,8 @@
"id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3"
}
],
- "summary": "",
- "diff": ""
+ "summary": "this fixes CVE",
+ "diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
},
{
"fixes": [
@@ -462,8 +474,8 @@
"id": "bb2fbeb1b71ea360ce3f86f001d4e84823c3ffe1a1f7d41ba7466b14cfa953d3"
}
],
- "summary": "",
- "diff": ""
+ "summary": "this fixed CVE",
+ "diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
},
{
"fixes": [
@@ -472,8 +484,8 @@
"cve": "CVE-1"
}
],
- "summary": "",
- "diff": ""
+ "summary": "this fixes CVE-1",
+ "diff": "dG90YWxseSBsZWdpdGltYXRlIGRpZmYsIDEwLzEwIHdvdWxkIGFwcGx5"
}
],
"dependency_files": [],
@@ -497,9 +509,9 @@
"version": "2.18.0"
},
"type": "dependency_scanning",
- "start_time": "placeholder-value",
- "end_time": "placeholder-value",
+ "start_time": "2022-08-10T21:37:00",
+ "end_time": "2022-08-10T21:38:00",
"status": "success"
},
"version": "14.0.2"
-} \ No newline at end of file
+}
diff --git a/spec/fixtures/security_reports/master/gl-sast-report-minimal.json b/spec/fixtures/security_reports/master/gl-sast-report-minimal.json
index 5e9273d43b1..53d15224b30 100644
--- a/spec/fixtures/security_reports/master/gl-sast-report-minimal.json
+++ b/spec/fixtures/security_reports/master/gl-sast-report-minimal.json
@@ -62,7 +62,7 @@
},
"type": "sast",
"status": "success",
- "start_time": "placeholder-value",
- "end_time": "placeholder-value"
+ "start_time": "2022-08-10T21:37:00",
+ "end_time": "2022-08-10T21:38:00"
}
} \ No newline at end of file
diff --git a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json
index f01d26a69c9..8fa85c30b56 100644
--- a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json
+++ b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-gosec.json
@@ -62,6 +62,13 @@
},
"version": "0.82.0"
},
+ "primary_identifiers": [
+ {
+ "type": "semgrep_id",
+ "name": "gosec.G106-1",
+ "value": "gosec.G106-1"
+ }
+ ],
"type": "sast",
"start_time": "2022-03-15T20:36:58",
"end_time": "2022-03-15T20:37:05",
diff --git a/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json
new file mode 100644
index 00000000000..cbdfdb86f6b
--- /dev/null
+++ b/spec/fixtures/security_reports/master/gl-sast-report-semgrep-for-multiple-findings.json
@@ -0,0 +1,134 @@
+{
+ "version": "14.0.4",
+ "vulnerabilities": [
+ {
+ "id": "985a5666dcae22adef5ac12f8a8a2dacf9b9b481ae5d87cd0ac1712b0fd64864",
+ "category": "sast",
+ "message": "Deserialization of Untrusted Data",
+ "description": "Avoid using `load()`. `PyYAML.load` can create arbitrary Python\nobjects. A malicious actor could exploit this to run arbitrary\ncode. Use `safe_load()` instead.\n",
+ "cve": "",
+ "severity": "Critical",
+ "scanner": {
+ "id": "semgrep",
+ "name": "Semgrep"
+ },
+ "location": {
+ "file": "app/app.py",
+ "start_line": 39
+ },
+ "identifiers": [
+ {
+ "type": "semgrep_id",
+ "name": "bandit.B506",
+ "value": "bandit.B506",
+ "url": "https://semgrep.dev/r/gitlab.bandit.B506"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-502",
+ "value": "502",
+ "url": "https://cwe.mitre.org/data/definitions/502.html"
+ },
+ {
+ "type": "bandit_test_id",
+ "name": "Bandit Test ID B506",
+ "value": "B506"
+ }
+ ],
+ "tracking": {
+ "type": "source",
+ "items": [
+ {
+ "file": "app/app.py",
+ "line_start": 39,
+ "line_end": 39,
+ "signatures": [
+ {
+ "algorithm": "scope_offset",
+ "value": "app/app.py|yaml_hammer[0]:13"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "79f6537b7ec83c7717f5bd1a4f12645916caafefe2e4359148d889855505aa67",
+ "category": "sast",
+ "message": "Key Exchange without Entity Authentication",
+ "description": "Audit the use of ssh.InsecureIgnoreHostKey\n",
+ "cve": "",
+ "severity": "Medium",
+ "scanner": {
+ "id": "semgrep",
+ "name": "Semgrep"
+ },
+ "location": {
+ "file": "og.go",
+ "start_line": 8
+ },
+ "identifiers": [
+ {
+ "type": "semgrep_id",
+ "name": "gosec.G106-1",
+ "value": "gosec.G106-1"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-322",
+ "value": "322",
+ "url": "https://cwe.mitre.org/data/definitions/322.html"
+ },
+ {
+ "type": "gosec_rule_id",
+ "name": "Gosec Rule ID G106",
+ "value": "G106"
+ }
+ ],
+ "tracking": {
+ "type": "source",
+ "items": [
+ {
+ "file": "og.go",
+ "line_start": 8,
+ "line_end": 8,
+ "signatures": [
+ {
+ "algorithm": "scope_offset",
+ "value": "og.go|foo[0]:1"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ ],
+ "scan": {
+ "scanner": {
+ "id": "semgrep",
+ "name": "Semgrep",
+ "url": "https://github.com/returntocorp/semgrep",
+ "vendor": {
+ "name": "GitLab"
+ },
+ "version": "0.82.0"
+ },
+ "primary_identifiers": [
+ {
+ "type": "semgrep_id",
+ "name": "bandit.B506",
+ "value": "bandit.B506",
+ "url": "https://semgrep.dev/r/gitlab.bandit.B506"
+ },
+ {
+ "type": "semgrep_id",
+ "name": "gosec.G106-1",
+ "value": "gosec.G106-1"
+ }
+ ],
+ "type": "sast",
+ "start_time": "2022-03-15T20:36:58",
+ "end_time": "2022-03-15T20:37:05",
+ "status": "success"
+ }
+} \ No newline at end of file
diff --git a/spec/fixtures/security_reports/master/gl-sast-report.json b/spec/fixtures/security_reports/master/gl-sast-report.json
index 1aa8db1a65f..0ec31252e97 100644
--- a/spec/fixtures/security_reports/master/gl-sast-report.json
+++ b/spec/fixtures/security_reports/master/gl-sast-report.json
@@ -194,7 +194,7 @@
},
"type": "sast",
"status": "success",
- "start_time": "placeholder-value",
- "end_time": "placeholder-value"
+ "start_time": "2022-08-10T21:37:00",
+ "end_time": "2022-08-10T21:38:00"
}
} \ No newline at end of file
diff --git a/spec/fixtures/security_reports/master/gl-secret-detection-report.json b/spec/fixtures/security_reports/master/gl-secret-detection-report.json
index 21d4f3f1798..cb97b60ced1 100644
--- a/spec/fixtures/security_reports/master/gl-secret-detection-report.json
+++ b/spec/fixtures/security_reports/master/gl-secret-detection-report.json
@@ -18,6 +18,9 @@
"file": "aws-key.py",
"dependency": {
"package": {}
+ },
+ "commit": {
+ "sha": "e9c3a56590d5bed4155c0d128f1552d52fdcc7ae"
}
},
"identifiers": [
diff --git a/spec/frontend/__helpers__/class_spec_helper.js b/spec/frontend/__helpers__/class_spec_helper.js
deleted file mode 100644
index b26f087f0c5..00000000000
--- a/spec/frontend/__helpers__/class_spec_helper.js
+++ /dev/null
@@ -1,10 +0,0 @@
-// eslint-disable-next-line jest/no-export
-export default class ClassSpecHelper {
- static itShouldBeAStaticMethod(base, method) {
- return it('should be a static method', () => {
- expect(Object.prototype.hasOwnProperty.call(base, method)).toBeTruthy();
- });
- }
-}
-
-window.ClassSpecHelper = ClassSpecHelper;
diff --git a/spec/frontend/__helpers__/class_spec_helper_spec.js b/spec/frontend/__helpers__/class_spec_helper_spec.js
deleted file mode 100644
index 533d5687bde..00000000000
--- a/spec/frontend/__helpers__/class_spec_helper_spec.js
+++ /dev/null
@@ -1,26 +0,0 @@
-/* global ClassSpecHelper */
-
-import './class_spec_helper';
-
-describe('ClassSpecHelper', () => {
- let testContext;
-
- beforeEach(() => {
- testContext = {};
- });
-
- describe('itShouldBeAStaticMethod', () => {
- beforeEach(() => {
- class TestClass {
- instanceMethod() {
- this.prop = 'val';
- }
- static staticMethod() {}
- }
-
- testContext.TestClass = TestClass;
- });
-
- ClassSpecHelper.itShouldBeAStaticMethod(ClassSpecHelper, 'itShouldBeAStaticMethod');
- });
-});
diff --git a/spec/frontend/__helpers__/dom_shims/index.js b/spec/frontend/__helpers__/dom_shims/index.js
index 742d55196b4..3b41e2ca2a7 100644
--- a/spec/frontend/__helpers__/dom_shims/index.js
+++ b/spec/frontend/__helpers__/dom_shims/index.js
@@ -11,3 +11,4 @@ import './window_scroll_to';
import './scroll_by';
import './size_properties';
import './image_element_properties';
+import './text_encoder';
diff --git a/spec/frontend/__helpers__/dom_shims/text_encoder.js b/spec/frontend/__helpers__/dom_shims/text_encoder.js
new file mode 100644
index 00000000000..d3d5221a003
--- /dev/null
+++ b/spec/frontend/__helpers__/dom_shims/text_encoder.js
@@ -0,0 +1,4 @@
+import { TextEncoder, TextDecoder } from 'util';
+
+global.TextEncoder = TextEncoder;
+global.TextDecoder = TextDecoder;
diff --git a/spec/frontend/__helpers__/graphql_transformer.js b/spec/frontend/__helpers__/graphql_transformer.js
new file mode 100644
index 00000000000..e776e2ea6ac
--- /dev/null
+++ b/spec/frontend/__helpers__/graphql_transformer.js
@@ -0,0 +1,8 @@
+/* eslint-disable import/no-commonjs */
+const loader = require('graphql-tag/loader');
+
+module.exports = {
+ process(src) {
+ return loader.call({ cacheable() {} }, src);
+ },
+};
diff --git a/spec/frontend/__helpers__/shared_test_setup.js b/spec/frontend/__helpers__/shared_test_setup.js
index 45a7b8e0352..2fe9fe89a90 100644
--- a/spec/frontend/__helpers__/shared_test_setup.js
+++ b/spec/frontend/__helpers__/shared_test_setup.js
@@ -1,7 +1,7 @@
/* Common setup for both unit and integration test environments */
-import { config as testUtilsConfig } from '@vue/test-utils';
import * as jqueryMatchers from 'custom-jquery-matchers';
import Vue from 'vue';
+import { enableAutoDestroy } from '@vue/test-utils';
import 'jquery';
import Translate from '~/vue_shared/translate';
import setWindowLocation from './set_window_location_helper';
@@ -13,6 +13,8 @@ import './dom_shims';
import './jquery';
import '~/commons/bootstrap';
+enableAutoDestroy(afterEach);
+
// This module has some fairly decent visual test coverage in it's own repository.
jest.mock('@gitlab/favicon-overlay');
jest.mock('~/lib/utils/axios_utils', () => jest.requireActual('helpers/mocks/axios_utils'));
@@ -44,16 +46,6 @@ Object.entries(jqueryMatchers).forEach(([matcherName, matcherFactory]) => {
expect.extend(customMatchers);
-testUtilsConfig.deprecationWarningHandler = (method, message) => {
- const ALLOWED_DEPRECATED_METHODS = [
- // https://gitlab.com/gitlab-org/gitlab/-/issues/295679
- 'finding components with `find` or `get`',
- ];
- if (!ALLOWED_DEPRECATED_METHODS.includes(method)) {
- global.console.error(message);
- }
-};
-
Object.assign(global, {
requestIdleCallback(cb) {
const start = Date.now();
@@ -72,6 +64,7 @@ Object.assign(global, {
beforeEach(() => {
// make sure that each test actually tests something
// see https://jestjs.io/docs/en/expect#expecthasassertions
+ // eslint-disable-next-line jest/no-standalone-expect
expect.hasAssertions();
// Reset the mocked window.location. This ensures tests don't interfere with
diff --git a/spec/frontend/__helpers__/stub_component.js b/spec/frontend/__helpers__/stub_component.js
index 4f9d1ee6f5d..3e9af994ee3 100644
--- a/spec/frontend/__helpers__/stub_component.js
+++ b/spec/frontend/__helpers__/stub_component.js
@@ -38,7 +38,7 @@ export function stubComponent(Component, options = {}) {
// Do not render any slots/scoped slots except default
// This differs from VTU behavior which renders all slots
template: '<div><slot></slot></div>',
- // allows wrapper.find(Component) to work for stub
+ // allows wrapper.findComponent(Component) to work for stub
$_vueTestUtils_original: Component,
...options,
};
diff --git a/spec/frontend/__helpers__/vue_mount_component_helper.js b/spec/frontend/__helpers__/vue_mount_component_helper.js
deleted file mode 100644
index ed43355ea5b..00000000000
--- a/spec/frontend/__helpers__/vue_mount_component_helper.js
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Deprecated. Please do not use.
- * Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
- */
-const mountComponent = (Component, props = {}, el = null) =>
- new Component({
- propsData: props,
- }).$mount(el);
-
-/**
- * Deprecated. Please do not use.
- * Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
- */
-export const createComponentWithStore = (Component, store, propsData = {}) =>
- new Component({
- store,
- propsData,
- });
-
-/**
- * Deprecated. Please do not use.
- * Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
- */
-export const mountComponentWithStore = (Component, { el, props, store }) =>
- new Component({
- store,
- propsData: props || {},
- }).$mount(el);
-
-/**
- * Deprecated. Please do not use.
- * Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
- */
-export default mountComponent;
diff --git a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
index ae180c3b49d..466333f8a89 100644
--- a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
@@ -140,11 +140,12 @@ describe('Vue test utils helpers', () => {
const text = 'foo bar';
const options = { selector: 'div' };
const mockDiv = document.createElement('div');
- const mockVm = new Vue({ render: (h) => h('div') }).$mount();
+ let mockVm;
let wrapper;
beforeEach(() => {
jest.spyOn(vtu, 'createWrapper');
+ mockVm = new Vue({ render: (h) => h('div') }).$mount();
wrapper = extendedWrapper(
shallowMount({
diff --git a/spec/frontend/__mocks__/monaco-editor/index.js b/spec/frontend/__mocks__/monaco-editor/index.js
index 384f9993150..d09672a4ecf 100644
--- a/spec/frontend/__mocks__/monaco-editor/index.js
+++ b/spec/frontend/__mocks__/monaco-editor/index.js
@@ -8,10 +8,8 @@ import 'monaco-editor/esm/vs/language/css/monaco.contribution';
import 'monaco-editor/esm/vs/language/json/monaco.contribution';
import 'monaco-editor/esm/vs/language/html/monaco.contribution';
import 'monaco-editor/esm/vs/basic-languages/monaco.contribution';
-import 'monaco-yaml/lib/esm/monaco.contribution';
// This language starts trying to spin up web workers which obviously breaks in Jest environment
jest.mock('monaco-editor/esm/vs/language/typescript/tsMode');
-jest.mock('monaco-yaml/lib/esm/yamlMode');
export * from 'monaco-editor/esm/vs/editor/editor.api';
diff --git a/spec/frontend/__mocks__/monaco-yaml/index.js b/spec/frontend/__mocks__/monaco-yaml/index.js
new file mode 100644
index 00000000000..36681854d0b
--- /dev/null
+++ b/spec/frontend/__mocks__/monaco-yaml/index.js
@@ -0,0 +1,4 @@
+const setDiagnosticsOptions = jest.fn();
+const yamlDefaults = {};
+
+export { setDiagnosticsOptions, yamlDefaults };
diff --git a/spec/frontend/access_tokens/components/access_token_table_app_spec.js b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
index aed3db4aa4c..2fa14810578 100644
--- a/spec/frontend/access_tokens/components/access_token_table_app_spec.js
+++ b/spec/frontend/access_tokens/components/access_token_table_app_spec.js
@@ -1,6 +1,6 @@
import { GlButton, GlPagination, GlTable } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue';
import { EVENT_SUCCESS, PAGE_SIZE } from '~/access_tokens/components/constants';
import { __, s__, sprintf } from '~/locale';
@@ -11,7 +11,7 @@ describe('~/access_tokens/components/access_token_table_app', () => {
const accessTokenType = 'personal access token';
const accessTokenTypePlural = 'personal access tokens';
- const initialActiveAccessTokens = [];
+ const information = undefined;
const noActiveTokensMessage = 'This user has no active personal access tokens.';
const showRole = false;
@@ -43,11 +43,12 @@ describe('~/access_tokens/components/access_token_table_app', () => {
];
const createComponent = (props = {}) => {
- wrapper = mount(AccessTokenTableApp, {
+ wrapper = mountExtended(AccessTokenTableApp, {
provide: {
accessTokenType,
accessTokenTypePlural,
- initialActiveAccessTokens,
+ information,
+ initialActiveAccessTokens: defaultActiveAccessTokens,
noActiveTokensMessage,
showRole,
...props,
@@ -71,8 +72,8 @@ describe('~/access_tokens/components/access_token_table_app', () => {
wrapper?.destroy();
});
- it('should render the `GlTable` with default empty message', () => {
- createComponent();
+ it('should render an empty table with a default message', () => {
+ createComponent({ initialActiveAccessTokens: [] });
const cells = findCells();
expect(cells).toHaveLength(1);
@@ -81,58 +82,61 @@ describe('~/access_tokens/components/access_token_table_app', () => {
);
});
- it('should render the `GlTable` with custom empty message', () => {
+ it('should render an empty table with a custom message', () => {
const noTokensMessage = 'This group has no active access tokens.';
- createComponent({ noActiveTokensMessage: noTokensMessage });
+ createComponent({ initialActiveAccessTokens: [], noActiveTokensMessage: noTokensMessage });
const cells = findCells();
expect(cells).toHaveLength(1);
expect(cells.at(0).text()).toBe(noTokensMessage);
});
- it('should render an h5 element', () => {
+ it('should show a title indicating the amount of tokens', () => {
createComponent();
expect(wrapper.find('h5').text()).toBe(
sprintf(__('Active %{accessTokenTypePlural} (%{totalAccessTokens})'), {
accessTokenTypePlural,
- totalAccessTokens: initialActiveAccessTokens.length,
+ totalAccessTokens: defaultActiveAccessTokens.length,
}),
);
});
- it('should render the `GlTable` component with default 6 column headers', () => {
- createComponent();
+ it('should render information section', () => {
+ const info = 'This is my information';
+ createComponent({ information: info });
- const headers = findHeaders();
- expect(headers).toHaveLength(6);
- [
- __('Token name'),
- __('Scopes'),
- s__('AccessTokens|Created'),
- __('Last Used'),
- __('Expires'),
- __('Action'),
- ].forEach((text, index) => {
- expect(headers.at(index).text()).toBe(text);
- });
+ expect(wrapper.findByTestId('information-section').text()).toBe(info);
});
- it('should render the `GlTable` component with 7 headers', () => {
- createComponent({ showRole: true });
+ describe('table headers', () => {
+ it('should include `Action` column', () => {
+ createComponent();
+
+ const headers = findHeaders();
+ expect(headers.wrappers.map((header) => header.text())).toStrictEqual([
+ __('Token name'),
+ __('Scopes'),
+ s__('AccessTokens|Created'),
+ __('Last Used'),
+ __('Expires'),
+ __('Action'),
+ ]);
+ });
- const headers = findHeaders();
- expect(headers).toHaveLength(7);
- [
- __('Token name'),
- __('Scopes'),
- s__('AccessTokens|Created'),
- __('Last Used'),
- __('Expires'),
- __('Role'),
- __('Action'),
- ].forEach((text, index) => {
- expect(headers.at(index).text()).toBe(text);
+ it('should include `Role` column', () => {
+ createComponent({ showRole: true });
+
+ const headers = findHeaders();
+ expect(headers.wrappers.map((header) => header.text())).toStrictEqual([
+ __('Token name'),
+ __('Scopes'),
+ s__('AccessTokens|Created'),
+ __('Last Used'),
+ __('Expires'),
+ __('Role'),
+ __('Action'),
+ ]);
});
});
@@ -150,8 +154,8 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expect(assistiveElement.text()).toBe(s__('AccessTokens|The last time a token was used'));
});
- it('updates the table after a success AJAX event', async () => {
- createComponent({ showRole: true });
+ it('updates the table after new tokens are created', async () => {
+ createComponent({ initialActiveAccessTokens: [], showRole: true });
await triggerSuccess();
const cells = findCells();
@@ -190,16 +194,43 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expect(button.props('category')).toBe('tertiary');
});
- describe('revoke path', () => {
- beforeEach(() => {
- createComponent({ showRole: true });
+ describe('when revoke_path is', () => {
+ describe('absent in all tokens', () => {
+ it('should not include `Action` column', () => {
+ createComponent({
+ initialActiveAccessTokens: defaultActiveAccessTokens.map(
+ ({ revoke_path, ...rest }) => rest,
+ ),
+ showRole: true,
+ });
+
+ const headers = findHeaders();
+ expect(headers).toHaveLength(6);
+ [
+ __('Token name'),
+ __('Scopes'),
+ s__('AccessTokens|Created'),
+ __('Last Used'),
+ __('Expires'),
+ __('Role'),
+ ].forEach((text, index) => {
+ expect(headers.at(index).text()).toBe(text);
+ });
+ });
});
it.each([{ revoke_path: null }, { revoke_path: undefined }])(
- 'with %p, does not show revoke button',
- async (input) => {
- await triggerSuccess(defaultActiveAccessTokens.map((data) => ({ ...data, ...input })));
-
+ '%p in some tokens, does not show revoke button',
+ (input) => {
+ createComponent({
+ initialActiveAccessTokens: [
+ defaultActiveAccessTokens.map((data) => ({ ...data, ...input }))[0],
+ defaultActiveAccessTokens[1],
+ ],
+ showRole: true,
+ });
+
+ expect(findHeaders().at(6).text()).toBe(__('Action'));
expect(findCells().at(6).findComponent(GlButton).exists()).toBe(false);
},
);
@@ -207,7 +238,6 @@ describe('~/access_tokens/components/access_token_table_app', () => {
it('sorts rows alphabetically', async () => {
createComponent({ showRole: true });
- await triggerSuccess();
const cells = findCells();
@@ -226,7 +256,6 @@ describe('~/access_tokens/components/access_token_table_app', () => {
it('sorts rows by date', async () => {
createComponent({ showRole: true });
- await triggerSuccess();
const cells = findCells();
@@ -242,14 +271,20 @@ describe('~/access_tokens/components/access_token_table_app', () => {
expect(cells.at(10).text()).toBe('Never');
});
- it('should show the pagination component when needed', async () => {
- createComponent();
- expect(findPagination().exists()).toBe(false);
+ describe('pagination', () => {
+ it('does not show pagination component', () => {
+ createComponent({
+ initialActiveAccessTokens: Array(PAGE_SIZE).fill(defaultActiveAccessTokens[0]),
+ });
- await triggerSuccess(Array(PAGE_SIZE).fill(defaultActiveAccessTokens[0]));
- expect(findPagination().exists()).toBe(false);
+ expect(findPagination().exists()).toBe(false);
+ });
- await triggerSuccess(Array(PAGE_SIZE + 1).fill(defaultActiveAccessTokens[0]));
- expect(findPagination().exists()).toBe(true);
+ it('shows the pagination component', () => {
+ createComponent({
+ initialActiveAccessTokens: Array(PAGE_SIZE + 1).fill(defaultActiveAccessTokens[0]),
+ });
+ expect(findPagination().exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/access_tokens/components/new_access_token_app_spec.js b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
index d12d200d214..b4af11169ad 100644
--- a/spec/frontend/access_tokens/components/new_access_token_app_spec.js
+++ b/spec/frontend/access_tokens/components/new_access_token_app_spec.js
@@ -22,6 +22,8 @@ describe('~/access_tokens/components/new_access_token_app', () => {
});
};
+ const findButtonEl = () => document.querySelector('[type=submit]');
+
const triggerSuccess = async (newToken = 'new token') => {
wrapper
.findComponent(DomElementListener)
@@ -41,7 +43,7 @@ describe('~/access_tokens/components/new_access_token_app', () => {
<input type="text" id="expires_at" value="2022-01-01"/>
<input type="text" value='1'/>
<input type="checkbox" checked/>
- <input type="submit" value="Create"/>
+ <button type="submit" value="Create" class="disabled" disabled="disabled"/>
</form>`,
);
@@ -120,10 +122,10 @@ describe('~/access_tokens/components/new_access_token_app', () => {
});
it('should not reset the submit button value', async () => {
- expect(document.querySelector('input[type=submit]').value).toBe('Create');
+ expect(findButtonEl().value).toBe('Create');
await triggerSuccess();
- expect(document.querySelector('input[type=submit]').value).toBe('Create');
+ expect(findButtonEl().value).toBe('Create');
});
});
});
@@ -162,6 +164,17 @@ describe('~/access_tokens/components/new_access_token_app', () => {
expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
});
+
+ it('should enable the submit button', async () => {
+ const button = findButtonEl();
+ expect(button).toBeDisabled();
+ expect(button.className).toBe('disabled');
+
+ await triggerError();
+
+ expect(button).not.toBeDisabled();
+ expect(button.className).toBe('');
+ });
});
describe('before error or success', () => {
diff --git a/spec/frontend/access_tokens/index_spec.js b/spec/frontend/access_tokens/index_spec.js
index 55575ab25fc..1157e44f41a 100644
--- a/spec/frontend/access_tokens/index_spec.js
+++ b/spec/frontend/access_tokens/index_spec.js
@@ -1,7 +1,4 @@
-/* eslint-disable vue/require-prop-types */
-/* eslint-disable vue/one-component-per-file */
import { createWrapper } from '@vue/test-utils';
-import Vue from 'vue';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import {
@@ -10,10 +7,11 @@ import {
initNewAccessTokenApp,
initTokensApp,
} from '~/access_tokens';
-import * as AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue';
+import AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue';
import ExpiresAtField from '~/access_tokens/components/expires_at_field.vue';
-import * as NewAccessTokenApp from '~/access_tokens/components/new_access_token_app.vue';
-import * as TokensApp from '~/access_tokens/components/tokens_app.vue';
+import NewAccessTokenApp from '~/access_tokens/components/new_access_token_app.vue';
+import TokensApp from '~/access_tokens/components/tokens_app.vue';
+import { FORM_SELECTOR } from '~/access_tokens/components/constants';
import { FEED_TOKEN, INCOMING_EMAIL_TOKEN, STATIC_OBJECT_TOKEN } from '~/access_tokens/constants';
import { __, sprintf } from '~/locale';
@@ -28,26 +26,7 @@ describe('access tokens', () => {
describe('initAccessTokenTableApp', () => {
const accessTokenType = 'personal access token';
const accessTokenTypePlural = 'personal access tokens';
- const initialActiveAccessTokens = [{ id: '1' }];
-
- const FakeAccessTokenTableApp = Vue.component('FakeComponent', {
- inject: [
- 'accessTokenType',
- 'accessTokenTypePlural',
- 'initialActiveAccessTokens',
- 'noActiveTokensMessage',
- 'showRole',
- ],
- props: [
- 'accessTokenType',
- 'accessTokenTypePlural',
- 'initialActiveAccessTokens',
- 'noActiveTokensMessage',
- 'showRole',
- ],
- render: () => null,
- });
- AccessTokenTableApp.default = FakeAccessTokenTableApp;
+ const initialActiveAccessTokens = [{ revoked_path: '1' }];
it('mounts the component and provides required values', () => {
setHTMLFixture(
@@ -60,19 +39,18 @@ describe('access tokens', () => {
);
const vueInstance = initAccessTokenTableApp();
-
wrapper = createWrapper(vueInstance);
- const component = wrapper.findComponent(FakeAccessTokenTableApp);
+ const component = wrapper.findComponent({ name: 'AccessTokenTableRoot' });
expect(component.exists()).toBe(true);
-
- expect(component.props()).toMatchObject({
+ expect(wrapper.findComponent(AccessTokenTableApp).vm).toMatchObject({
// Required value
accessTokenType,
accessTokenTypePlural,
initialActiveAccessTokens,
// Default values
+ information: undefined,
noActiveTokensMessage: sprintf(__('This user has no active %{accessTokenTypePlural}.'), {
accessTokenTypePlural,
}),
@@ -81,12 +59,14 @@ describe('access tokens', () => {
});
it('mounts the component and provides all values', () => {
+ const information = 'Additional information';
const noActiveTokensMessage = 'This group has no active access tokens.';
setHTMLFixture(
`<div id="js-access-token-table-app"
data-access-token-type="${accessTokenType}"
data-access-token-type-plural="${accessTokenTypePlural}"
data-initial-active-access-tokens=${JSON.stringify(initialActiveAccessTokens)}
+ data-information="${information}"
data-no-active-tokens-message="${noActiveTokensMessage}"
data-show-role
>
@@ -94,15 +74,15 @@ describe('access tokens', () => {
);
const vueInstance = initAccessTokenTableApp();
-
wrapper = createWrapper(vueInstance);
- const component = wrapper.findComponent(FakeAccessTokenTableApp);
+ const component = wrapper.findComponent({ name: 'AccessTokenTableRoot' });
expect(component.exists()).toBe(true);
- expect(component.props()).toMatchObject({
+ expect(component.findComponent(AccessTokenTableApp).vm).toMatchObject({
accessTokenType,
accessTokenTypePlural,
initialActiveAccessTokens,
+ information,
noActiveTokensMessage,
showRole: true,
});
@@ -157,23 +137,16 @@ describe('access tokens', () => {
it('mounts the component and sets `accessTokenType` prop', () => {
const accessTokenType = 'personal access token';
setHTMLFixture(
- `<div id="js-new-access-token-app" data-access-token-type="${accessTokenType}"></div>`,
+ `<div id="js-new-access-token-app" data-access-token-type="${accessTokenType}"></div>
+ <form id="${FORM_SELECTOR.slice(1)}"></form>`,
);
- const FakeNewAccessTokenApp = Vue.component('FakeComponent', {
- inject: ['accessTokenType'],
- props: ['accessTokenType'],
- render: () => null,
- });
- NewAccessTokenApp.default = FakeNewAccessTokenApp;
-
const vueInstance = initNewAccessTokenApp();
-
wrapper = createWrapper(vueInstance);
- const component = wrapper.findComponent(FakeNewAccessTokenApp);
+ const component = wrapper.findComponent({ name: 'NewAccessTokenRoot' });
expect(component.exists()).toBe(true);
- expect(component.props('accessTokenType')).toEqual(accessTokenType);
+ expect(component.findComponent(NewAccessTokenApp).vm).toMatchObject({ accessTokenType });
});
it('returns `null`', () => {
@@ -192,20 +165,12 @@ describe('access tokens', () => {
`<div id="js-tokens-app" data-tokens-data=${JSON.stringify(tokensData)}></div>`,
);
- const FakeTokensApp = Vue.component('FakeComponent', {
- inject: ['tokenTypes'],
- props: ['tokenTypes'],
- render: () => null,
- });
- TokensApp.default = FakeTokensApp;
-
const vueInstance = initTokensApp();
-
wrapper = createWrapper(vueInstance);
- const component = wrapper.findComponent(FakeTokensApp);
+ const component = wrapper.findComponent(TokensApp);
expect(component.exists()).toBe(true);
- expect(component.props('tokenTypes')).toEqual(tokensData);
+ expect(component.vm).toMatchObject({ tokenTypes: tokensData });
});
it('returns `null`', () => {
diff --git a/spec/frontend/admin/broadcast_messages/components/base_spec.js b/spec/frontend/admin/broadcast_messages/components/base_spec.js
new file mode 100644
index 00000000000..020e1c1d7c1
--- /dev/null
+++ b/spec/frontend/admin/broadcast_messages/components/base_spec.js
@@ -0,0 +1,112 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlPagination } from '@gitlab/ui';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
+import { createAlert } from '~/flash';
+import axios from '~/lib/utils/axios_utils';
+import { redirectTo } from '~/lib/utils/url_utility';
+import BroadcastMessagesBase from '~/admin/broadcast_messages/components/base.vue';
+import MessagesTable from '~/admin/broadcast_messages/components/messages_table.vue';
+import { generateMockMessages, MOCK_MESSAGES } from '../mock_data';
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/url_utility');
+
+describe('BroadcastMessagesBase', () => {
+ let wrapper;
+ let axiosMock;
+
+ useMockLocationHelper();
+
+ const findTable = () => wrapper.findComponent(MessagesTable);
+ const findPagination = () => wrapper.findComponent(GlPagination);
+
+ function createComponent(props = {}) {
+ wrapper = shallowMount(BroadcastMessagesBase, {
+ propsData: {
+ page: 1,
+ messagesCount: MOCK_MESSAGES.length,
+ messages: MOCK_MESSAGES,
+ ...props,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ wrapper.destroy();
+ });
+
+ it('renders the table and pagination when there are existing messages', () => {
+ createComponent();
+
+ expect(findTable().exists()).toBe(true);
+ expect(findPagination().exists()).toBe(true);
+ });
+
+ it('does not render the table when there are no visible messages', () => {
+ createComponent({ messages: [] });
+
+ expect(findTable().exists()).toBe(false);
+ expect(findPagination().exists()).toBe(true);
+ });
+
+ it('does not remove a deleted message if it was not in visibleMessages', async () => {
+ createComponent();
+
+ findTable().vm.$emit('delete-message', -1);
+ await waitForPromises();
+
+ expect(axiosMock.history.delete).toHaveLength(0);
+ expect(wrapper.vm.visibleMessages.length).toBe(MOCK_MESSAGES.length);
+ });
+
+ it('does not remove a deleted message if the request fails', async () => {
+ createComponent();
+ const { id, delete_path } = MOCK_MESSAGES[0];
+ axiosMock.onDelete(delete_path).replyOnce(500);
+
+ findTable().vm.$emit('delete-message', id);
+ await waitForPromises();
+
+ expect(wrapper.vm.visibleMessages.find((m) => m.id === id)).not.toBeUndefined();
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: BroadcastMessagesBase.i18n.deleteError,
+ }),
+ );
+ });
+
+ it('removes a deleted message from visibleMessages on success', async () => {
+ createComponent();
+ const { id, delete_path } = MOCK_MESSAGES[0];
+ axiosMock.onDelete(delete_path).replyOnce(200);
+
+ findTable().vm.$emit('delete-message', id);
+ await waitForPromises();
+
+ expect(wrapper.vm.visibleMessages.find((m) => m.id === id)).toBeUndefined();
+ expect(wrapper.vm.totalMessages).toBe(MOCK_MESSAGES.length - 1);
+ });
+
+ it('redirects to the first page when totalMessages changes from 21 to 20', async () => {
+ window.location.pathname = `${TEST_HOST}/admin/broadcast_messages`;
+
+ const messages = generateMockMessages(21);
+ const { id, delete_path } = messages[0];
+ createComponent({ messages, messagesCount: messages.length });
+
+ axiosMock.onDelete(delete_path).replyOnce(200);
+
+ findTable().vm.$emit('delete-message', id);
+ await waitForPromises();
+
+ expect(redirectTo).toHaveBeenCalledWith(`${TEST_HOST}/admin/broadcast_messages?page=1`);
+ });
+});
diff --git a/spec/frontend/admin/broadcast_messages/components/messages_table_spec.js b/spec/frontend/admin/broadcast_messages/components/messages_table_spec.js
new file mode 100644
index 00000000000..349fab03853
--- /dev/null
+++ b/spec/frontend/admin/broadcast_messages/components/messages_table_spec.js
@@ -0,0 +1,51 @@
+import { mount } from '@vue/test-utils';
+import MessagesTable from '~/admin/broadcast_messages/components/messages_table.vue';
+import { MOCK_MESSAGES } from '../mock_data';
+
+describe('MessagesTable', () => {
+ let wrapper;
+
+ const findRows = () => wrapper.findAll('[data-testid="message-row"]');
+ const findTargetRoles = () => wrapper.find('[data-testid="target-roles-th"]');
+ const findDeleteButton = (id) => wrapper.find(`[data-testid="delete-message-${id}"]`);
+
+ function createComponent(props = {}, glFeatures = {}) {
+ wrapper = mount(MessagesTable, {
+ provide: {
+ glFeatures,
+ },
+ propsData: {
+ messages: MOCK_MESSAGES,
+ ...props,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a table row for each message', () => {
+ createComponent();
+
+ expect(findRows()).toHaveLength(MOCK_MESSAGES.length);
+ });
+
+ it('renders the "Target Roles" column when roleTargetedBroadcastMessages is enabled', () => {
+ createComponent({}, { roleTargetedBroadcastMessages: true });
+ expect(findTargetRoles().exists()).toBe(true);
+ });
+
+ it('does not render the "Target Roles" column when roleTargetedBroadcastMessages is disabled', () => {
+ createComponent();
+ expect(findTargetRoles().exists()).toBe(false);
+ });
+
+ it('emits a delete-message event when a delete button is clicked', () => {
+ const { id } = MOCK_MESSAGES[0];
+ createComponent();
+ findDeleteButton(id).element.click();
+ expect(wrapper.emitted('delete-message')).toHaveLength(1);
+ expect(wrapper.emitted('delete-message')[0]).toEqual([id]);
+ });
+});
diff --git a/spec/frontend/admin/broadcast_messages/mock_data.js b/spec/frontend/admin/broadcast_messages/mock_data.js
new file mode 100644
index 00000000000..8dd98c2319d
--- /dev/null
+++ b/spec/frontend/admin/broadcast_messages/mock_data.js
@@ -0,0 +1,17 @@
+const generateMockMessage = (id) => ({
+ id,
+ delete_path: `/admin/broadcast_messages/${id}.js`,
+ edit_path: `/admin/broadcast_messages/${id}/edit`,
+ starts_at: new Date().toISOString(),
+ ends_at: new Date().toISOString(),
+ preview: '<div>YEET</div>',
+ status: 'Expired',
+ target_path: '*/welcome',
+ target_roles: 'Maintainer, Owner',
+ type: 'Banner',
+});
+
+export const generateMockMessages = (n) =>
+ [...Array(n).keys()].map((id) => generateMockMessage(id + 1));
+
+export const MOCK_MESSAGES = generateMockMessages(5);
diff --git a/spec/frontend/admin/deploy_keys/components/table_spec.js b/spec/frontend/admin/deploy_keys/components/table_spec.js
index a18506c0916..4d4a2caedde 100644
--- a/spec/frontend/admin/deploy_keys/components/table_spec.js
+++ b/spec/frontend/admin/deploy_keys/components/table_spec.js
@@ -9,7 +9,7 @@ import { stubComponent } from 'helpers/stub_component';
import DeployKeysTable from '~/admin/deploy_keys/components/table.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import Api, { DEFAULT_PER_PAGE } from '~/api';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
jest.mock('~/api');
jest.mock('~/flash');
@@ -243,7 +243,7 @@ describe('DeployKeysTable', () => {
itRendersTheEmptyState();
it('displays flash', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DeployKeysTable.i18n.apiErrorMessage,
captureError: true,
error,
diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js
index fe07f0fce00..a0aec347b6b 100644
--- a/spec/frontend/admin/users/components/users_table_spec.js
+++ b/spec/frontend/admin/users/components/users_table_spec.js
@@ -10,7 +10,7 @@ import AdminUserActions from '~/admin/users/components/user_actions.vue';
import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
import AdminUsersTable from '~/admin/users/components/users_table.vue';
import getUsersGroupCountsQuery from '~/admin/users/graphql/queries/get_users_group_counts.query.graphql';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import AdminUserDate from '~/vue_shared/components/user_date.vue';
import { users, paths, createGroupCountResponse } from '../mock_data';
@@ -135,7 +135,7 @@ describe('AdminUsersTable component', () => {
});
it('creates a flash message and captures the error', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Could not load user group counts. Please refresh the page to try again.',
captureError: true,
error: expect.any(Error),
diff --git a/spec/frontend/alert_management/components/alert_management_table_spec.js b/spec/frontend/alert_management/components/alert_management_table_spec.js
index 3e1438c37d6..7fb4f2d2463 100644
--- a/spec/frontend/alert_management/components/alert_management_table_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_table_spec.js
@@ -1,4 +1,4 @@
-import { GlTable, GlAlert, GlLoadingIcon, GlDropdown, GlIcon, GlAvatar } from '@gitlab/ui';
+import { GlTable, GlAlert, GlLoadingIcon, GlDropdown, GlIcon, GlAvatar, GlLink } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
@@ -31,6 +31,7 @@ describe('AlertManagementTable', () => {
const findSearch = () => wrapper.findComponent(FilteredSearchBar);
const findSeverityColumnHeader = () => wrapper.findByTestId('alert-management-severity-sort');
const findFirstIDField = () => wrapper.findAllByTestId('idField').at(0);
+ const findFirstIDLink = () => wrapper.findAllByTestId('idField').at(0).findComponent(GlLink);
const findAssignees = () => wrapper.findAllByTestId('assigneesField');
const findSeverityFields = () => wrapper.findAllByTestId('severityField');
const findIssueFields = () => wrapper.findAllByTestId('issueField');
@@ -135,10 +136,11 @@ describe('AlertManagementTable', () => {
expect(findLoader().exists()).toBe(false);
expect(findAlertsTable().exists()).toBe(true);
expect(findAlerts()).toHaveLength(mockAlerts.length);
- expect(findAlerts().at(0).classes()).toContain('gl-hover-bg-blue-50');
+ expect(findAlerts().at(0).classes()).toContain('gl-hover-bg-gray-50');
+ expect(findAlerts().at(0).classes()).not.toContain('gl-hover-border-blue-200');
});
- it('displays the alert ID and title formatted correctly', () => {
+ it('displays the alert ID and title as a link', () => {
mountComponent({
data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
@@ -146,6 +148,8 @@ describe('AlertManagementTable', () => {
expect(findFirstIDField().exists()).toBe(true);
expect(findFirstIDField().text()).toBe(`#${mockAlerts[0].iid} ${mockAlerts[0].title}`);
+ expect(findFirstIDLink().text()).toBe(`#${mockAlerts[0].iid} ${mockAlerts[0].title}`);
+ expect(findFirstIDLink().attributes('href')).toBe('/1527542/details');
});
it('displays status dropdown', () => {
@@ -266,7 +270,8 @@ describe('AlertManagementTable', () => {
alerts: {
list: [
{
- iid: 1,
+ iid: '1',
+ title: 'SyntaxError: Invalid or unexpected token',
status: 'acknowledged',
startedAt: '2020-03-17T23:18:14.996Z',
severity: 'high',
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
index fb9e97e7505..e0075aa71d9 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
@@ -304,12 +304,12 @@ describe('AlertsSettingsForm', () => {
});
describe.each`
- payload | resetPayloadAndMappingConfirmed | disabled
- ${validSamplePayload} | ${true} | ${undefined}
- ${emptySamplePayload} | ${true} | ${undefined}
- ${validSamplePayload} | ${false} | ${'disabled'}
- ${emptySamplePayload} | ${false} | ${undefined}
- `('', ({ payload, resetPayloadAndMappingConfirmed, disabled }) => {
+ context | payload | resetPayloadAndMappingConfirmed | disabled
+ ${'valid payload, confirmed and enabled'} | ${validSamplePayload} | ${true} | ${undefined}
+ ${'empty payload, confirmed and enabled'} | ${emptySamplePayload} | ${true} | ${undefined}
+ ${'valid payload, unconfirmed and disabled'} | ${validSamplePayload} | ${false} | ${'disabled'}
+ ${'empty payload, unconfirmed and enabled'} | ${emptySamplePayload} | ${false} | ${undefined}
+ `('given $context', ({ payload, resetPayloadAndMappingConfirmed, disabled }) => {
const payloadResetMsg = resetPayloadAndMappingConfirmed
? 'was confirmed'
: 'was not confirmed';
@@ -333,12 +333,12 @@ describe('AlertsSettingsForm', () => {
describe('action buttons for sample payload', () => {
describe.each`
- resetPayloadAndMappingConfirmed | payloadExample | caption
- ${false} | ${validSamplePayload} | ${'Edit payload'}
- ${true} | ${emptySamplePayload} | ${'Parse payload fields'}
- ${true} | ${validSamplePayload} | ${'Parse payload fields'}
- ${false} | ${emptySamplePayload} | ${'Parse payload fields'}
- `('', ({ resetPayloadAndMappingConfirmed, payloadExample, caption }) => {
+ context | resetPayloadAndMappingConfirmed | payloadExample | caption
+ ${'valid payload, unconfirmed'} | ${false} | ${validSamplePayload} | ${'Edit payload'}
+ ${'empty payload, confirmed'} | ${true} | ${emptySamplePayload} | ${'Parse payload fields'}
+ ${'valid payload, confirmed'} | ${true} | ${validSamplePayload} | ${'Parse payload fields'}
+ ${'empty payload, unconfirmed'} | ${false} | ${emptySamplePayload} | ${'Parse payload fields'}
+ `('given $context', ({ resetPayloadAndMappingConfirmed, payloadExample, caption }) => {
const samplePayloadMsg = payloadExample ? 'was provided' : 'was not provided';
const payloadResetMsg = resetPayloadAndMappingConfirmed
? 'was confirmed'
@@ -402,24 +402,27 @@ describe('AlertsSettingsForm', () => {
${true} | ${true} | ${2} | ${false}
${true} | ${false} | ${1} | ${false}
${false} | ${true} | ${1} | ${false}
- `('', ({ alertFieldsProvided, multiIntegrations, integrationOption, visible }) => {
- const visibleMsg = visible ? 'rendered' : 'not rendered';
- const alertFieldsMsg = alertFieldsProvided ? 'provided' : 'not provided';
- const integrationType = integrationOption === 1 ? typeSet.http : typeSet.prometheus;
- const multiIntegrationsEnabled = multiIntegrations ? 'enabled' : 'not enabled';
+ `(
+ 'given alertFieldsProvided: $alertFieldsProvided, multiIntegrations: $multiIntegrations, integrationOption: $integrationOption, visible: $visible',
+ ({ alertFieldsProvided, multiIntegrations, integrationOption, visible }) => {
+ const visibleMsg = visible ? 'rendered' : 'not rendered';
+ const alertFieldsMsg = alertFieldsProvided ? 'provided' : 'not provided';
+ const integrationType = integrationOption === 1 ? typeSet.http : typeSet.prometheus;
+ const multiIntegrationsEnabled = multiIntegrations ? 'enabled' : 'not enabled';
+
+ it(`is ${visibleMsg} when multiIntegrations are ${multiIntegrationsEnabled}, integration type is ${integrationType} and alert fields are ${alertFieldsMsg}`, async () => {
+ createComponent({
+ multiIntegrations,
+ props: {
+ alertFields: alertFieldsProvided ? alertFields : [],
+ },
+ });
+ await selectOptionAtIndex(integrationOption);
- it(`is ${visibleMsg} when multiIntegrations are ${multiIntegrationsEnabled}, integration type is ${integrationType} and alert fields are ${alertFieldsMsg}`, async () => {
- createComponent({
- multiIntegrations,
- props: {
- alertFields: alertFieldsProvided ? alertFields : [],
- },
+ expect(findMappingBuilder().exists()).toBe(visible);
});
- await selectOptionAtIndex(integrationOption);
-
- expect(findMappingBuilder().exists()).toBe(visible);
- });
- });
+ },
+ );
});
describe('Form validation', () => {
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index 0266adeb6c7..fcefcb7cf66 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -30,7 +30,7 @@ import {
INTEGRATION_INACTIVE_PAYLOAD_TEST_ERROR,
DELETE_INTEGRATION_ERROR,
} from '~/alerts_settings/utils/error_messages';
-import createFlash, { FLASH_TYPES } from '~/flash';
+import { createAlert, VARIANT_SUCCESS } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
import {
@@ -327,7 +327,7 @@ describe('AlertsSettingsWrapper', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message: ADD_INTEGRATION_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: ADD_INTEGRATION_ERROR });
});
it('shows an error alert when integration token reset fails', async () => {
@@ -336,7 +336,7 @@ describe('AlertsSettingsWrapper', () => {
findAlertsSettingsForm().vm.$emit('reset-token', {});
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message: RESET_INTEGRATION_TOKEN_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: RESET_INTEGRATION_TOKEN_ERROR });
});
it('shows an error alert when integration update fails', async () => {
@@ -345,7 +345,7 @@ describe('AlertsSettingsWrapper', () => {
findAlertsSettingsForm().vm.$emit('update-integration', {});
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message: UPDATE_INTEGRATION_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: UPDATE_INTEGRATION_ERROR });
});
describe('Test alert failure', () => {
@@ -360,17 +360,17 @@ describe('AlertsSettingsWrapper', () => {
it('shows an error alert when integration test payload is invalid', async () => {
mock.onPost(/(.*)/).replyOnce(httpStatusCodes.UNPROCESSABLE_ENTITY);
await wrapper.vm.testAlertPayload({ endpoint: '', data: '', token: '' });
- expect(createFlash).toHaveBeenCalledWith({ message: INTEGRATION_PAYLOAD_TEST_ERROR });
- expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({ message: INTEGRATION_PAYLOAD_TEST_ERROR });
+ expect(createAlert).toHaveBeenCalledTimes(1);
});
it('shows an error alert when integration is not activated', async () => {
mock.onPost(/(.*)/).replyOnce(httpStatusCodes.FORBIDDEN);
await wrapper.vm.testAlertPayload({ endpoint: '', data: '', token: '' });
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: INTEGRATION_INACTIVE_PAYLOAD_TEST_ERROR,
});
- expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledTimes(1);
});
});
@@ -444,9 +444,9 @@ describe('AlertsSettingsWrapper', () => {
jest.spyOn(alertsUpdateService, 'updateTestAlert').mockResolvedValueOnce({});
findAlertsSettingsForm().vm.$emit('test-alert-payload', '');
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: i18n.alertSent,
- type: FLASH_TYPES.SUCCESS,
+ variant: VARIANT_SUCCESS,
});
});
@@ -454,7 +454,7 @@ describe('AlertsSettingsWrapper', () => {
jest.spyOn(alertsUpdateService, 'updateTestAlert').mockRejectedValueOnce({});
findAlertsSettingsForm().vm.$emit('test-alert-payload', '');
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: INTEGRATION_PAYLOAD_TEST_ERROR,
});
});
@@ -486,7 +486,7 @@ describe('AlertsSettingsWrapper', () => {
await destroyHttpIntegration(wrapper);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message: 'Houston, we have a problem' });
+ expect(createAlert).toHaveBeenCalledWith({ message: 'Houston, we have a problem' });
});
it('displays flash if mutation had a non-recoverable error', async () => {
@@ -497,7 +497,7 @@ describe('AlertsSettingsWrapper', () => {
await destroyHttpIntegration(wrapper);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DELETE_INTEGRATION_ERROR,
});
});
diff --git a/spec/frontend/api/projects_api_spec.js b/spec/frontend/api/projects_api_spec.js
index 8f40b557e1f..8459021421f 100644
--- a/spec/frontend/api/projects_api_spec.js
+++ b/spec/frontend/api/projects_api_spec.js
@@ -1,5 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
+import getTransferLocationsResponse from 'test_fixtures/api/projects/transfer_locations_page_1.json';
import * as projectsApi from '~/api/projects_api';
+import { DEFAULT_PER_PAGE } from '~/api';
import axios from '~/lib/utils/axios_utils';
describe('~/api/projects_api.js', () => {
@@ -59,4 +61,25 @@ describe('~/api/projects_api.js', () => {
});
});
});
+
+ describe('getTransferLocations', () => {
+ beforeEach(() => {
+ jest.spyOn(axios, 'get');
+ });
+
+ it('retrieves transfer locations from the correct URL and returns them in the response data', async () => {
+ const params = { page: 1 };
+ const expectedUrl = '/api/v7/projects/1/transfer_locations';
+
+ mock.onGet(expectedUrl).replyOnce(200, { data: getTransferLocationsResponse });
+
+ await expect(projectsApi.getTransferLocations(projectId, params)).resolves.toMatchObject({
+ data: { data: getTransferLocationsResponse },
+ });
+
+ expect(axios.get).toHaveBeenCalledWith(expectedUrl, {
+ params: { ...params, per_page: DEFAULT_PER_PAGE },
+ });
+ });
+ });
});
diff --git a/spec/frontend/awards_handler_spec.js b/spec/frontend/awards_handler_spec.js
index b14bc5122b9..1a54b9909ba 100644
--- a/spec/frontend/awards_handler_spec.js
+++ b/spec/frontend/awards_handler_spec.js
@@ -185,7 +185,9 @@ describe('AwardsHandler', () => {
describe('::getAwardUrl', () => {
it('returns the url for request', () => {
- expect(awardsHandler.getAwardUrl()).toBe('http://test.host/-/snippets/1/toggle_award_emoji');
+ expect(awardsHandler.getAwardUrl()).toBe(
+ document.querySelector('.js-awards-block').dataset.awardUrl,
+ );
});
});
diff --git a/spec/frontend/badges/components/badge_form_spec.js b/spec/frontend/badges/components/badge_form_spec.js
index 6d8a00eb50b..0a736df7075 100644
--- a/spec/frontend/badges/components/badge_form_spec.js
+++ b/spec/frontend/badges/components/badge_form_spec.js
@@ -1,195 +1,183 @@
import MockAdapter from 'axios-mock-adapter';
-import Vue, { nextTick } from 'vue';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount } from '@vue/test-utils';
import { DUMMY_IMAGE_URL, TEST_HOST } from 'helpers/test_constants';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
import BadgeForm from '~/badges/components/badge_form.vue';
import createEmptyBadge from '~/badges/empty_badge';
-import store from '~/badges/store';
+
+import createState from '~/badges/store/state';
+import mutations from '~/badges/store/mutations';
+import actions from '~/badges/store/actions';
+
import axios from '~/lib/utils/axios_utils';
-// avoid preview background process
-BadgeForm.methods.debouncedPreview = () => {};
+Vue.use(Vuex);
describe('BadgeForm component', () => {
- const Component = Vue.extend(BadgeForm);
let axiosMock;
- let vm;
+ let mockedActions;
+ let wrapper;
+
+ const createComponent = (propsData, customState = {}) => {
+ mockedActions = Object.fromEntries(Object.keys(actions).map((name) => [name, jest.fn()]));
+
+ const store = new Vuex.Store({
+ state: {
+ ...createState(),
+ ...customState,
+ },
+ mutations,
+ actions: mockedActions,
+ });
- beforeEach(() => {
- setHTMLFixture(`
- <div id="dummy-element"></div>
- `);
+ wrapper = mount(BadgeForm, {
+ store,
+ propsData,
+ attachTo: document.body,
+ });
+ };
+ beforeEach(() => {
axiosMock = new MockAdapter(axios);
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
axiosMock.restore();
- resetHTMLFixture();
});
- describe('methods', () => {
- beforeEach(() => {
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- props: {
- isEditing: false,
- },
- });
- });
+ it('stops editing when cancel button is clicked', async () => {
+ createComponent({ isEditing: true });
- describe('onCancel', () => {
- it('calls stopEditing', () => {
- jest.spyOn(vm, 'stopEditing').mockImplementation(() => {});
+ const cancelButton = wrapper.find('.row-content-block button');
- vm.onCancel();
+ await cancelButton.trigger('click');
- expect(vm.stopEditing).toHaveBeenCalled();
- });
- });
+ expect(mockedActions.stopEditing).toHaveBeenCalled();
});
- const sharedSubmitTests = (submitAction) => {
+ const sharedSubmitTests = (submitAction, props) => {
const nameSelector = '#badge-name';
const imageUrlSelector = '#badge-image-url';
- const findImageUrlElement = () => vm.$el.querySelector(imageUrlSelector);
+ const findImageUrl = () => wrapper.find(imageUrlSelector);
const linkUrlSelector = '#badge-link-url';
- const findLinkUrlElement = () => vm.$el.querySelector(linkUrlSelector);
+ const findLinkUrl = () => wrapper.find(linkUrlSelector);
const setValue = (inputElementSelector, value) => {
- const inputElement = vm.$el.querySelector(inputElementSelector);
- inputElement.value = value;
- inputElement.dispatchEvent(new Event('input'));
+ const input = wrapper.find(inputElementSelector);
+ return input.setValue(value);
};
const submitForm = () => {
- const submitButton = vm.$el.querySelector('button[type="submit"]');
- submitButton.click();
+ const submitButton = wrapper.find('button[type="submit"]');
+ return submitButton.trigger('click');
};
const expectInvalidInput = (inputElementSelector) => {
- const inputElement = vm.$el.querySelector(inputElementSelector);
+ const input = wrapper.find(inputElementSelector);
- expect(inputElement.checkValidity()).toBe(false);
- const feedbackElement = vm.$el.querySelector(`${inputElementSelector} + .invalid-feedback`);
+ expect(input.element.checkValidity()).toBe(false);
+ const feedbackElement = wrapper.find(`${inputElementSelector} + .invalid-feedback`);
- expect(feedbackElement).toBeVisible();
+ expect(feedbackElement.isVisible()).toBe(true);
};
- beforeEach(async () => {
- jest.spyOn(vm, submitAction).mockReturnValue(Promise.resolve());
- store.replaceState({
- ...store.state,
+ beforeEach(() => {
+ createComponent(props, {
badgeInAddForm: createEmptyBadge(),
badgeInEditForm: createEmptyBadge(),
isSaving: false,
});
- await nextTick();
setValue(nameSelector, 'TestBadge');
setValue(linkUrlSelector, `${TEST_HOST}/link/url`);
setValue(imageUrlSelector, `${window.location.origin}${DUMMY_IMAGE_URL}`);
});
- it('returns immediately if imageUrl is empty', () => {
- setValue(imageUrlSelector, '');
+ it('returns immediately if imageUrl is empty', async () => {
+ await setValue(imageUrlSelector, '');
- submitForm();
+ await submitForm();
expectInvalidInput(imageUrlSelector);
- expect(vm[submitAction]).not.toHaveBeenCalled();
+ expect(mockedActions[submitAction]).not.toHaveBeenCalled();
});
- it('returns immediately if imageUrl is malformed', () => {
- setValue(imageUrlSelector, 'not-a-url');
+ it('returns immediately if imageUrl is malformed', async () => {
+ await setValue(imageUrlSelector, 'not-a-url');
- submitForm();
+ await submitForm();
expectInvalidInput(imageUrlSelector);
- expect(vm[submitAction]).not.toHaveBeenCalled();
+ expect(mockedActions[submitAction]).not.toHaveBeenCalled();
});
- it('returns immediately if linkUrl is empty', () => {
- setValue(linkUrlSelector, '');
+ it('returns immediately if linkUrl is empty', async () => {
+ await setValue(linkUrlSelector, '');
- submitForm();
+ await submitForm();
expectInvalidInput(linkUrlSelector);
- expect(vm[submitAction]).not.toHaveBeenCalled();
+ expect(mockedActions[submitAction]).not.toHaveBeenCalled();
});
- it('returns immediately if linkUrl is malformed', () => {
- setValue(linkUrlSelector, 'not-a-url');
+ it('returns immediately if linkUrl is malformed', async () => {
+ await setValue(linkUrlSelector, 'not-a-url');
- submitForm();
+ await submitForm();
expectInvalidInput(linkUrlSelector);
- expect(vm[submitAction]).not.toHaveBeenCalled();
+ expect(mockedActions[submitAction]).not.toHaveBeenCalled();
});
- it(`calls ${submitAction}`, () => {
- submitForm();
+ it(`calls ${submitAction}`, async () => {
+ await submitForm();
- expect(findImageUrlElement().checkValidity()).toBe(true);
- expect(findLinkUrlElement().checkValidity()).toBe(true);
- expect(vm[submitAction]).toHaveBeenCalled();
+ expect(findImageUrl().element.checkValidity()).toBe(true);
+ expect(findLinkUrl().element.checkValidity()).toBe(true);
+ expect(mockedActions[submitAction]).toHaveBeenCalled();
});
};
describe('if isEditing is false', () => {
- beforeEach(() => {
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- props: {
- isEditing: false,
- },
- });
- });
+ const props = { isEditing: false };
it('renders one button', () => {
- expect(vm.$el.querySelector('.row-content-block')).toBeNull();
- const buttons = vm.$el.querySelectorAll('.form-group:last-of-type button');
+ createComponent(props);
+
+ expect(wrapper.find('.row-content-block').exists()).toBe(false);
+ const buttons = wrapper.findAll('.form-group:last-of-type button');
- expect(buttons.length).toBe(1);
- const buttonAddElement = buttons[0];
+ expect(buttons).toHaveLength(1);
+ const buttonAddWrapper = buttons.at(0);
- expect(buttonAddElement).toBeVisible();
- expect(buttonAddElement).toHaveText('Add badge');
+ expect(buttonAddWrapper.isVisible()).toBe(true);
+ expect(buttonAddWrapper.text()).toBe('Add badge');
});
- sharedSubmitTests('addBadge');
+ sharedSubmitTests('addBadge', props);
});
describe('if isEditing is true', () => {
- beforeEach(() => {
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
- props: {
- isEditing: true,
- },
- });
- });
+ const props = { isEditing: true };
it('renders two buttons', () => {
- const buttons = vm.$el.querySelectorAll('.row-content-block button');
+ createComponent(props);
+ const buttons = wrapper.findAll('.row-content-block button');
- expect(buttons.length).toBe(2);
- const buttonSaveElement = buttons[1];
+ expect(buttons).toHaveLength(2);
- expect(buttonSaveElement).toBeVisible();
- expect(buttonSaveElement).toHaveText('Save changes');
- const buttonCancelElement = buttons[0];
+ const saveButton = buttons.at(1);
+ expect(saveButton.isVisible()).toBe(true);
+ expect(saveButton.text()).toBe('Save changes');
- expect(buttonCancelElement).toBeVisible();
- expect(buttonCancelElement).toHaveText('Cancel');
+ const cancelButton = buttons.at(0);
+ expect(cancelButton.isVisible()).toBe(true);
+ expect(cancelButton.text()).toBe('Cancel');
});
- sharedSubmitTests('saveBadge');
+ sharedSubmitTests('saveBadge', props);
});
});
diff --git a/spec/frontend/badges/components/badge_list_row_spec.js b/spec/frontend/badges/components/badge_list_row_spec.js
index ad8426f3168..ee7ccac974a 100644
--- a/spec/frontend/badges/components/badge_list_row_spec.js
+++ b/spec/frontend/badges/components/badge_list_row_spec.js
@@ -1,103 +1,118 @@
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount } from '@vue/test-utils';
+
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
import BadgeListRow from '~/badges/components/badge_list_row.vue';
import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
-import store from '~/badges/store';
+
+import createState from '~/badges/store/state';
+import mutations from '~/badges/store/mutations';
+import actions from '~/badges/store/actions';
+
import { createDummyBadge } from '../dummy_badge';
+Vue.use(Vuex);
+
describe('BadgeListRow component', () => {
- const Component = Vue.extend(BadgeListRow);
let badge;
- let vm;
-
- beforeEach(() => {
- setHTMLFixture(`
- <div id="delete-badge-modal" class="modal"></div>
- <div id="dummy-element"></div>
- `);
- store.replaceState({
- ...store.state,
- kind: PROJECT_BADGE,
+ let wrapper;
+ let mockedActions;
+
+ const createComponent = (kind) => {
+ setHTMLFixture(`<div id="delete-badge-modal" class="modal"></div>`);
+
+ mockedActions = Object.fromEntries(Object.keys(actions).map((name) => [name, jest.fn()]));
+
+ const store = new Vuex.Store({
+ state: {
+ ...createState(),
+ kind: PROJECT_BADGE,
+ },
+ mutations,
+ actions: mockedActions,
});
+
badge = createDummyBadge();
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
+ badge.kind = kind;
+ wrapper = mount(BadgeListRow, {
+ attachTo: document.body,
store,
- props: { badge },
+ propsData: { badge },
});
- });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
resetHTMLFixture();
});
- it('renders the badge', () => {
- const badgeElement = vm.$el.querySelector('.project-badge');
+ describe('for a project badge', () => {
+ beforeEach(() => {
+ createComponent(PROJECT_BADGE);
+ });
- expect(badgeElement).not.toBeNull();
- expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl);
- });
+ it('renders the badge', () => {
+ const badgeImage = wrapper.find('.project-badge');
- it('renders the badge name', () => {
- expect(vm.$el.innerText).toMatch(badge.name);
- });
+ expect(badgeImage.exists()).toBe(true);
+ expect(badgeImage.attributes('src')).toBe(badge.renderedImageUrl);
+ });
- it('renders the badge link', () => {
- expect(vm.$el.innerText).toMatch(badge.linkUrl);
- });
+ it('renders the badge name', () => {
+ expect(wrapper.text()).toMatch(badge.name);
+ });
- it('renders the badge kind', () => {
- expect(vm.$el.innerText).toMatch('Project Badge');
- });
+ it('renders the badge link', () => {
+ expect(wrapper.text()).toMatch(badge.linkUrl);
+ });
- it('shows edit and delete buttons', () => {
- const buttons = vm.$el.querySelectorAll('.table-button-footer button');
+ it('renders the badge kind', () => {
+ expect(wrapper.text()).toMatch('Project Badge');
+ });
- expect(buttons).toHaveLength(2);
- const buttonEditElement = buttons[0];
+ it('shows edit and delete buttons', () => {
+ const buttons = wrapper.findAll('.table-button-footer button');
- expect(buttonEditElement).toBeVisible();
- expect(buttonEditElement).toHaveSpriteIcon('pencil');
- const buttonDeleteElement = buttons[1];
+ expect(buttons).toHaveLength(2);
+ const editButton = buttons.at(0);
- expect(buttonDeleteElement).toBeVisible();
- expect(buttonDeleteElement).toHaveSpriteIcon('remove');
- });
+ expect(editButton.isVisible()).toBe(true);
+ expect(editButton.element).toHaveSpriteIcon('pencil');
- it('calls editBadge when clicking then edit button', () => {
- jest.spyOn(vm, 'editBadge').mockImplementation(() => {});
+ const deleteButton = buttons.at(1);
+ expect(deleteButton.isVisible()).toBe(true);
+ expect(deleteButton.element).toHaveSpriteIcon('remove');
+ });
- const editButton = vm.$el.querySelector('.table-button-footer button:first-of-type');
- editButton.click();
+ it('calls editBadge when clicking then edit button', async () => {
+ const editButton = wrapper.find('.table-button-footer button:first-of-type');
- expect(vm.editBadge).toHaveBeenCalled();
- });
+ await editButton.trigger('click');
+
+ expect(mockedActions.editBadge).toHaveBeenCalled();
+ });
- it('calls updateBadgeInModal and shows modal when clicking then delete button', async () => {
- jest.spyOn(vm, 'updateBadgeInModal').mockImplementation(() => {});
+ it('calls updateBadgeInModal and shows modal when clicking then delete button', async () => {
+ const deleteButton = wrapper.find('.table-button-footer button:last-of-type');
- const deleteButton = vm.$el.querySelector('.table-button-footer button:last-of-type');
- deleteButton.click();
+ await deleteButton.trigger('click');
- await nextTick();
- expect(vm.updateBadgeInModal).toHaveBeenCalled();
+ expect(mockedActions.updateBadgeInModal).toHaveBeenCalled();
+ });
});
describe('for a group badge', () => {
- beforeEach(async () => {
- badge.kind = GROUP_BADGE;
-
- await nextTick();
+ beforeEach(() => {
+ createComponent(GROUP_BADGE);
});
it('renders the badge kind', () => {
- expect(vm.$el.innerText).toMatch('Group Badge');
+ expect(wrapper.text()).toMatch('Group Badge');
});
it('hides edit and delete buttons', () => {
- const buttons = vm.$el.querySelectorAll('.table-button-footer button');
+ const buttons = wrapper.findAll('.table-button-footer button');
expect(buttons).toHaveLength(0);
});
diff --git a/spec/frontend/badges/components/badge_list_spec.js b/spec/frontend/badges/components/badge_list_spec.js
index 32cd9483ef8..606b1bc9cce 100644
--- a/spec/frontend/badges/components/badge_list_spec.js
+++ b/spec/frontend/badges/components/badge_list_spec.js
@@ -1,83 +1,96 @@
-import Vue, { nextTick } from 'vue';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount } from '@vue/test-utils';
+
import BadgeList from '~/badges/components/badge_list.vue';
import { GROUP_BADGE, PROJECT_BADGE } from '~/badges/constants';
-import store from '~/badges/store';
+
+import createState from '~/badges/store/state';
+import mutations from '~/badges/store/mutations';
+import actions from '~/badges/store/actions';
+
import { createDummyBadge } from '../dummy_badge';
-describe('BadgeList component', () => {
- const Component = Vue.extend(BadgeList);
- const numberOfDummyBadges = 3;
- let vm;
-
- beforeEach(() => {
- setHTMLFixture('<div id="dummy-element"></div>');
- const badges = [];
- for (let id = 0; id < numberOfDummyBadges; id += 1) {
- badges.push({ id, ...createDummyBadge() });
- }
- store.replaceState({
- ...store.state,
- badges,
- kind: PROJECT_BADGE,
- isLoading: false,
- });
+Vue.use(Vuex);
- // Can be removed once GlLoadingIcon no longer throws a warning
- jest.spyOn(global.console, 'warn').mockImplementation(() => jest.fn());
+const numberOfDummyBadges = 3;
+const badges = Array.from({ length: numberOfDummyBadges }).map((_, idx) => ({
+ ...createDummyBadge(),
+ id: idx,
+}));
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
- store,
+describe('BadgeList component', () => {
+ let wrapper;
+
+ const createComponent = (customState) => {
+ const mockedActions = Object.fromEntries(Object.keys(actions).map((name) => [name, jest.fn()]));
+
+ const store = new Vuex.Store({
+ state: {
+ ...createState(),
+ isLoading: false,
+ ...customState,
+ },
+ mutations,
+ actions: mockedActions,
});
- });
+
+ wrapper = mount(BadgeList, { store });
+ };
afterEach(() => {
- vm.$destroy();
- resetHTMLFixture();
+ wrapper.destroy();
});
- it('renders a header with the badge count', () => {
- const header = vm.$el.querySelector('.card-header');
+ describe('for project badges', () => {
+ it('renders a header with the badge count', () => {
+ createComponent({
+ kind: PROJECT_BADGE,
+ badges,
+ });
- expect(header).toHaveText(new RegExp(`Your badges\\s+${numberOfDummyBadges}`));
- });
+ const header = wrapper.find('.card-header');
- it('renders a row for each badge', () => {
- const rows = vm.$el.querySelectorAll('.gl-responsive-table-row');
+ expect(header.text()).toMatchInterpolatedText('Your badges 3');
+ });
- expect(rows).toHaveLength(numberOfDummyBadges);
- });
+ it('renders a row for each badge', () => {
+ createComponent({
+ kind: PROJECT_BADGE,
+ badges,
+ });
- it('renders a message if no badges exist', async () => {
- store.state.badges = [];
+ const rows = wrapper.findAll('.gl-responsive-table-row');
- await nextTick();
- expect(vm.$el.innerText).toMatch('This project has no badges');
- });
+ expect(rows).toHaveLength(numberOfDummyBadges);
+ });
- it('shows a loading icon when loading', async () => {
- store.state.isLoading = true;
+ it('renders a message if no badges exist', () => {
+ createComponent({
+ kind: PROJECT_BADGE,
+ badges: [],
+ });
- await nextTick();
- const loadingIcon = vm.$el.querySelector('.gl-spinner');
+ expect(wrapper.text()).toMatch('This project has no badges');
+ });
- expect(loadingIcon).toBeVisible();
- });
+ it('shows a loading icon when loading', () => {
+ createComponent({ isLoading: true });
- describe('for group badges', () => {
- beforeEach(async () => {
- store.state.kind = GROUP_BADGE;
+ const loadingIcon = wrapper.find('.gl-spinner');
- await nextTick();
+ expect(loadingIcon.isVisible()).toBe(true);
});
+ });
- it('renders a message if no badges exist', async () => {
- store.state.badges = [];
+ describe('for group badges', () => {
+ it('renders a message if no badges exist', () => {
+ createComponent({
+ kind: GROUP_BADGE,
+ badges: [],
+ });
- await nextTick();
- expect(vm.$el.innerText).toMatch('This group has no badges');
+ expect(wrapper.text()).toMatch('This group has no badges');
});
});
});
diff --git a/spec/frontend/badges/components/badge_spec.js b/spec/frontend/badges/components/badge_spec.js
index 19b3a9f23a6..b468e38f19e 100644
--- a/spec/frontend/badges/components/badge_spec.js
+++ b/spec/frontend/badges/components/badge_spec.js
@@ -1,138 +1,78 @@
-import Vue, { nextTick } from 'vue';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
+
import { DUMMY_IMAGE_URL, TEST_HOST } from 'spec/test_constants';
import Badge from '~/badges/components/badge.vue';
describe('Badge component', () => {
- const Component = Vue.extend(Badge);
const dummyProps = {
imageUrl: DUMMY_IMAGE_URL,
linkUrl: `${TEST_HOST}/badge/link/url`,
};
- let vm;
+ let wrapper;
const findElements = () => {
- const buttons = vm.$el.querySelectorAll('button');
+ const buttons = wrapper.findAll('button');
return {
- badgeImage: vm.$el.querySelector('img.project-badge'),
- loadingIcon: vm.$el.querySelector('.gl-spinner'),
- reloadButton: buttons[buttons.length - 1],
+ badgeImage: wrapper.find('img.project-badge'),
+ loadingIcon: wrapper.find('.gl-spinner'),
+ reloadButton: buttons.at(buttons.length - 1),
};
};
- const createComponent = (props, el = null) => {
- vm = mountComponent(Component, props, el);
- const { badgeImage } = findElements();
- return new Promise((resolve) => {
- badgeImage.addEventListener('load', resolve);
- // Manually dispatch load event as it is not triggered
- badgeImage.dispatchEvent(new Event('load'));
- }).then(() => nextTick());
+ const createComponent = (propsData) => {
+ wrapper = mount(Badge, { propsData });
};
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- describe('watchers', () => {
- describe('imageUrl', () => {
- it('sets isLoading and resets numRetries and hasError', async () => {
- const props = { ...dummyProps };
- await createComponent(props);
- expect(vm.isLoading).toBe(false);
- vm.hasError = true;
- vm.numRetries = 42;
-
- vm.imageUrl = `${props.imageUrl}#something/else`;
- await nextTick();
- expect(vm.isLoading).toBe(true);
- expect(vm.numRetries).toBe(0);
- expect(vm.hasError).toBe(false);
- });
- });
+ beforeEach(() => {
+ return createComponent({ ...dummyProps }, '#dummy-element');
});
- describe('methods', () => {
- beforeEach(async () => {
- await createComponent({ ...dummyProps });
- });
+ it('shows a badge image after loading', async () => {
+ const { badgeImage, loadingIcon, reloadButton } = findElements();
+ badgeImage.element.dispatchEvent(new Event('load'));
- it('onError resets isLoading and sets hasError', () => {
- vm.hasError = false;
- vm.isLoading = true;
+ await nextTick();
- vm.onError();
+ expect(badgeImage.isVisible()).toBe(true);
+ expect(loadingIcon.isVisible()).toBe(false);
+ expect(reloadButton.isVisible()).toBe(false);
+ expect(wrapper.find('.btn-group').isVisible()).toBe(false);
+ });
- expect(vm.hasError).toBe(true);
- expect(vm.isLoading).toBe(false);
- });
+ it('shows a loading icon when loading', () => {
+ const { badgeImage, loadingIcon, reloadButton } = findElements();
- it('onLoad sets isLoading', () => {
- vm.isLoading = true;
+ expect(badgeImage.isVisible()).toBe(false);
+ expect(loadingIcon.isVisible()).toBe(true);
+ expect(reloadButton.isVisible()).toBe(false);
+ expect(wrapper.find('.btn-group').isVisible()).toBe(false);
+ });
- vm.onLoad();
+ it('shows an error and reload button if loading failed', async () => {
+ const { badgeImage, loadingIcon, reloadButton } = findElements();
+ badgeImage.element.dispatchEvent(new Event('error'));
- expect(vm.isLoading).toBe(false);
- });
+ await nextTick();
- it('reloadImage resets isLoading and hasError and increases numRetries', () => {
- vm.hasError = true;
- vm.isLoading = false;
- vm.numRetries = 0;
+ expect(badgeImage.isVisible()).toBe(false);
+ expect(loadingIcon.isVisible()).toBe(false);
+ expect(reloadButton.isVisible()).toBe(true);
+ expect(reloadButton.element).toHaveSpriteIcon('retry');
+ expect(wrapper.text()).toBe('No badge image');
+ });
- vm.reloadImage();
+ it('retries an image when loading failed and reload button is clicked', async () => {
+ const { badgeImage, reloadButton } = findElements();
+ badgeImage.element.dispatchEvent(new Event('error'));
+ await nextTick();
- expect(vm.hasError).toBe(false);
- expect(vm.isLoading).toBe(true);
- expect(vm.numRetries).toBe(1);
- });
- });
+ await reloadButton.trigger('click');
- describe('behavior', () => {
- beforeEach(() => {
- setHTMLFixture('<div id="dummy-element"></div>');
- return createComponent({ ...dummyProps }, '#dummy-element');
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('shows a badge image after loading', () => {
- expect(vm.isLoading).toBe(false);
- expect(vm.hasError).toBe(false);
- const { badgeImage, loadingIcon, reloadButton } = findElements();
-
- expect(badgeImage).toBeVisible();
- expect(loadingIcon).toBeHidden();
- expect(reloadButton).toBeHidden();
- expect(vm.$el.querySelector('.btn-group')).toBeHidden();
- });
-
- it('shows a loading icon when loading', async () => {
- vm.isLoading = true;
-
- await nextTick();
- const { badgeImage, loadingIcon, reloadButton } = findElements();
-
- expect(badgeImage).toBeHidden();
- expect(loadingIcon).toBeVisible();
- expect(reloadButton).toBeHidden();
- expect(vm.$el.querySelector('.btn-group')).toBeHidden();
- });
-
- it('shows an error and reload button if loading failed', async () => {
- vm.hasError = true;
-
- await nextTick();
- const { badgeImage, loadingIcon, reloadButton } = findElements();
-
- expect(badgeImage).toBeHidden();
- expect(loadingIcon).toBeHidden();
- expect(reloadButton).toBeVisible();
- expect(reloadButton).toHaveSpriteIcon('retry');
- expect(vm.$el.innerText.trim()).toBe('No badge image');
- });
+ expect(badgeImage.attributes('src')).toBe(`${dummyProps.imageUrl}#retries=1`);
});
});
diff --git a/spec/frontend/batch_comments/components/drafts_count_spec.js b/spec/frontend/batch_comments/components/drafts_count_spec.js
index 390ef21929c..c3a7946c85c 100644
--- a/spec/frontend/batch_comments/components/drafts_count_spec.js
+++ b/spec/frontend/batch_comments/components/drafts_count_spec.js
@@ -1,40 +1,36 @@
-import Vue, { nextTick } from 'vue';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import DraftsCount from '~/batch_comments/components/drafts_count.vue';
import { createStore } from '~/batch_comments/stores';
describe('Batch comments drafts count component', () => {
- let vm;
- let Component;
-
- beforeAll(() => {
- Component = Vue.extend(DraftsCount);
- });
+ let store;
+ let wrapper;
beforeEach(() => {
- const store = createStore();
+ store = createStore();
store.state.batchComments.drafts.push('comment');
- vm = mountComponentWithStore(Component, { store });
+ wrapper = mount(DraftsCount, { store });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders count', () => {
- expect(vm.$el.textContent).toContain('1');
+ expect(wrapper.text()).toContain('1');
});
it('renders screen reader text', async () => {
- const el = vm.$el.querySelector('.sr-only');
+ const el = wrapper.find('.sr-only');
- expect(el.textContent).toContain('draft');
-
- vm.$store.state.batchComments.drafts.push('comment 2');
+ expect(el.text()).toContain('draft');
+ store.state.batchComments.drafts.push('comment 2');
await nextTick();
- expect(el.textContent).toContain('drafts');
+
+ expect(el.text()).toContain('drafts');
});
});
diff --git a/spec/frontend/batch_comments/components/preview_item_spec.js b/spec/frontend/batch_comments/components/preview_item_spec.js
index 91e6b84a216..6a104f0c787 100644
--- a/spec/frontend/batch_comments/components/preview_item_spec.js
+++ b/spec/frontend/batch_comments/components/preview_item_spec.js
@@ -1,5 +1,4 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import PreviewItem from '~/batch_comments/components/preview_item.vue';
import { createStore } from '~/batch_comments/stores';
import diffsModule from '~/diffs/store/modules';
@@ -8,8 +7,7 @@ import '~/behaviors/markdown/render_gfm';
import { createDraft } from '../mock_data';
describe('Batch comments draft preview item component', () => {
- let vm;
- let Component;
+ let wrapper;
let draft;
function createComponent(isLast = false, extra = {}, extendStore = () => {}) {
@@ -24,21 +22,17 @@ describe('Batch comments draft preview item component', () => {
...extra,
};
- vm = mountComponentWithStore(Component, { store, props: { draft, isLast } });
+ wrapper = mount(PreviewItem, { store, propsData: { draft, isLast } });
}
- beforeAll(() => {
- Component = Vue.extend(PreviewItem);
- });
-
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders text content', () => {
createComponent(false, { note_html: '<img src="" /><p>Hello world</p>' });
- expect(vm.$el.querySelector('.review-preview-item-content').innerHTML).toEqual(
+ expect(wrapper.find('.review-preview-item-content').element.innerHTML).toBe(
'<p>Hello world</p>',
);
});
@@ -47,9 +41,7 @@ describe('Batch comments draft preview item component', () => {
it('renders file path', () => {
createComponent(false, { file_path: 'index.js', file_hash: 'abc', position: {} });
- expect(vm.$el.querySelector('.review-preview-item-header-text').textContent).toContain(
- 'index.js',
- );
+ expect(wrapper.find('.review-preview-item-header-text').text()).toContain('index.js');
});
it('renders new line position', () => {
@@ -66,7 +58,7 @@ describe('Batch comments draft preview item component', () => {
},
});
- expect(vm.$el.querySelector('.bold').textContent).toContain(':+1');
+ expect(wrapper.find('.bold').text()).toContain(':+1');
});
it('renders old line position', () => {
@@ -82,7 +74,7 @@ describe('Batch comments draft preview item component', () => {
},
});
- expect(vm.$el.querySelector('.bold').textContent).toContain(':2');
+ expect(wrapper.find('.bold').text()).toContain(':2');
});
it('renders image position', () => {
@@ -92,7 +84,7 @@ describe('Batch comments draft preview item component', () => {
position: { position_type: 'image', x: 10, y: 20 },
});
- expect(vm.$el.querySelector('.bold').textContent).toContain('10x 20y');
+ expect(wrapper.find('.bold').text()).toContain('10x 20y');
});
});
@@ -113,15 +105,13 @@ describe('Batch comments draft preview item component', () => {
});
it('renders title', () => {
- expect(vm.$el.querySelector('.review-preview-item-header-text').textContent).toContain(
+ expect(wrapper.find('.review-preview-item-header-text').text()).toContain(
"Author 'Nick' Name's thread",
);
});
it('renders thread resolved text', () => {
- expect(vm.$el.querySelector('.draft-note-resolution').textContent).toContain(
- 'Thread will be resolved',
- );
+ expect(wrapper.find('.draft-note-resolution').text()).toContain('Thread will be resolved');
});
});
@@ -131,9 +121,7 @@ describe('Batch comments draft preview item component', () => {
store.state.notes.discussions.push({});
});
- expect(vm.$el.querySelector('.review-preview-item-header-text').textContent).toContain(
- 'Your new comment',
- );
+ expect(wrapper.find('.review-preview-item-header-text').text()).toContain('Your new comment');
});
});
});
diff --git a/spec/frontend/batch_comments/components/publish_button_spec.js b/spec/frontend/batch_comments/components/publish_button_spec.js
index 9a782ec09b6..5e3fa3e9446 100644
--- a/spec/frontend/batch_comments/components/publish_button_spec.js
+++ b/spec/frontend/batch_comments/components/publish_button_spec.js
@@ -1,38 +1,34 @@
-import Vue, { nextTick } from 'vue';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import PublishButton from '~/batch_comments/components/publish_button.vue';
import { createStore } from '~/batch_comments/stores';
describe('Batch comments publish button component', () => {
- let vm;
- let Component;
-
- beforeAll(() => {
- Component = Vue.extend(PublishButton);
- });
+ let wrapper;
+ let store;
beforeEach(() => {
- const store = createStore();
+ store = createStore();
- vm = mountComponentWithStore(Component, { store, props: { shouldPublish: true } });
+ wrapper = mount(PublishButton, { store, propsData: { shouldPublish: true } });
- jest.spyOn(vm.$store, 'dispatch').mockImplementation();
+ jest.spyOn(store, 'dispatch').mockImplementation();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('dispatches publishReview on click', () => {
- vm.$el.click();
+ it('dispatches publishReview on click', async () => {
+ await wrapper.trigger('click');
- expect(vm.$store.dispatch).toHaveBeenCalledWith('batchComments/publishReview', undefined);
+ expect(store.dispatch).toHaveBeenCalledWith('batchComments/publishReview', undefined);
});
it('sets loading when isPublishing is true', async () => {
- vm.$store.state.batchComments.isPublishing = true;
+ store.state.batchComments.isPublishing = true;
await nextTick();
- expect(vm.$el.getAttribute('disabled')).toBe('disabled');
+ expect(wrapper.attributes('disabled')).toBe('disabled');
});
});
diff --git a/spec/frontend/behaviors/bind_in_out_spec.js b/spec/frontend/behaviors/bind_in_out_spec.js
index 4d958e30b4d..7b40b1d3cd7 100644
--- a/spec/frontend/behaviors/bind_in_out_spec.js
+++ b/spec/frontend/behaviors/bind_in_out_spec.js
@@ -1,4 +1,3 @@
-import ClassSpecHelper from 'helpers/class_spec_helper';
import BindInOut from '~/behaviors/bind_in_out';
describe('BindInOut', () => {
@@ -142,7 +141,9 @@ describe('BindInOut', () => {
testContext.initAll = BindInOut.initAll();
});
- ClassSpecHelper.itShouldBeAStaticMethod(BindInOut, 'initAll');
+ it('should be a static method', () => {
+ expect(BindInOut.initAll).toEqual(expect.any(Function));
+ });
it('should call .querySelectorAll', () => {
expect(document.querySelectorAll).toHaveBeenCalledWith('*[data-bind-in]');
@@ -169,7 +170,9 @@ describe('BindInOut', () => {
testContext.init = BindInOut.init({}, {});
});
- ClassSpecHelper.itShouldBeAStaticMethod(BindInOut, 'init');
+ it('should be a static method', () => {
+ expect(BindInOut.init).toEqual(expect.any(Function));
+ });
it('should call .addEvents', () => {
expect(BindInOut.prototype.addEvents).toHaveBeenCalled();
diff --git a/spec/frontend/blame/blame_redirect_spec.js b/spec/frontend/blame/blame_redirect_spec.js
new file mode 100644
index 00000000000..beb10139b3a
--- /dev/null
+++ b/spec/frontend/blame/blame_redirect_spec.js
@@ -0,0 +1,70 @@
+import redirectToCorrectPage from '~/blame/blame_redirect';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { createAlert } from '~/flash';
+
+jest.mock('~/flash');
+
+describe('Blame page redirect', () => {
+ beforeEach(() => {
+ global.window = Object.create(window);
+ const url = 'https://gitlab.com/flightjs/Flight/-/blame/master/file.json';
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: {
+ href: url,
+ hash: '',
+ search: '',
+ },
+ });
+
+ setHTMLFixture(`<div class="js-per-page" data-per-page="1000"></div>`);
+ });
+
+ afterEach(() => {
+ createAlert.mockClear();
+ resetHTMLFixture();
+ });
+
+ it('performs redirect to further pages when needed', () => {
+ window.location.hash = '#L1001';
+ redirectToCorrectPage();
+ expect(window.location.href).toMatch('?page=2');
+ });
+
+ it('performs redirect back to first page when needed', () => {
+ window.location.href = 'https://gitlab.com/flightjs/Flight/-/blame/master/file.json';
+ window.location.search = '?page=200';
+ window.location.hash = '#L999';
+ redirectToCorrectPage();
+ expect(window.location.href).toMatch('?page=1');
+ });
+
+ it('doesn`t perform redirect when the line is still on page 1', () => {
+ window.location.hash = '#L1000';
+ redirectToCorrectPage();
+ expect(window.location.href).not.toMatch('?page');
+ });
+
+ it('doesn`t perform redirect when "no_pagination" param is present', () => {
+ window.location.href = 'https://gitlab.com/flightjs/Flight/-/blame/master/file.json';
+ window.location.search = '?no_pagination=true';
+ window.location.hash = '#L1001';
+ redirectToCorrectPage();
+ expect(window.location.href).not.toMatch('?page');
+ });
+
+ it('doesn`t perform redirect when perPage is not present', () => {
+ setHTMLFixture(`<div class="js-per-page"></div>`);
+ window.location.hash = '#L1001';
+ redirectToCorrectPage();
+ expect(window.location.href).not.toMatch('?page');
+ });
+
+ it('shows alert with a message', () => {
+ window.location.hash = '#L1001';
+ redirectToCorrectPage();
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'Please wait a few moments while we load the file history for this line.',
+ });
+ });
+});
diff --git a/spec/frontend/blob/3d_viewer/mesh_object_spec.js b/spec/frontend/blob/3d_viewer/mesh_object_spec.js
index 3014af073f5..1b0fd362778 100644
--- a/spec/frontend/blob/3d_viewer/mesh_object_spec.js
+++ b/spec/frontend/blob/3d_viewer/mesh_object_spec.js
@@ -1,4 +1,4 @@
-import { BoxGeometry } from 'three/build/three.module';
+import { BoxGeometry } from 'three';
import MeshObject from '~/blob/3d_viewer/mesh_object';
describe('Mesh object', () => {
diff --git a/spec/frontend/blob/blob_blame_link_spec.js b/spec/frontend/blob/blob_blame_link_spec.js
index 0d19177a11f..060e8803520 100644
--- a/spec/frontend/blob/blob_blame_link_spec.js
+++ b/spec/frontend/blob/blob_blame_link_spec.js
@@ -29,19 +29,19 @@ describe('Blob links', () => {
it('adds wrapper elements with correct classes', () => {
const wrapper = document.querySelector('.line-links');
- expect(wrapper).toBeTruthy();
+ expect(wrapper).not.toBeNull();
expect(wrapper.classList).toContain('diff-line-num');
});
it('adds blame link with correct classes and path', () => {
const blameLink = document.querySelector('.file-line-blame');
- expect(blameLink).toBeTruthy();
+ expect(blameLink).not.toBeNull();
expect(blameLink.getAttribute('href')).toBe('/blamePath#L5');
});
it('adds line link within wraper with correct classes and path', () => {
const lineLink = document.querySelector('.file-line-num');
- expect(lineLink).toBeTruthy();
+ expect(lineLink).not.toBeNull();
expect(lineLink.getAttribute('href')).toBe('#L5');
});
});
diff --git a/spec/frontend/blob/components/blob_content_spec.js b/spec/frontend/blob/components/blob_content_spec.js
index 788ee0a86ab..f7b819b6e94 100644
--- a/spec/frontend/blob/components/blob_content_spec.js
+++ b/spec/frontend/blob/components/blob_content_spec.js
@@ -91,13 +91,13 @@ describe('Blob Content component', () => {
it(`properly proxies ${BLOB_RENDER_EVENT_LOAD} event`, () => {
expect(wrapper.emitted(BLOB_RENDER_EVENT_LOAD)).toBeUndefined();
findErrorEl().vm.$emit(BLOB_RENDER_EVENT_LOAD);
- expect(wrapper.emitted(BLOB_RENDER_EVENT_LOAD)).toBeTruthy();
+ expect(wrapper.emitted(BLOB_RENDER_EVENT_LOAD)).toHaveLength(1);
});
it(`properly proxies ${BLOB_RENDER_EVENT_SHOW_SOURCE} event`, () => {
expect(wrapper.emitted(BLOB_RENDER_EVENT_SHOW_SOURCE)).toBeUndefined();
findErrorEl().vm.$emit(BLOB_RENDER_EVENT_SHOW_SOURCE);
- expect(wrapper.emitted(BLOB_RENDER_EVENT_SHOW_SOURCE)).toBeTruthy();
+ expect(wrapper.emitted(BLOB_RENDER_EVENT_SHOW_SOURCE)).toHaveLength(1);
});
});
});
diff --git a/spec/frontend/blob/components/table_contents_spec.js b/spec/frontend/blob/components/table_contents_spec.js
index 2cbac809a0d..5fe328b65ff 100644
--- a/spec/frontend/blob/components/table_contents_spec.js
+++ b/spec/frontend/blob/components/table_contents_spec.js
@@ -71,6 +71,11 @@ describe('Markdown table of contents component', () => {
expect(dropdownItems.exists()).toBe(true);
expect(dropdownItems.length).toBe(4);
+
+ // make sure that this only happens once
+ await setLoaded(true);
+
+ expect(wrapper.findAllComponents(GlDropdownItem).length).toBe(4);
});
it('sets padding for dropdown items', async () => {
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index 2c3ec69f9ae..3ebc51c4bcb 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -5,7 +5,7 @@ import { nextTick } from 'vue';
import setWindowLocation from 'helpers/set_window_location_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import BoardBlockedIcon from '~/boards/components/board_blocked_icon.vue';
+import IssuableBlockedIcon from '~/vue_shared/components/issuable_blocked_icon/issuable_blocked_icon.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
import BoardCardMoveToPosition from '~/boards/components/board_card_move_to_position.vue';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
@@ -39,7 +39,7 @@ describe('Board card component', () => {
let list;
let store;
- const findBoardBlockedIcon = () => wrapper.findComponent(BoardBlockedIcon);
+ const findIssuableBlockedIcon = () => wrapper.findComponent(IssuableBlockedIcon);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findEpicCountablesTotalTooltip = () => wrapper.findComponent(GlTooltip);
const findEpicCountables = () => wrapper.findByTestId('epic-countables');
@@ -189,7 +189,7 @@ describe('Board card component', () => {
},
});
- expect(findBoardBlockedIcon().exists()).toBe(true);
+ expect(findIssuableBlockedIcon().exists()).toBe(true);
});
it('does not show blocked icon if issue is not blocked', () => {
@@ -200,7 +200,7 @@ describe('Board card component', () => {
},
});
- expect(findBoardBlockedIcon().exists()).toBe(false);
+ expect(findIssuableBlockedIcon().exists()).toBe(false);
});
});
@@ -595,5 +595,10 @@ describe('Board card component', () => {
expect(findEpicCountablesTotalWeight().text()).toBe('15');
expect(findEpicProgressTooltip().text()).toBe('10 of 15 weight completed');
});
+
+ it('does not render the move to position icon', () => {
+ createWrapper();
+ expect(findMoveToPositionComponent().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js
index 731578e15a3..1a07b9f0b78 100644
--- a/spec/frontend/boards/components/board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/board_filtered_search_spec.js
@@ -126,6 +126,7 @@ describe('BoardFilteredSearch', () => {
{ type: 'weight', value: { data: '2', operator: '=' } },
{ type: 'iteration', value: { data: 'Any&3', operator: '=' } },
{ type: 'release', value: { data: 'v1.0.0', operator: '=' } },
+ { type: 'health_status', value: { data: 'onTrack', operator: '=' } },
];
jest.spyOn(urlUtility, 'updateHistory');
findFilteredSearch().vm.$emit('onFilter', mockFilters);
@@ -134,7 +135,7 @@ describe('BoardFilteredSearch', () => {
title: '',
replace: true,
url:
- 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label%262&assignee_username=root&milestone_title=New%20Milestone&iteration_id=Any&iteration_cadence_id=3&types=INCIDENT&weight=2&release_tag=v1.0.0',
+ 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label%262&assignee_username=root&milestone_title=New%20Milestone&iteration_id=Any&iteration_cadence_id=3&types=INCIDENT&weight=2&release_tag=v1.0.0&health_status=onTrack',
});
});
@@ -160,7 +161,9 @@ describe('BoardFilteredSearch', () => {
describe('when url params are already set', () => {
beforeEach(() => {
- createComponent({ initialFilterParams: { authorUsername: 'root', labelName: ['label'] } });
+ createComponent({
+ initialFilterParams: { authorUsername: 'root', labelName: ['label'], healthStatus: 'Any' },
+ });
jest.spyOn(store, 'dispatch');
});
@@ -169,6 +172,7 @@ describe('BoardFilteredSearch', () => {
expect(findFilteredSearch().props('initialFilterValue')).toEqual([
{ type: 'author', value: { data: 'root', operator: '=' } },
{ type: 'label', value: { data: 'label', operator: '=' } },
+ { type: 'health_status', value: { data: 'Any', operator: '=' } },
]);
});
});
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index e919300228a..78859525a63 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1047,60 +1047,58 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
let undoMutations;
describe('when re-ordering card', () => {
- beforeEach(
- ({
- itemId = 123,
- fromListId = 'gid://gitlab/List/1',
- toListId = 'gid://gitlab/List/1',
- originalIssue = { foo: 'bar' },
- originalIndex = 0,
- moveBeforeId = undefined,
- moveAfterId = undefined,
- allItemsLoadedInList = true,
- listPosition = undefined,
- } = {}) => {
- state = {
- boardLists: {
- [toListId]: { listType: ListType.backlog },
- [fromListId]: { listType: ListType.backlog },
- },
- boardItems: { [itemId]: originalIssue },
- boardItemsByListId: { [fromListId]: [123] },
- };
- params = {
- itemId,
- fromListId,
- toListId,
- moveBeforeId,
- moveAfterId,
- listPosition,
- allItemsLoadedInList,
- };
- moveMutations = [
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: {
- itemId,
- listId: toListId,
- moveBeforeId,
- moveAfterId,
- listPosition,
- allItemsLoadedInList,
- atIndex: originalIndex,
- },
- },
- ];
- undoMutations = [
- { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ beforeEach(() => {
+ const itemId = 123;
+ const fromListId = 'gid://gitlab/List/1';
+ const toListId = 'gid://gitlab/List/1';
+ const originalIssue = { foo: 'bar' };
+ const originalIndex = 0;
+ const moveBeforeId = undefined;
+ const moveAfterId = undefined;
+ const allItemsLoadedInList = true;
+ const listPosition = undefined;
+
+ state = {
+ boardLists: {
+ [toListId]: { listType: ListType.backlog },
+ [fromListId]: { listType: ListType.backlog },
+ },
+ boardItems: { [itemId]: originalIssue },
+ boardItemsByListId: { [fromListId]: [123] },
+ };
+ params = {
+ itemId,
+ fromListId,
+ toListId,
+ moveBeforeId,
+ moveAfterId,
+ listPosition,
+ allItemsLoadedInList,
+ };
+ moveMutations = [
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: {
+ itemId,
+ listId: toListId,
+ moveBeforeId,
+ moveAfterId,
+ listPosition,
+ allItemsLoadedInList,
+ atIndex: originalIndex,
},
- ];
- },
- );
+ },
+ ];
+ undoMutations = [
+ { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
+ });
it('moveIssueCard commits a correct set of actions', () => {
testAction({
@@ -1144,42 +1142,40 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
},
],
])('when %s', (_, { toListType, fromListType }) => {
- beforeEach(
- ({
- itemId = 123,
- fromListId = 'gid://gitlab/List/1',
- toListId = 'gid://gitlab/List/2',
- originalIssue = { foo: 'bar' },
- originalIndex = 0,
- moveBeforeId = undefined,
- moveAfterId = undefined,
- } = {}) => {
- state = {
- boardLists: {
- [fromListId]: { listType: fromListType },
- [toListId]: { listType: toListType },
- },
- boardItems: { [itemId]: originalIssue },
- boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
- };
- params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
- moveMutations = [
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
- },
- ];
- undoMutations = [
- { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
- },
- ];
- },
- );
+ beforeEach(() => {
+ const itemId = 123;
+ const fromListId = 'gid://gitlab/List/1';
+ const toListId = 'gid://gitlab/List/2';
+ const originalIssue = { foo: 'bar' };
+ const originalIndex = 0;
+ const moveBeforeId = undefined;
+ const moveAfterId = undefined;
+
+ state = {
+ boardLists: {
+ [fromListId]: { listType: fromListType },
+ [toListId]: { listType: toListType },
+ },
+ boardItems: { [itemId]: originalIssue },
+ boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
+ };
+ params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
+ moveMutations = [
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
+ },
+ ];
+ undoMutations = [
+ { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
+ });
it('moveIssueCard commits a correct set of actions', () => {
testAction({
@@ -1216,47 +1212,45 @@ describe('moveIssueCard and undoMoveIssueCard', () => {
},
],
])('when %s', (_, { toListType, fromListType }) => {
- beforeEach(
- ({
- itemId = 123,
- fromListId = 'gid://gitlab/List/1',
- toListId = 'gid://gitlab/List/2',
- originalIssue = { foo: 'bar' },
- originalIndex = 0,
- moveBeforeId = undefined,
- moveAfterId = undefined,
- } = {}) => {
- state = {
- boardLists: {
- [fromListId]: { listType: fromListType },
- [toListId]: { listType: toListType },
- },
- boardItems: { [itemId]: originalIssue },
- boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
- };
- params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
- moveMutations = [
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
- },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
- },
- ];
- undoMutations = [
- { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
- { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
- {
- type: types.ADD_BOARD_ITEM_TO_LIST,
- payload: { itemId, listId: fromListId, atIndex: originalIndex },
- },
- ];
- },
- );
+ beforeEach(() => {
+ const itemId = 123;
+ const fromListId = 'gid://gitlab/List/1';
+ const toListId = 'gid://gitlab/List/2';
+ const originalIssue = { foo: 'bar' };
+ const originalIndex = 0;
+ const moveBeforeId = undefined;
+ const moveAfterId = undefined;
+
+ state = {
+ boardLists: {
+ [fromListId]: { listType: fromListType },
+ [toListId]: { listType: toListType },
+ },
+ boardItems: { [itemId]: originalIssue },
+ boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
+ };
+ params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
+ moveMutations = [
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
+ },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
+ undoMutations = [
+ { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
+ });
it('moveIssueCard commits a correct set of actions', () => {
testAction({
diff --git a/spec/frontend/captcha/init_recaptcha_script_spec.js b/spec/frontend/captcha/init_recaptcha_script_spec.js
index af07c9e474e..78480821d95 100644
--- a/spec/frontend/captcha/init_recaptcha_script_spec.js
+++ b/spec/frontend/captcha/init_recaptcha_script_spec.js
@@ -1,5 +1,4 @@
import {
- RECAPTCHA_API_URL_PREFIX,
RECAPTCHA_ONLOAD_CALLBACK_NAME,
clearMemoizeCache,
initRecaptchaScript,
@@ -26,7 +25,7 @@ describe('initRecaptchaScript', () => {
<head>
<script
class="js-recaptcha-script"
- src="${RECAPTCHA_API_URL_PREFIX}?onload=${RECAPTCHA_ONLOAD_CALLBACK_NAME}&render=explicit"
+ src="undefined?onload=recaptchaOnloadCallback&render=explicit"
/>
</head>
`);
diff --git a/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js b/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js
index 920ceaefb70..864041141b8 100644
--- a/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js
@@ -4,8 +4,8 @@ import { GlLoadingIcon, GlTable } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import { resolvers } from '~/ci_variable_list/graphql/resolvers';
+import { createAlert } from '~/flash';
+import { resolvers } from '~/ci_variable_list/graphql/settings';
import ciAdminVariables from '~/ci_variable_list/components/ci_admin_variables.vue';
import ciVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
@@ -92,8 +92,8 @@ describe('Ci Admin Variable list', () => {
);
});
- it('createFlash was not called', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ it('createAlert was not called', () => {
+ expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -104,8 +104,8 @@ describe('Ci Admin Variable list', () => {
await createComponentWithApollo();
});
- it('calls createFlash with the expected error message', () => {
- expect(createFlash).toHaveBeenCalledWith({ message: variableFetchErrorText });
+ it('calls createAlert with the expected error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: variableFetchErrorText });
});
});
});
@@ -153,7 +153,7 @@ describe('Ci Admin Variable list', () => {
await nextTick();
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message: graphQLErrorMessage });
+ expect(createAlert).toHaveBeenCalledWith({ message: graphQLErrorMessage });
},
);
@@ -171,7 +171,7 @@ describe('Ci Admin Variable list', () => {
await findCiSettings().vm.$emit(event, newVariable);
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message: genericMutationErrorText });
+ expect(createAlert).toHaveBeenCalledWith({ message: genericMutationErrorText });
},
);
});
diff --git a/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js b/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js
index e45656acfd8..8a48e73eb9f 100644
--- a/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js
@@ -4,8 +4,8 @@ import { GlLoadingIcon, GlTable } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import { resolvers } from '~/ci_variable_list/graphql/resolvers';
+import { createAlert } from '~/flash';
+import { resolvers } from '~/ci_variable_list/graphql/settings';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import ciGroupVariables from '~/ci_variable_list/components/ci_group_variables.vue';
@@ -95,8 +95,8 @@ describe('Ci Group Variable list', () => {
);
});
- it('createFlash was not called', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ it('createAlert was not called', () => {
+ expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -107,8 +107,8 @@ describe('Ci Group Variable list', () => {
await createComponentWithApollo();
});
- it('calls createFlash with the expected error message', () => {
- expect(createFlash).toHaveBeenCalledWith({ message: variableFetchErrorText });
+ it('calls createAlert with the expected error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: variableFetchErrorText });
});
});
});
@@ -158,7 +158,7 @@ describe('Ci Group Variable list', () => {
await nextTick();
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message: graphQLErrorMessage });
+ expect(createAlert).toHaveBeenCalledWith({ message: graphQLErrorMessage });
},
);
@@ -176,7 +176,7 @@ describe('Ci Group Variable list', () => {
await findCiSettings().vm.$emit(event, newVariable);
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message: genericMutationErrorText });
+ expect(createAlert).toHaveBeenCalledWith({ message: genericMutationErrorText });
},
);
});
diff --git a/spec/frontend/ci_variable_list/components/ci_project_variables_spec.js b/spec/frontend/ci_variable_list/components/ci_project_variables_spec.js
index 867f8e0cf8f..c630278fbde 100644
--- a/spec/frontend/ci_variable_list/components/ci_project_variables_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_project_variables_spec.js
@@ -4,8 +4,8 @@ import { GlLoadingIcon, GlTable } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import { resolvers } from '~/ci_variable_list/graphql/resolvers';
+import { createAlert } from '~/flash';
+import { resolvers } from '~/ci_variable_list/graphql/settings';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import ciProjectVariables from '~/ci_variable_list/components/ci_project_variables.vue';
@@ -112,8 +112,8 @@ describe('Ci Project Variable list', () => {
);
});
- it('createFlash was not called', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ it('createAlert was not called', () => {
+ expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -125,8 +125,8 @@ describe('Ci Project Variable list', () => {
await createComponentWithApollo();
});
- it('calls createFlash with the expected error message', () => {
- expect(createFlash).toHaveBeenCalledWith({ message: variableFetchErrorText });
+ it('calls createAlert with the expected error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: variableFetchErrorText });
});
});
@@ -138,8 +138,8 @@ describe('Ci Project Variable list', () => {
await createComponentWithApollo();
});
- it('calls createFlash with the expected error message', () => {
- expect(createFlash).toHaveBeenCalledWith({ message: environmentFetchErrorText });
+ it('calls createAlert with the expected error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({ message: environmentFetchErrorText });
});
});
});
@@ -190,7 +190,7 @@ describe('Ci Project Variable list', () => {
await nextTick();
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message: graphQLErrorMessage });
+ expect(createAlert).toHaveBeenCalledWith({ message: graphQLErrorMessage });
},
);
@@ -208,7 +208,7 @@ describe('Ci Project Variable list', () => {
await findCiSettings().vm.$emit(event, newVariable);
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message: genericMutationErrorText });
+ expect(createAlert).toHaveBeenCalledWith({ message: genericMutationErrorText });
},
);
});
diff --git a/spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js b/spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js
index 9c941f99982..7def4dd4f29 100644
--- a/spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js
+++ b/spec/frontend/ci_variable_list/components/legacy_ci_variable_settings_spec.js
@@ -9,11 +9,11 @@ Vue.use(Vuex);
describe('Ci variable table', () => {
let wrapper;
let store;
- let isGroup;
+ let isProject;
- const createComponent = (groupState) => {
+ const createComponent = (projectState) => {
store = createStore();
- store.state.isGroup = groupState;
+ store.state.isProject = projectState;
jest.spyOn(store, 'dispatch').mockImplementation();
wrapper = shallowMount(LegacyCiVariableSettings, {
store,
@@ -25,14 +25,14 @@ describe('Ci variable table', () => {
});
it('dispatches fetchEnvironments when mounted', () => {
- isGroup = false;
- createComponent(isGroup);
+ isProject = true;
+ createComponent(isProject);
expect(store.dispatch).toHaveBeenCalledWith('fetchEnvironments');
});
it('does not dispatch fetchenvironments when in group context', () => {
- isGroup = true;
- createComponent(isGroup);
+ isProject = false;
+ createComponent(isProject);
expect(store.dispatch).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/ci_variable_list/mocks.js b/spec/frontend/ci_variable_list/mocks.js
index 6d633c8b740..6f3e73f8b83 100644
--- a/spec/frontend/ci_variable_list/mocks.js
+++ b/spec/frontend/ci_variable_list/mocks.js
@@ -45,6 +45,12 @@ const createDefaultVars = ({ withScope = true, kind } = {}) => {
return {
__typename: `Ci${kind}VariableConnection`,
+ pageInfo: {
+ startCursor: 'adsjsd12kldpsa',
+ endCursor: 'adsjsd12kldpsa',
+ hasPreviousPage: false,
+ hasNextPage: true,
+ },
nodes: base,
};
};
diff --git a/spec/frontend/ci_variable_list/store/actions_spec.js b/spec/frontend/ci_variable_list/store/actions_spec.js
index eb31fcd3ef4..e8c81a53a55 100644
--- a/spec/frontend/ci_variable_list/store/actions_spec.js
+++ b/spec/frontend/ci_variable_list/store/actions_spec.js
@@ -5,7 +5,7 @@ import * as actions from '~/ci_variable_list/store/actions';
import * as types from '~/ci_variable_list/store/mutation_types';
import getInitialState from '~/ci_variable_list/store/state';
import { prepareDataForDisplay, prepareEnvironments } from '~/ci_variable_list/store/utils';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import mockData from '../services/mock_data';
@@ -118,7 +118,7 @@ describe('CI variable list store actions', () => {
},
],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -155,7 +155,7 @@ describe('CI variable list store actions', () => {
},
],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -192,7 +192,7 @@ describe('CI variable list store actions', () => {
},
],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -219,7 +219,7 @@ describe('CI variable list store actions', () => {
mock.onGet(state.endpoint).reply(500);
await testAction(actions.fetchVariables, {}, state, [], [{ type: 'requestVariables' }]);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was an error fetching the variables.',
});
});
@@ -249,7 +249,7 @@ describe('CI variable list store actions', () => {
await testAction(actions.fetchEnvironments, {}, state, [], [{ type: 'requestEnvironments' }]);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was an error fetching the environments information.',
});
});
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index 7663f329b3f..09b1f80ff9b 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -5,7 +5,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { MAX_REQUESTS } from '~/clusters_list/constants';
import * as actions from '~/clusters_list/store/actions';
import * as types from '~/clusters_list/store/mutation_types';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
import { apiData } from '../mock_data';
@@ -98,7 +98,7 @@ describe('Clusters store actions', () => {
},
],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: expect.stringMatching('error'),
});
});
diff --git a/spec/frontend/code_navigation/utils/index_spec.js b/spec/frontend/code_navigation/utils/index_spec.js
index 700c912029c..6f0d93c466c 100644
--- a/spec/frontend/code_navigation/utils/index_spec.js
+++ b/spec/frontend/code_navigation/utils/index_spec.js
@@ -87,5 +87,13 @@ describe('addInteractionClass', () => {
expect(spans[1].textContent).toBe('Text');
expect(spans[2].textContent).toBe(' ');
});
+
+ it('adds the correct class names to wrapped nodes', () => {
+ setHTMLFixture(
+ '<div data-path="index.js"><div class="blob-content"><div id="LC1" class="line"><span class="test"> Text </span></div></div></div>',
+ );
+ addInteractionClass({ ...params, wrapTextNodes: true });
+ expect(findAllSpans()[1].classList.contains('test')).toBe(true);
+ });
});
});
diff --git a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
index fddc767953a..16737003fa0 100644
--- a/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
+++ b/spec/frontend/commit/commit_box_pipeline_mini_graph_spec.js
@@ -5,7 +5,7 @@ import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import CommitBoxPipelineMiniGraph from '~/projects/commit_box/info/components/commit_box_pipeline_mini_graph.vue';
import PipelineMiniGraph from '~/pipelines/components/pipeline_mini_graph/pipeline_mini_graph.vue';
import { COMMIT_BOX_POLL_INTERVAL } from '~/projects/commit_box/info/constants';
@@ -178,12 +178,12 @@ describe('Commit box pipeline mini graph', () => {
});
describe('error state', () => {
- it('createFlash should show if there is an error fetching the data', async () => {
+ it('createAlert should show if there is an error fetching the data', async () => {
createComponent({ handler: failedHandler });
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching linked pipelines.',
});
});
diff --git a/spec/frontend/commit/commit_pipeline_status_component_spec.js b/spec/frontend/commit/commit_pipeline_status_component_spec.js
index 73720c1cc88..e75fb697a7b 100644
--- a/spec/frontend/commit/commit_pipeline_status_component_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_component_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import Visibility from 'visibilityjs';
import { nextTick } from 'vue';
import fixture from 'test_fixtures/pipelines/pipelines.json';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import Poll from '~/lib/utils/poll';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
@@ -170,7 +170,7 @@ describe('Commit pipeline status component', () => {
});
it('displays flash error message', () => {
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
index db7b7b45397..8d455f8a3d7 100644
--- a/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
+++ b/spec/frontend/commit/components/commit_box_pipeline_status_spec.js
@@ -4,7 +4,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import CommitBoxPipelineStatus from '~/projects/commit_box/info/components/commit_box_pipeline_status.vue';
import {
@@ -78,7 +78,7 @@ describe('Commit box pipeline status', () => {
expect(findStatusIcon().exists()).toBe(true);
expect(findLoadingIcon().exists()).toBe(false);
- expect(createFlash).toHaveBeenCalledTimes(0);
+ expect(createAlert).toHaveBeenCalledTimes(0);
});
it('should link to the latest pipeline', () => {
@@ -97,12 +97,12 @@ describe('Commit box pipeline status', () => {
});
describe('error state', () => {
- it('createFlash should show if there is an error fetching the pipeline status', async () => {
+ it('createAlert should show if there is an error fetching the pipeline status', async () => {
createComponent(failedHandler);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: PIPELINE_STATUS_FETCH_ERROR,
});
});
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index ae52cb05eaf..c1c2a125515 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -13,6 +13,7 @@ import MediaBubbleMenu from '~/content_editor/components/bubble_menus/media_bubb
import TopToolbar from '~/content_editor/components/top_toolbar.vue';
import LoadingIndicator from '~/content_editor/components/loading_indicator.vue';
import waitForPromises from 'helpers/wait_for_promises';
+import { KEYDOWN_EVENT } from '~/content_editor/constants';
jest.mock('~/emoji');
@@ -26,12 +27,13 @@ describe('ContentEditor', () => {
const findEditorStateObserver = () => wrapper.findComponent(EditorStateObserver);
const findLoadingIndicator = () => wrapper.findComponent(LoadingIndicator);
const findContentEditorAlert = () => wrapper.findComponent(ContentEditorAlert);
- const createWrapper = ({ markdown } = {}) => {
+ const createWrapper = ({ markdown, autofocus } = {}) => {
wrapper = shallowMountExtended(ContentEditor, {
propsData: {
renderMarkdown,
uploadsPath,
markdown,
+ autofocus,
},
stubs: {
EditorStateObserver,
@@ -70,14 +72,22 @@ describe('ContentEditor', () => {
expect(editorContent.classes()).toContain('md');
});
- it('renders ContentEditorProvider component', async () => {
- await createWrapper();
+ it('allows setting the tiptap editor to autofocus', async () => {
+ createWrapper({ autofocus: 'start' });
+
+ await nextTick();
+
+ expect(findEditorContent().props().editor.options.autofocus).toBe('start');
+ });
+
+ it('renders ContentEditorProvider component', () => {
+ createWrapper();
expect(wrapper.findComponent(ContentEditorProvider).exists()).toBe(true);
});
- it('renders top toolbar component', async () => {
- await createWrapper();
+ it('renders top toolbar component', () => {
+ createWrapper();
expect(wrapper.findComponent(TopToolbar).exists()).toBe(true);
});
@@ -213,6 +223,17 @@ describe('ContentEditor', () => {
});
});
+ describe('when editorStateObserver emits keydown event', () => {
+ it('bubbles up event', () => {
+ const event = new Event('keydown');
+
+ createWrapper();
+
+ findEditorStateObserver().vm.$emit(KEYDOWN_EVENT, event);
+ expect(wrapper.emitted(KEYDOWN_EVENT)).toEqual([[event]]);
+ });
+ });
+
it.each`
name | component
${'formatting'} | ${FormattingBubbleMenu}
diff --git a/spec/frontend/content_editor/components/editor_state_observer_spec.js b/spec/frontend/content_editor/components/editor_state_observer_spec.js
index e8c2d8c8793..9b42f61c98c 100644
--- a/spec/frontend/content_editor/components/editor_state_observer_spec.js
+++ b/spec/frontend/content_editor/components/editor_state_observer_spec.js
@@ -4,7 +4,7 @@ import EditorStateObserver, {
tiptapToComponentMap,
} from '~/content_editor/components/editor_state_observer.vue';
import eventHubFactory from '~/helpers/event_hub_factory';
-import { ALERT_EVENT } from '~/content_editor/constants';
+import { ALERT_EVENT, KEYDOWN_EVENT } from '~/content_editor/constants';
import { createTestEditor } from '../test_utils';
describe('content_editor/components/editor_state_observer', () => {
@@ -14,6 +14,7 @@ describe('content_editor/components/editor_state_observer', () => {
let onSelectionUpdateListener;
let onTransactionListener;
let onAlertListener;
+ let onKeydownListener;
let eventHub;
const buildEditor = () => {
@@ -30,6 +31,7 @@ describe('content_editor/components/editor_state_observer', () => {
selectionUpdate: onSelectionUpdateListener,
transaction: onTransactionListener,
[ALERT_EVENT]: onAlertListener,
+ [KEYDOWN_EVENT]: onKeydownListener,
},
});
};
@@ -39,6 +41,7 @@ describe('content_editor/components/editor_state_observer', () => {
onSelectionUpdateListener = jest.fn();
onTransactionListener = jest.fn();
onAlertListener = jest.fn();
+ onKeydownListener = jest.fn();
buildEditor();
});
@@ -67,8 +70,9 @@ describe('content_editor/components/editor_state_observer', () => {
});
it.each`
- event | listener
- ${ALERT_EVENT} | ${() => onAlertListener}
+ event | listener
+ ${ALERT_EVENT} | ${() => onAlertListener}
+ ${KEYDOWN_EVENT} | ${() => onKeydownListener}
`('listens to $event event in the eventBus object', ({ event, listener }) => {
const args = {};
@@ -97,6 +101,7 @@ describe('content_editor/components/editor_state_observer', () => {
it.each`
event
${ALERT_EVENT}
+ ${KEYDOWN_EVENT}
`('removes $event event hook from eventHub', ({ event }) => {
jest.spyOn(eventHub, '$off');
jest.spyOn(eventHub, '$on');
diff --git a/spec/frontend/content_editor/components/suggestions_dropdown_spec.js b/spec/frontend/content_editor/components/suggestions_dropdown_spec.js
new file mode 100644
index 00000000000..e72eb892e74
--- /dev/null
+++ b/spec/frontend/content_editor/components/suggestions_dropdown_spec.js
@@ -0,0 +1,286 @@
+import { GlAvatarLabeled, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import SuggestionsDropdown from '~/content_editor/components/suggestions_dropdown.vue';
+
+describe('~/content_editor/components/suggestions_dropdown', () => {
+ let wrapper;
+
+ const buildWrapper = ({ propsData } = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(SuggestionsDropdown, {
+ propsData: {
+ nodeType: 'reference',
+ command: jest.fn(),
+ ...propsData,
+ },
+ }),
+ );
+ };
+
+ const exampleUser = { username: 'root', avatar_url: 'root_avatar.png', type: 'User' };
+ const exampleIssue = { iid: 123, title: 'Test Issue' };
+ const exampleMergeRequest = { iid: 224, title: 'Test MR' };
+ const exampleMilestone1 = { iid: 21, title: '13' };
+ const exampleMilestone2 = { iid: 24, title: 'Milestone with spaces' };
+
+ const exampleCommand = {
+ name: 'due',
+ description: 'Set due date',
+ params: ['<in 2 days | this Friday | December 31st>'],
+ };
+ const exampleEpic = {
+ iid: 8884,
+ title: '❓ Remote Development | Solution validation',
+ reference: 'gitlab-org&8884',
+ };
+ const exampleLabel1 = {
+ title: 'Create',
+ color: '#E44D2A',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ };
+ const exampleLabel2 = {
+ title: 'Weekly Team Announcement',
+ color: '#E44D2A',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ };
+ const exampleLabel3 = {
+ title: 'devops::create',
+ color: '#E44D2A',
+ type: 'GroupLabel',
+ textColor: '#FFFFFF',
+ };
+ const exampleVulnerability = {
+ id: 60850147,
+ title: 'System procs network activity',
+ };
+ const exampleSnippet = {
+ id: 2420859,
+ title: 'Project creation QueryRecorder logs',
+ };
+ const exampleEmoji = {
+ c: 'people',
+ e: '😃',
+ d: 'smiling face with open mouth',
+ u: '6.0',
+ name: 'smiley',
+ };
+
+ const insertedEmojiProps = {
+ name: 'smiley',
+ title: 'smiling face with open mouth',
+ moji: '😃',
+ unicodeVersion: '6.0',
+ };
+
+ describe('on item select', () => {
+ it.each`
+ nodeType | referenceType | char | reference | insertedText | insertedProps
+ ${'reference'} | ${'user'} | ${'@'} | ${exampleUser} | ${`@root`} | ${{}}
+ ${'reference'} | ${'issue'} | ${'#'} | ${exampleIssue} | ${`#123`} | ${{}}
+ ${'reference'} | ${'merge_request'} | ${'!'} | ${exampleMergeRequest} | ${`!224`} | ${{}}
+ ${'reference'} | ${'milestone'} | ${'%'} | ${exampleMilestone1} | ${`%13`} | ${{}}
+ ${'reference'} | ${'milestone'} | ${'%'} | ${exampleMilestone2} | ${`%Milestone with spaces`} | ${{ originalText: '%"Milestone with spaces"' }}
+ ${'reference'} | ${'command'} | ${'/'} | ${exampleCommand} | ${'/due'} | ${{}}
+ ${'reference'} | ${'epic'} | ${'&'} | ${exampleEpic} | ${`gitlab-org&8884`} | ${{}}
+ ${'reference'} | ${'label'} | ${'~'} | ${exampleLabel1} | ${`Create`} | ${{}}
+ ${'reference'} | ${'label'} | ${'~'} | ${exampleLabel2} | ${`Weekly Team Announcement`} | ${{ originalText: '~"Weekly Team Announcement"' }}
+ ${'reference'} | ${'label'} | ${'~'} | ${exampleLabel3} | ${`devops::create`} | ${{ originalText: '~"devops::create"', text: 'devops::create' }}
+ ${'reference'} | ${'vulnerability'} | ${'[vulnerability:'} | ${exampleVulnerability} | ${`[vulnerability:60850147]`} | ${{}}
+ ${'reference'} | ${'snippet'} | ${'$'} | ${exampleSnippet} | ${`$2420859`} | ${{}}
+ ${'emoji'} | ${'emoji'} | ${':'} | ${exampleEmoji} | ${`😃`} | ${insertedEmojiProps}
+ `(
+ 'runs a command to insert the selected $referenceType',
+ ({ char, nodeType, referenceType, reference, insertedText, insertedProps }) => {
+ const commandSpy = jest.fn();
+
+ buildWrapper({
+ propsData: {
+ char,
+ command: commandSpy,
+ nodeType,
+ nodeProps: {
+ referenceType,
+ test: 'prop',
+ },
+ items: [reference],
+ },
+ });
+
+ wrapper.findComponent(GlDropdownItem).vm.$emit('click');
+
+ expect(commandSpy).toHaveBeenCalledWith(
+ expect.objectContaining({
+ text: insertedText,
+ test: 'prop',
+ ...insertedProps,
+ }),
+ );
+ },
+ );
+ });
+
+ describe('rendering user references', () => {
+ it('displays avatar labeled component', () => {
+ buildWrapper({
+ propsData: {
+ char: '@',
+ nodeProps: {
+ referenceType: 'user',
+ },
+ items: [exampleUser],
+ },
+ });
+
+ expect(wrapper.findComponent(GlAvatarLabeled).attributes()).toEqual(
+ expect.objectContaining({
+ label: exampleUser.username,
+ shape: 'circle',
+ src: exampleUser.avatar_url,
+ }),
+ );
+ });
+
+ describe.each`
+ referenceType | char | reference | displaysID
+ ${'issue'} | ${'#'} | ${exampleIssue} | ${true}
+ ${'merge_request'} | ${'!'} | ${exampleMergeRequest} | ${true}
+ ${'milestone'} | ${'%'} | ${exampleMilestone1} | ${false}
+ `('rendering $referenceType references', ({ referenceType, char, reference, displaysID }) => {
+ it(`displays ${referenceType} ID and title`, () => {
+ buildWrapper({
+ propsData: {
+ char,
+ nodeType: 'reference',
+ nodeProps: {
+ referenceType,
+ },
+ items: [reference],
+ },
+ });
+
+ if (displaysID) expect(wrapper.text()).toContain(`${reference.iid}`);
+ else expect(wrapper.text()).not.toContain(`${reference.iid}`);
+ expect(wrapper.text()).toContain(`${reference.title}`);
+ });
+ });
+
+ describe.each`
+ referenceType | char | reference
+ ${'snippet'} | ${'$'} | ${exampleSnippet}
+ ${'vulnerability'} | ${'[vulnerability:'} | ${exampleVulnerability}
+ `('rendering $referenceType references', ({ referenceType, char, reference }) => {
+ it(`displays ${referenceType} ID and title`, () => {
+ buildWrapper({
+ propsData: {
+ char,
+ nodeProps: {
+ referenceType,
+ },
+ items: [reference],
+ },
+ });
+
+ expect(wrapper.text()).toContain(`${reference.id}`);
+ expect(wrapper.text()).toContain(`${reference.title}`);
+ });
+ });
+
+ describe('rendering label references', () => {
+ it.each`
+ label | displayedTitle | displayedColor
+ ${exampleLabel1} | ${'Create'} | ${'rgb(228, 77, 42)' /* #E44D2A */}
+ ${exampleLabel2} | ${'Weekly Team Announcement'} | ${'rgb(228, 77, 42)' /* #E44D2A */}
+ ${exampleLabel3} | ${'devops::create'} | ${'rgb(228, 77, 42)' /* #E44D2A */}
+ `('displays label title and color', ({ label, displayedTitle, displayedColor }) => {
+ buildWrapper({
+ propsData: {
+ char: '~',
+ nodeProps: {
+ referenceType: 'label',
+ },
+ items: [label],
+ },
+ });
+
+ expect(wrapper.text()).toContain(displayedTitle);
+ expect(wrapper.text()).not.toContain('"'); // no quotes in the dropdown list
+ expect(wrapper.findByTestId('label-color-box').attributes().style).toEqual(
+ `background-color: ${displayedColor};`,
+ );
+ });
+ });
+
+ describe('rendering epic references', () => {
+ it('displays epic title and reference', () => {
+ buildWrapper({
+ propsData: {
+ char: '&',
+ nodeProps: {
+ referenceType: 'epic',
+ },
+ items: [exampleEpic],
+ },
+ });
+
+ expect(wrapper.text()).toContain(`${exampleEpic.reference}`);
+ expect(wrapper.text()).toContain(`${exampleEpic.title}`);
+ });
+ });
+
+ describe('rendering a command (quick action)', () => {
+ it('displays command name with a slash', () => {
+ buildWrapper({
+ propsData: {
+ char: '/',
+ nodeProps: {
+ referenceType: 'command',
+ },
+ items: [exampleCommand],
+ },
+ });
+
+ expect(wrapper.text()).toContain(`${exampleCommand.name} `);
+ });
+ });
+
+ describe('rendering emoji references', () => {
+ it('displays emoji', () => {
+ const testEmojis = [
+ {
+ c: 'people',
+ e: '😄',
+ d: 'smiling face with open mouth and smiling eyes',
+ u: '6.0',
+ name: 'smile',
+ },
+ {
+ c: 'people',
+ e: '😸',
+ d: 'grinning cat face with smiling eyes',
+ u: '6.0',
+ name: 'smile_cat',
+ },
+ { c: 'people', e: '😃', d: 'smiling face with open mouth', u: '6.0', name: 'smiley' },
+ ];
+
+ buildWrapper({
+ propsData: {
+ char: ':',
+ nodeType: 'emoji',
+ nodeProps: {},
+ items: testEmojis,
+ },
+ });
+
+ testEmojis.forEach((testEmoji) => {
+ expect(wrapper.text()).toContain(testEmoji.e);
+ expect(wrapper.text()).toContain(testEmoji.d);
+ expect(wrapper.text()).toContain(testEmoji.name);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/components/wrappers/label_spec.js b/spec/frontend/content_editor/components/wrappers/label_spec.js
new file mode 100644
index 00000000000..9e58669b0ea
--- /dev/null
+++ b/spec/frontend/content_editor/components/wrappers/label_spec.js
@@ -0,0 +1,36 @@
+import { GlLabel } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import LabelWrapper from '~/content_editor/components/wrappers/label.vue';
+
+describe('content/components/wrappers/label', () => {
+ let wrapper;
+
+ const createWrapper = async (node = {}) => {
+ wrapper = shallowMountExtended(LabelWrapper, {
+ propsData: { node },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it("renders a GlLabel with the node's text and color", () => {
+ createWrapper({ attrs: { color: '#ff0000', text: 'foo bar', originalText: '~"foo bar"' } });
+
+ const glLabel = wrapper.findComponent(GlLabel);
+
+ expect(glLabel.props()).toMatchObject(
+ expect.objectContaining({
+ title: 'foo bar',
+ backgroundColor: '#ff0000',
+ }),
+ );
+ });
+
+ it('renders a scoped label if there is a "::" in the label', () => {
+ createWrapper({ attrs: { color: '#ff0000', text: 'foo::bar', originalText: '~"foo::bar"' } });
+
+ expect(wrapper.findComponent(GlLabel).props().scoped).toBe(true);
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/heading_spec.js b/spec/frontend/content_editor/extensions/heading_spec.js
new file mode 100644
index 00000000000..2fa25e03cdc
--- /dev/null
+++ b/spec/frontend/content_editor/extensions/heading_spec.js
@@ -0,0 +1,54 @@
+import Heading from '~/content_editor/extensions/heading';
+import { createTestEditor, createDocBuilder, triggerNodeInputRule } from '../test_utils';
+
+describe('content_editor/extensions/heading', () => {
+ let tiptapEditor;
+ let doc;
+ let p;
+ let heading;
+
+ beforeEach(() => {
+ tiptapEditor = createTestEditor({ extensions: [Heading] });
+ ({
+ builders: { doc, p, heading },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ heading: { nodeType: Heading.name },
+ },
+ }));
+ });
+
+ describe('when typing a valid heading input rule', () => {
+ it.each`
+ level | inputRuleText
+ ${1} | ${'# '}
+ ${2} | ${'## '}
+ ${3} | ${'### '}
+ ${4} | ${'#### '}
+ ${5} | ${'##### '}
+ ${6} | ${'###### '}
+ `('inserts a heading node for $inputRuleText', ({ level, inputRuleText }) => {
+ const expectedDoc = doc(heading({ level }));
+
+ triggerNodeInputRule({ tiptapEditor, inputRuleText });
+
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+
+ describe('when typing a invalid heading input rule', () => {
+ it.each`
+ inputRuleText
+ ${'#hi'}
+ ${'#\n'}
+ `('does not insert a heading node for $inputRuleText', ({ inputRuleText }) => {
+ const expectedDoc = doc(p());
+
+ triggerNodeInputRule({ tiptapEditor, inputRuleText });
+
+ // no change to the document
+ expect(tiptapEditor.getJSON()).toEqual(expectedDoc.toJSON());
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_processing_spec_helper.js b/spec/frontend/content_editor/markdown_processing_spec_helper.js
index 228d009e42c..6f10f294fb0 100644
--- a/spec/frontend/content_editor/markdown_processing_spec_helper.js
+++ b/spec/frontend/content_editor/markdown_processing_spec_helper.js
@@ -1,7 +1,10 @@
import fs from 'fs';
import jsYaml from 'js-yaml';
import { memoize } from 'lodash';
+import MockAdapter from 'axios-mock-adapter';
+import axios from 'axios';
import { createContentEditor } from '~/content_editor';
+import httpStatus from '~/lib/utils/http_status';
const getFocusedMarkdownExamples = memoize(
() => process.env.FOCUSED_MARKDOWN_EXAMPLES?.split(',') || [],
@@ -42,6 +45,11 @@ const loadMarkdownApiExamples = (markdownYamlPath) => {
};
const testSerializesHtmlToMarkdownForElement = async ({ markdown, html }) => {
+ const mock = new MockAdapter(axios);
+
+ // Ignore any API requests from the suggestions plugin
+ mock.onGet().reply(httpStatus.OK, []);
+
const contentEditor = createContentEditor({
// Overwrite renderMarkdown to always return this specific html
renderMarkdown: () => html,
@@ -55,6 +63,8 @@ const testSerializesHtmlToMarkdownForElement = async ({ markdown, html }) => {
// Assert that the markdown we ended up with after sending it through all the ContentEditor
// plumbing matches the original markdown from the YAML.
expect(serializedContent.trim()).toBe(markdown.trim());
+
+ mock.restore();
};
// describeMarkdownProcesssing
@@ -74,7 +84,7 @@ export const describeMarkdownProcessing = (description, markdownYamlPath) => {
return;
}
- it(exampleName, async () => {
+ it(`${exampleName}`, async () => {
await testSerializesHtmlToMarkdownForElement(example);
});
});
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 56394c85e8b..32193d97fd8 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -1204,6 +1204,24 @@ Oranges are orange [^1]
);
});
+ it('correctly adds a space between a preceding block element and a markdown table', () => {
+ expect(
+ serialize(
+ bulletList(listItem(paragraph('List item 1')), listItem(paragraph('List item 2'))),
+ table(tableRow(tableHeader(paragraph('header'))), tableRow(tableCell(paragraph('cell')))),
+ ).trim(),
+ ).toBe(
+ `
+* List item 1
+* List item 2
+
+| header |
+|--------|
+| cell |
+ `.trim(),
+ );
+ });
+
it('correctly serializes reference definition', () => {
expect(
serialize(
diff --git a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
index 459780cc7cf..8c1a3831a74 100644
--- a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
+++ b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
@@ -44,7 +44,7 @@ describe('content_editor/services/track_input_rules_and_shortcuts', () => {
describe('when creating a heading using an keyboard shortcut', () => {
it('sends a tracking event indicating that a heading was created using an input rule', async () => {
- const shortcuts = Heading.config.addKeyboardShortcuts.call(Heading);
+ const shortcuts = Heading.parent.config.addKeyboardShortcuts.call(Heading);
const [firstShortcut] = Object.keys(shortcuts);
const nodeName = Heading.name;
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
index bdf3b3636ed..2f0b5719326 100644
--- a/spec/frontend/contributors/component/contributors_spec.js
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -1,4 +1,5 @@
import { mount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import ContributorsCharts from '~/contributors/components/contributors.vue';
@@ -52,14 +53,14 @@ describe('Contributors charts', () => {
it('should display loader whiled loading data', async () => {
wrapper.vm.$store.state.loading = true;
await nextTick();
- expect(wrapper.find('.contributors-loader').exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('should render charts when loading completed and there is chart data', async () => {
wrapper.vm.$store.state.loading = false;
wrapper.vm.$store.state.chartData = chartData;
await nextTick();
- expect(wrapper.find('.contributors-loader').exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.find('.contributors-charts').exists()).toBe(true);
expect(wrapper.element).toMatchSnapshot();
});
diff --git a/spec/frontend/contributors/store/actions_spec.js b/spec/frontend/contributors/store/actions_spec.js
index ef0ff8ca208..865f683a91a 100644
--- a/spec/frontend/contributors/store/actions_spec.js
+++ b/spec/frontend/contributors/store/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/contributors/stores/actions';
import * as types from '~/contributors/stores/mutation_types';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
jest.mock('~/flash.js');
@@ -47,7 +47,7 @@ describe('Contributors store actions', () => {
[{ type: types.SET_LOADING_STATE, payload: true }],
[],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: expect.stringMatching('error'),
});
});
diff --git a/spec/frontend/crm/contacts_root_spec.js b/spec/frontend/crm/contacts_root_spec.js
index 7aaaf480c44..ec7172434bf 100644
--- a/spec/frontend/crm/contacts_root_spec.js
+++ b/spec/frontend/crm/contacts_root_spec.js
@@ -87,7 +87,7 @@ describe('Customer relations contacts root app', () => {
editButtonLabel: 'Edit',
title: 'Customer relations contacts',
newContact: 'New contact',
- errorText: 'Something went wrong. Please try again.',
+ errorMsg: 'Something went wrong. Please try again.',
},
serverErrorMessage: '',
filterSearchKey: 'contacts',
@@ -117,6 +117,18 @@ describe('Customer relations contacts root app', () => {
expect(wrapper.text()).toContain('Something went wrong. Please try again.');
});
+
+ it('should be removed on error-alert-dismissed event', async () => {
+ mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain('Something went wrong. Please try again.');
+
+ findTable().vm.$emit('error-alert-dismissed');
+ await waitForPromises();
+
+ expect(wrapper.text()).not.toContain('Something went wrong. Please try again.');
+ });
});
describe('on successful load', () => {
diff --git a/spec/frontend/crm/organizations_root_spec.js b/spec/frontend/crm/organizations_root_spec.js
index a0b56596177..1fcf6aa8f50 100644
--- a/spec/frontend/crm/organizations_root_spec.js
+++ b/spec/frontend/crm/organizations_root_spec.js
@@ -91,7 +91,7 @@ describe('Customer relations organizations root app', () => {
editButtonLabel: 'Edit',
title: 'Customer relations organizations',
newOrganization: 'New organization',
- errorText: 'Something went wrong. Please try again.',
+ errorMsg: 'Something went wrong. Please try again.',
},
serverErrorMessage: '',
filterSearchKey: 'organizations',
diff --git a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
index 9c8cd6a3dbc..948dc5c9be2 100644
--- a/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
+++ b/spec/frontend/cycle_analytics/value_stream_metrics_spec.js
@@ -8,7 +8,7 @@ import { METRIC_TYPE_SUMMARY } from '~/api/analytics_api';
import { VSA_METRICS_GROUPS, METRICS_POPOVER_CONTENT } from '~/analytics/shared/constants';
import { prepareTimeMetricsData } from '~/analytics/shared/utils';
import MetricTile from '~/analytics/shared/components/metric_tile.vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { group } from './mock_data';
jest.mock('~/flash');
@@ -177,7 +177,7 @@ describe('ValueStreamMetrics', () => {
});
it('should render an error message', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `There was an error while fetching value stream analytics ${fakeReqName} data.`,
});
});
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
index bbafdc000db..113e0d8f60d 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
@@ -5,8 +5,9 @@ import Vuex from 'vuex';
import Api from '~/api';
import DeployFreezeModal from '~/deploy_freeze/components/deploy_freeze_modal.vue';
import createStore from '~/deploy_freeze/store';
-import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown.vue';
-import { freezePeriodsFixture, timezoneDataFixture } from '../helpers';
+import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown/timezone_dropdown.vue';
+import { freezePeriodsFixture } from '../helpers';
+import { timezoneDataFixture } from '../../vue_shared/components/timezone_dropdown/helpers';
jest.mock('~/api');
@@ -52,7 +53,7 @@ describe('Deploy freeze modal', () => {
describe('Basic interactions', () => {
it('button is disabled when freeze period is invalid', () => {
- expect(submitDeployFreezeButton().attributes('disabled')).toBeTruthy();
+ expect(submitDeployFreezeButton().attributes('disabled')).toBe('true');
});
});
@@ -92,7 +93,7 @@ describe('Deploy freeze modal', () => {
});
it('disables the add deploy freeze button', () => {
- expect(submitDeployFreezeButton().attributes('disabled')).toBeTruthy();
+ expect(submitDeployFreezeButton().attributes('disabled')).toBe('true');
});
});
@@ -103,7 +104,7 @@ describe('Deploy freeze modal', () => {
});
it('does not disable the submit button', () => {
- expect(submitDeployFreezeButton().attributes('disabled')).toBeFalsy();
+ expect(submitDeployFreezeButton().attributes('disabled')).toBeUndefined();
});
});
});
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
index 637efe30022..27d8fea9d5e 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
@@ -5,7 +5,7 @@ import DeployFreezeModal from '~/deploy_freeze/components/deploy_freeze_modal.vu
import DeployFreezeSettings from '~/deploy_freeze/components/deploy_freeze_settings.vue';
import DeployFreezeTable from '~/deploy_freeze/components/deploy_freeze_table.vue';
import createStore from '~/deploy_freeze/store';
-import { timezoneDataFixture } from '../helpers';
+import { timezoneDataFixture } from '../../vue_shared/components/timezone_dropdown/helpers';
Vue.use(Vuex);
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
index 137776edfab..c2d6eb399bc 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
@@ -5,7 +5,8 @@ import Vuex from 'vuex';
import DeployFreezeTable from '~/deploy_freeze/components/deploy_freeze_table.vue';
import createStore from '~/deploy_freeze/store';
import { RECEIVE_FREEZE_PERIODS_SUCCESS } from '~/deploy_freeze/store/mutation_types';
-import { freezePeriodsFixture, timezoneDataFixture } from '../helpers';
+import { freezePeriodsFixture } from '../helpers';
+import { timezoneDataFixture } from '../../vue_shared/components/timezone_dropdown/helpers';
Vue.use(Vuex);
diff --git a/spec/frontend/deploy_freeze/helpers.js b/spec/frontend/deploy_freeze/helpers.js
index 43e66183ab5..920901c97a8 100644
--- a/spec/frontend/deploy_freeze/helpers.js
+++ b/spec/frontend/deploy_freeze/helpers.js
@@ -1,10 +1,3 @@
import freezePeriodsFixture from 'test_fixtures/api/freeze-periods/freeze_periods.json';
-import timezoneDataFixture from 'test_fixtures/timezones/short.json';
-import { secondsToHours } from '~/lib/utils/datetime_utility';
-export { freezePeriodsFixture, timezoneDataFixture };
-
-export const findTzByName = (identifier = '') =>
- timezoneDataFixture.find(({ name }) => name.toLowerCase() === identifier.toLowerCase());
-
-export const formatTz = ({ offset, name }) => `[UTC ${secondsToHours(offset)}] ${name}`;
+export { freezePeriodsFixture };
diff --git a/spec/frontend/deploy_freeze/store/actions_spec.js b/spec/frontend/deploy_freeze/store/actions_spec.js
index ad67afdce75..ce0c924bed2 100644
--- a/spec/frontend/deploy_freeze/store/actions_spec.js
+++ b/spec/frontend/deploy_freeze/store/actions_spec.js
@@ -7,7 +7,8 @@ import getInitialState from '~/deploy_freeze/store/state';
import createFlash from '~/flash';
import * as logger from '~/lib/logger';
import axios from '~/lib/utils/axios_utils';
-import { freezePeriodsFixture, timezoneDataFixture } from '../helpers';
+import { freezePeriodsFixture } from '../helpers';
+import { timezoneDataFixture } from '../../vue_shared/components/timezone_dropdown/helpers';
jest.mock('~/api.js');
jest.mock('~/flash.js');
diff --git a/spec/frontend/deploy_freeze/store/mutations_spec.js b/spec/frontend/deploy_freeze/store/mutations_spec.js
index 878a755088c..984105d6655 100644
--- a/spec/frontend/deploy_freeze/store/mutations_spec.js
+++ b/spec/frontend/deploy_freeze/store/mutations_spec.js
@@ -2,7 +2,12 @@ import * as types from '~/deploy_freeze/store/mutation_types';
import mutations from '~/deploy_freeze/store/mutations';
import state from '~/deploy_freeze/store/state';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import { findTzByName, formatTz, freezePeriodsFixture, timezoneDataFixture } from '../helpers';
+import { formatTimezone } from '~/lib/utils/datetime_utility';
+import { freezePeriodsFixture } from '../helpers';
+import {
+ timezoneDataFixture,
+ findTzByName,
+} from '../../vue_shared/components/timezone_dropdown/helpers';
describe('Deploy freeze mutations', () => {
let stateCopy;
@@ -28,9 +33,9 @@ describe('Deploy freeze mutations', () => {
describe('RECEIVE_FREEZE_PERIODS_SUCCESS', () => {
it('should set freeze periods and format timezones from identifiers to names', () => {
const timezoneNames = {
- 'Europe/Berlin': '[UTC 2] Berlin',
+ 'Europe/Berlin': '[UTC + 2] Berlin',
'Etc/UTC': '[UTC 0] UTC',
- 'America/New_York': '[UTC -4] Eastern Time (US & Canada)',
+ 'America/New_York': '[UTC - 4] Eastern Time (US & Canada)',
};
mutations[types.RECEIVE_FREEZE_PERIODS_SUCCESS](stateCopy, freezePeriodsFixture);
@@ -51,7 +56,7 @@ describe('Deploy freeze mutations', () => {
it('should set the cron timezone', () => {
const selectedTz = findTzByName('Pacific Time (US & Canada)');
const timezone = {
- formattedTimezone: formatTz(selectedTz),
+ formattedTimezone: formatTimezone(selectedTz),
identifier: selectedTz.identifier,
};
mutations[types.SET_SELECTED_TIMEZONE](stateCopy, timezone);
diff --git a/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js b/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js
new file mode 100644
index 00000000000..19e9ba8b268
--- /dev/null
+++ b/spec/frontend/deploy_tokens/components/new_deploy_token_spec.js
@@ -0,0 +1,103 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { GlButton, GlFormCheckbox, GlFormInput, GlFormInputGroup, GlDatepicker } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { TEST_HOST } from 'helpers/test_constants';
+import NewDeployToken from '~/deploy_tokens/components/new_deploy_token.vue';
+import waitForPromises from 'helpers/wait_for_promises';
+
+const createNewTokenPath = `${TEST_HOST}/create`;
+const deployTokensHelpUrl = `${TEST_HOST}/help`;
+describe('New Deploy Token', () => {
+ let wrapper;
+
+ const factory = (options = {}) => {
+ const defaults = {
+ containerRegistryEnabled: true,
+ packagesRegistryEnabled: true,
+ tokenType: 'project',
+ };
+ const { containerRegistryEnabled, packagesRegistryEnabled, tokenType } = {
+ ...defaults,
+ ...options,
+ };
+ return shallowMount(NewDeployToken, {
+ propsData: {
+ deployTokensHelpUrl,
+ containerRegistryEnabled,
+ packagesRegistryEnabled,
+ createNewTokenPath,
+ tokenType,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('without a container registry', () => {
+ beforeEach(() => {
+ wrapper = factory({ containerRegistryEnabled: false });
+ });
+
+ it('should not show the read registry scope', () => {
+ wrapper
+ .findAllComponents(GlFormCheckbox)
+ .wrappers.forEach((checkbox) => expect(checkbox.text()).not.toBe('read_registry'));
+ });
+ });
+
+ describe('with a container registry', () => {
+ beforeEach(() => {
+ wrapper = factory();
+ });
+
+ it('should show the read registry scope', () => {
+ const checkbox = wrapper.findAllComponents(GlFormCheckbox).at(1);
+ expect(checkbox.text()).toBe('read_registry');
+ });
+
+ it('should make a request to create a token on submit', () => {
+ const mockAxios = new MockAdapter(axios);
+
+ const date = new Date();
+ const formInputs = wrapper.findAllComponents(GlFormInput);
+ const name = formInputs.at(0);
+ const username = formInputs.at(2);
+ name.vm.$emit('input', 'test name');
+ username.vm.$emit('input', 'test username');
+
+ const datepicker = wrapper.findAllComponents(GlDatepicker).at(0);
+ datepicker.vm.$emit('input', date);
+
+ const [readRepo, readRegistry] = wrapper.findAllComponents(GlFormCheckbox).wrappers;
+ readRepo.vm.$emit('input', true);
+ readRegistry.vm.$emit('input', true);
+
+ mockAxios
+ .onPost(createNewTokenPath, {
+ deploy_token: {
+ name: 'test name',
+ expires_at: date.toISOString(),
+ username: 'test username',
+ read_repository: true,
+ read_registry: true,
+ },
+ })
+ .replyOnce(200, { username: 'test token username', token: 'test token' });
+
+ wrapper.findAllComponents(GlButton).at(0).vm.$emit('click');
+
+ return waitForPromises()
+ .then(() => nextTick())
+ .then(() => {
+ const [tokenUsername, tokenValue] = wrapper.findAllComponents(GlFormInputGroup).wrappers;
+
+ expect(tokenUsername.props('value')).toBe('test token username');
+ expect(tokenValue.props('value')).toBe('test token');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/design_management/components/delete_button_spec.js b/spec/frontend/design_management/components/delete_button_spec.js
index cee1eec792d..426a61f5a47 100644
--- a/spec/frontend/design_management/components/delete_button_spec.js
+++ b/spec/frontend/design_management/components/delete_button_spec.js
@@ -29,12 +29,12 @@ describe('Batch delete button component', () => {
createComponent();
expect(findButton().exists()).toBe(true);
- expect(findButton().attributes('disabled')).toBeFalsy();
+ expect(findButton().attributes('disabled')).toBeUndefined();
});
it('renders disabled button when design is deleting', () => {
createComponent({ isDeleting: true });
- expect(findButton().attributes('disabled')).toBeTruthy();
+ expect(findButton().attributes('disabled')).toBe('true');
});
it('emits `delete-selected-designs` event on modal ok click', async () => {
@@ -45,7 +45,7 @@ describe('Batch delete button component', () => {
findModal().vm.$emit('ok');
await nextTick();
- expect(wrapper.emitted('delete-selected-designs')).toBeTruthy();
+ expect(wrapper.emitted('delete-selected-designs')).toHaveLength(1);
});
it('renders slot content', () => {
diff --git a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
index e36f5c79e3e..5fd61b25edc 100644
--- a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
@@ -1,16 +1,10 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Autosave from '~/autosave';
+import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import DesignReplyForm from '~/design_management/components/design_notes/design_reply_form.vue';
-const showModal = jest.fn();
-
-const GlModal = {
- template: '<div><slot name="modal-title"></slot><slot></slot><slot name="modal-ok"></slot></div>',
- methods: {
- show: showModal,
- },
-};
+jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
describe('Design reply form component', () => {
let wrapper;
@@ -19,7 +13,6 @@ describe('Design reply form component', () => {
const findTextarea = () => wrapper.find('textarea');
const findSubmitButton = () => wrapper.findComponent({ ref: 'submitButton' });
const findCancelButton = () => wrapper.findComponent({ ref: 'cancelButton' });
- const findModal = () => wrapper.findComponent({ ref: 'cancelCommentModal' });
function createComponent(props = {}, mountOptions = {}) {
wrapper = mount(DesignReplyForm, {
@@ -29,7 +22,6 @@ describe('Design reply form component', () => {
noteableId: 'gid://gitlab/DesignManagement::Design/6',
...props,
},
- stubs: { GlModal },
...mountOptions,
});
}
@@ -42,6 +34,7 @@ describe('Design reply form component', () => {
afterEach(() => {
wrapper.destroy();
window.gon = originalGon;
+ confirmAction.mockReset();
});
it('textarea has focus after component mount', () => {
@@ -102,7 +95,7 @@ describe('Design reply form component', () => {
});
it('submit button is disabled', () => {
- expect(findSubmitButton().attributes().disabled).toBeTruthy();
+ expect(findSubmitButton().attributes().disabled).toBe('disabled');
});
it('does not emit submitForm event on textarea ctrl+enter keydown', async () => {
@@ -111,7 +104,7 @@ describe('Design reply form component', () => {
});
await nextTick();
- expect(wrapper.emitted('submit-form')).toBeFalsy();
+ expect(wrapper.emitted('submit-form')).toBeUndefined();
});
it('does not emit submitForm event on textarea meta+enter keydown', async () => {
@@ -120,13 +113,13 @@ describe('Design reply form component', () => {
});
await nextTick();
- expect(wrapper.emitted('submit-form')).toBeFalsy();
+ expect(wrapper.emitted('submit-form')).toBeUndefined();
});
it('emits cancelForm event on pressing escape button on textarea', () => {
findTextarea().trigger('keyup.esc');
- expect(wrapper.emitted('cancel-form')).toBeTruthy();
+ expect(wrapper.emitted('cancel-form')).toHaveLength(1);
});
it('emits cancelForm event on clicking Cancel button', () => {
@@ -144,7 +137,7 @@ describe('Design reply form component', () => {
});
it('submit button is enabled', () => {
- expect(findSubmitButton().attributes().disabled).toBeFalsy();
+ expect(findSubmitButton().attributes().disabled).toBeUndefined();
});
it('emits submitForm event on Comment button click', async () => {
@@ -153,7 +146,7 @@ describe('Design reply form component', () => {
findSubmitButton().vm.$emit('click');
await nextTick();
- expect(wrapper.emitted('submit-form')).toBeTruthy();
+ expect(wrapper.emitted('submit-form')).toHaveLength(1);
expect(autosaveResetSpy).toHaveBeenCalled();
});
@@ -165,7 +158,7 @@ describe('Design reply form component', () => {
});
await nextTick();
- expect(wrapper.emitted('submit-form')).toBeTruthy();
+ expect(wrapper.emitted('submit-form')).toHaveLength(1);
expect(autosaveResetSpy).toHaveBeenCalled();
});
@@ -177,7 +170,7 @@ describe('Design reply form component', () => {
});
await nextTick();
- expect(wrapper.emitted('submit-form')).toBeTruthy();
+ expect(wrapper.emitted('submit-form')).toHaveLength(1);
expect(autosaveResetSpy).toHaveBeenCalled();
});
@@ -185,13 +178,13 @@ describe('Design reply form component', () => {
findTextarea().setValue('test2');
await nextTick();
- expect(wrapper.emitted('input')).toBeTruthy();
+ expect(wrapper.emitted('input')).toEqual([['test'], ['test2']]);
});
it('emits cancelForm event on Escape key if text was not changed', () => {
findTextarea().trigger('keyup.esc');
- expect(wrapper.emitted('cancel-form')).toBeTruthy();
+ expect(wrapper.emitted('cancel-form')).toHaveLength(1);
});
it('opens confirmation modal on Escape key when text has changed', async () => {
@@ -199,13 +192,13 @@ describe('Design reply form component', () => {
await nextTick();
findTextarea().trigger('keyup.esc');
- expect(showModal).toHaveBeenCalled();
+ expect(confirmAction).toHaveBeenCalled();
});
it('emits cancelForm event on Cancel button click if text was not changed', () => {
findCancelButton().trigger('click');
- expect(wrapper.emitted('cancel-form')).toBeTruthy();
+ expect(wrapper.emitted('cancel-form')).toHaveLength(1);
});
it('opens confirmation modal on Cancel button click when text has changed', async () => {
@@ -213,17 +206,41 @@ describe('Design reply form component', () => {
await nextTick();
findCancelButton().trigger('click');
- expect(showModal).toHaveBeenCalled();
+ expect(confirmAction).toHaveBeenCalled();
});
- it('emits cancelForm event on modal Ok button click', () => {
+ it('emits cancelForm event when confirmed', async () => {
+ confirmAction.mockResolvedValueOnce(true);
const autosaveResetSpy = jest.spyOn(wrapper.vm.autosaveDiscussion, 'reset');
+ wrapper.setProps({ value: 'test3' });
+ await nextTick();
+
findTextarea().trigger('keyup.esc');
- findModal().vm.$emit('ok');
+ await nextTick();
+
+ expect(confirmAction).toHaveBeenCalled();
+ await nextTick();
- expect(wrapper.emitted('cancel-form')).toBeTruthy();
+ expect(wrapper.emitted('cancel-form')).toHaveLength(1);
expect(autosaveResetSpy).toHaveBeenCalled();
});
+
+ it("doesn't emit cancelForm event when not confirmed", async () => {
+ confirmAction.mockResolvedValueOnce(false);
+ const autosaveResetSpy = jest.spyOn(wrapper.vm.autosaveDiscussion, 'reset');
+
+ wrapper.setProps({ value: 'test3' });
+ await nextTick();
+
+ findTextarea().trigger('keyup.esc');
+ await nextTick();
+
+ expect(confirmAction).toHaveBeenCalled();
+ await nextTick();
+
+ expect(wrapper.emitted('cancel-form')).toBeUndefined();
+ expect(autosaveResetSpy).not.toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/design_management/components/design_overlay_spec.js b/spec/frontend/design_management/components/design_overlay_spec.js
index 056959425a6..169f2dbdccb 100644
--- a/spec/frontend/design_management/components/design_overlay_spec.js
+++ b/spec/frontend/design_management/components/design_overlay_spec.js
@@ -170,6 +170,14 @@ describe('Design overlay component', () => {
});
it('should call an update active discussion mutation when clicking a note without moving it', async () => {
+ createComponent({
+ notes,
+ dimensions: {
+ width: 400,
+ height: 400,
+ },
+ });
+
const note = notes[0];
const { position } = note;
const mutationVariables = {
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index 774e37a8b21..a11463ab663 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -23,7 +23,7 @@ import {
DESIGN_SNOWPLOW_EVENT_TYPES,
DESIGN_SERVICE_PING_EVENT_TYPES,
} from '~/design_management/utils/tracking';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import mockAllVersions from '../../mock_data/all_versions';
import design from '../../mock_data/design';
import mockResponseWithDesigns from '../../mock_data/designs';
@@ -301,8 +301,8 @@ describe('Design management design index page', () => {
wrapper.vm.onDesignQueryResult({ data: mockResponseNoDesigns, loading: false });
await nextTick();
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({ message: DESIGN_NOT_FOUND_ERROR });
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({ message: DESIGN_NOT_FOUND_ERROR });
expect(router.push).toHaveBeenCalledTimes(1);
expect(router.push).toHaveBeenCalledWith({ name: DESIGNS_ROUTE_NAME });
});
@@ -323,8 +323,8 @@ describe('Design management design index page', () => {
wrapper.vm.onDesignQueryResult({ data: mockResponseWithDesigns, loading: false });
await nextTick();
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({ message: DESIGN_VERSION_NOT_EXIST_ERROR });
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({ message: DESIGN_VERSION_NOT_EXIST_ERROR });
expect(router.push).toHaveBeenCalledTimes(1);
expect(router.push).toHaveBeenCalledWith({ name: DESIGNS_ROUTE_NAME });
});
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 1033b509419..76ece922ded 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -29,7 +29,7 @@ import {
DESIGN_TRACKING_PAGE_NAME,
DESIGN_SNOWPLOW_EVENT_TYPES,
} from '~/design_management/utils/tracking';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import DesignDropzone from '~/vue_shared/components/upload_dropzone/upload_dropzone.vue';
import {
designListQueryResponse,
@@ -808,7 +808,7 @@ describe('Design management index page', () => {
await moveDesigns(wrapper);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message: 'Houston, we have a problem' });
+ expect(createAlert).toHaveBeenCalledWith({ message: 'Houston, we have a problem' });
});
it('displays alert if mutation had a non-recoverable error', async () => {
diff --git a/spec/frontend/design_management/utils/cache_update_spec.js b/spec/frontend/design_management/utils/cache_update_spec.js
index 5e2c37e24a1..42777adfd58 100644
--- a/spec/frontend/design_management/utils/cache_update_spec.js
+++ b/spec/frontend/design_management/utils/cache_update_spec.js
@@ -10,7 +10,7 @@ import {
ADD_IMAGE_DIFF_NOTE_ERROR,
UPDATE_IMAGE_DIFF_NOTE_ERROR,
} from '~/design_management/utils/error_messages';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import design from '../mock_data/design';
jest.mock('~/flash.js');
@@ -32,10 +32,10 @@ describe('Design Management cache update', () => {
${'updateStoreAfterUploadDesign'} | ${updateStoreAfterUploadDesign} | ${mockErrors[0]} | ${[]}
${'updateStoreAfterUpdateImageDiffNote'} | ${updateStoreAfterRepositionImageDiffNote} | ${UPDATE_IMAGE_DIFF_NOTE_ERROR} | ${[]}
`('$fnName handles errors in response', ({ subject, extraArgs, errorMessage }) => {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(() => subject(mockStore, { errors: mockErrors }, {}, ...extraArgs)).toThrow();
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({ message: errorMessage });
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({ message: errorMessage });
});
});
});
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index b88206c3b9a..936f4744e94 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -152,6 +152,30 @@ describe('diffs/components/app', () => {
});
});
+ describe('fetch diff with no changes', () => {
+ beforeEach(() => {
+ const fetchResolver = () => {
+ store.state.diffs.retrievingBatches = false;
+ return Promise.resolve({ real_size: null });
+ };
+
+ createComponent();
+ jest.spyOn(wrapper.vm, 'fetchDiffFilesMeta').mockImplementation(fetchResolver);
+
+ return nextTick();
+ });
+
+ it('diff counter to be 0 after fetch', async () => {
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
+ wrapper.vm.fetchData(false);
+
+ await nextTick();
+
+ expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
+ expect(wrapper.vm.diffFilesLength).toEqual(0);
+ });
+ });
+
describe('codequality diff', () => {
it('does not fetch code quality data on FOSS', async () => {
createComponent();
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index 440f169be86..75d55376d09 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -82,7 +82,7 @@ describe('diffs/components/commit_item', () => {
const imgElement = avatarElement.find('img');
expect(avatarElement.attributes('href')).toBe(commit.author.web_url);
- expect(imgElement.classes()).toContain('s32');
+ expect(imgElement.classes()).toContain('gl-avatar-s32');
expect(imgElement.attributes('alt')).toBe(commit.author.name);
expect(imgElement.attributes('src')).toBe(commit.author.avatar_url);
});
diff --git a/spec/frontend/diffs/components/diff_content_spec.js b/spec/frontend/diffs/components/diff_content_spec.js
index 9f593ee0d49..0bce6451ce4 100644
--- a/spec/frontend/diffs/components/diff_content_spec.js
+++ b/spec/frontend/diffs/components/diff_content_spec.js
@@ -53,7 +53,7 @@ describe('DiffContent', () => {
namespaced: true,
getters: {
draftsForFile: () => () => true,
- draftForLine: () => () => true,
+ draftsForLine: () => () => true,
shouldRenderDraftRow: () => () => true,
hasParallelDraftLeft: () => () => true,
hasParallelDraftRight: () => () => true,
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index a74013dc2d4..a7a95ed2f35 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -219,7 +219,7 @@ describe('DiffRow', () => {
shouldRenderDraftRow: jest.fn(),
hasParallelDraftLeft: jest.fn(),
hasParallelDraftRight: jest.fn(),
- draftForLine: jest.fn(),
+ draftsForLine: jest.fn().mockReturnValue([]),
};
const applyMap = mapParallel(mockDiffContent);
diff --git a/spec/frontend/diffs/components/diff_row_utils_spec.js b/spec/frontend/diffs/components/diff_row_utils_spec.js
index 930b8bcdb08..8b25691ce34 100644
--- a/spec/frontend/diffs/components/diff_row_utils_spec.js
+++ b/spec/frontend/diffs/components/diff_row_utils_spec.js
@@ -216,7 +216,7 @@ describe('mapParallel', () => {
diffFile: {},
hasParallelDraftLeft: () => false,
hasParallelDraftRight: () => false,
- draftForLine: () => ({}),
+ draftsForLine: () => [],
};
const line = { left: side, right: side };
const expectation = {
@@ -234,13 +234,13 @@ describe('mapParallel', () => {
const leftExpectation = {
renderDiscussion: true,
hasDraft: false,
- lineDraft: {},
+ lineDrafts: [],
hasCommentForm: true,
};
const rightExpectation = {
renderDiscussion: false,
hasDraft: false,
- lineDraft: {},
+ lineDrafts: [],
hasCommentForm: false,
};
const mapped = utils.mapParallel(content)(line);
diff --git a/spec/frontend/diffs/components/diff_view_spec.js b/spec/frontend/diffs/components/diff_view_spec.js
index 1dd4a2f6c23..9bff6bd14f1 100644
--- a/spec/frontend/diffs/components/diff_view_spec.js
+++ b/spec/frontend/diffs/components/diff_view_spec.js
@@ -21,7 +21,7 @@ describe('DiffView', () => {
getters: {
shouldRenderDraftRow: () => false,
shouldRenderParallelDraftRow: () => () => true,
- draftForLine: () => false,
+ draftsForLine: () => false,
draftsForFile: () => false,
hasParallelDraftLeft: () => false,
hasParallelDraftRight: () => false,
@@ -75,12 +75,12 @@ describe('DiffView', () => {
});
it.each`
- type | side | container | sides | total
- ${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true }, right: { lineDraft: {}, renderDiscussion: true } }} | ${2}
- ${'parallel'} | ${'right'} | ${'.new'} | ${{ left: { lineDraft: {}, renderDiscussion: true }, right: { lineDraft: {}, renderDiscussion: true } }} | ${2}
- ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true } }} | ${1}
- ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true } }} | ${1}
- ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDraft: {}, renderDiscussion: true } }} | ${1}
+ type | side | container | sides | total
+ ${'parallel'} | ${'left'} | ${'.old'} | ${{ left: { lineDrafts: [], renderDiscussion: true }, right: { lineDrafts: [], renderDiscussion: true } }} | ${2}
+ ${'parallel'} | ${'right'} | ${'.new'} | ${{ left: { lineDrafts: [], renderDiscussion: true }, right: { lineDrafts: [], renderDiscussion: true } }} | ${2}
+ ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDrafts: [], renderDiscussion: true } }} | ${1}
+ ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDrafts: [], renderDiscussion: true } }} | ${1}
+ ${'inline'} | ${'left'} | ${'.old'} | ${{ left: { lineDrafts: [], renderDiscussion: true } }} | ${1}
`(
'renders a $type comment row with comment cell on $side',
({ type, container, sides, total }) => {
@@ -95,7 +95,7 @@ describe('DiffView', () => {
it('renders a draft row', () => {
const wrapper = createWrapper({
- diffLines: [{ renderCommentRow: true, left: { lineDraft: { isDraft: true } } }],
+ diffLines: [{ renderCommentRow: true, left: { lineDrafts: [{ isDraft: true }] } }],
});
expect(wrapper.findComponent(DraftNote).exists()).toBe(true);
});
diff --git a/spec/frontend/diffs/components/file_row_stats_spec.js b/spec/frontend/diffs/components/file_row_stats_spec.js
index 3f5a63c19e5..7d3b60d2ba4 100644
--- a/spec/frontend/diffs/components/file_row_stats_spec.js
+++ b/spec/frontend/diffs/components/file_row_stats_spec.js
@@ -2,13 +2,21 @@ import { mount } from '@vue/test-utils';
import FileRowStats from '~/diffs/components/file_row_stats.vue';
describe('Diff file row stats', () => {
- const wrapper = mount(FileRowStats, {
- propsData: {
- file: {
- addedLines: 20,
- removedLines: 10,
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = mount(FileRowStats, {
+ propsData: {
+ file: {
+ addedLines: 20,
+ removedLines: 10,
+ },
},
- },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
});
it('renders added lines count', () => {
diff --git a/spec/frontend/diffs/mock_data/diff_code_quality.js b/spec/frontend/diffs/mock_data/diff_code_quality.js
index 2ca421a20b4..befab3b676b 100644
--- a/spec/frontend/diffs/mock_data/diff_code_quality.js
+++ b/spec/frontend/diffs/mock_data/diff_code_quality.js
@@ -36,7 +36,7 @@ export const diffCodeQuality = {
old_line: 1,
new_line: null,
codequality: [],
- lineDraft: {},
+ lineDrafts: [],
},
},
{
@@ -45,7 +45,7 @@ export const diffCodeQuality = {
old_line: 2,
new_line: 1,
codequality: [],
- lineDraft: {},
+ lineDrafts: [],
},
},
{
@@ -55,7 +55,7 @@ export const diffCodeQuality = {
new_line: 2,
codequality: [multipleFindingsArr[0]],
- lineDraft: {},
+ lineDrafts: [],
},
},
],
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 346e43e5a72..bf75f956d7f 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -13,7 +13,7 @@ import * as diffActions from '~/diffs/store/actions';
import * as types from '~/diffs/store/mutation_types';
import * as utils from '~/diffs/store/utils';
import * as treeWorkerUtils from '~/diffs/utils/tree_worker_utils';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as commonUtils from '~/lib/utils/common_utils';
import { mergeUrlParams } from '~/lib/utils/url_utility';
@@ -54,7 +54,7 @@ describe('DiffsStoreActions', () => {
['requestAnimationFrame', 'requestIdleCallback'].forEach((method) => {
global[method] = originalMethods[method];
});
- createFlash.mockClear();
+ createAlert.mockClear();
mock.restore();
});
@@ -175,35 +175,10 @@ describe('DiffsStoreActions', () => {
[{ type: 'startRenderDiffsQueue' }, { type: 'startRenderDiffsQueue' }],
);
});
-
- it.each`
- viewStyle | otherView
- ${'inline'} | ${'parallel'}
- ${'parallel'} | ${'inline'}
- `(
- 'should make a request with the view parameter "$viewStyle" when the batchEndpoint already contains "$otherView"',
- ({ viewStyle, otherView }) => {
- const endpointBatch = '/fetch/diffs_batch';
-
- diffActions
- .fetchDiffFilesBatch({
- commit: () => {},
- state: {
- endpointBatch: `${endpointBatch}?view=${otherView}`,
- diffViewType: viewStyle,
- },
- })
- .then(() => {
- expect(mock.history.get[0].url).toContain(`view=${viewStyle}`);
- expect(mock.history.get[0].url).not.toContain(`view=${otherView}`);
- })
- .catch(() => {});
- },
- );
});
describe('fetchDiffFilesMeta', () => {
- const endpointMetadata = '/fetch/diffs_metadata.json?view=inline';
+ const endpointMetadata = '/fetch/diffs_metadata.json?view=inline&w=0';
const noFilesData = { ...diffMetadata };
beforeEach(() => {
@@ -216,7 +191,7 @@ describe('DiffsStoreActions', () => {
return testAction(
diffActions.fetchDiffFilesMeta,
{},
- { endpointMetadata, diffViewType: 'inline' },
+ { endpointMetadata, diffViewType: 'inline', showWhitespace: true },
[
{ type: types.SET_LOADING, payload: true },
{ type: types.SET_LOADING, payload: false },
@@ -254,8 +229,8 @@ describe('DiffsStoreActions', () => {
mock.onGet(endpointCoverage).reply(400);
await testAction(diffActions.fetchCoverageFiles, {}, { endpointCoverage }, [], []);
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: expect.stringMatching('Something went wrong'),
});
});
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index c9010fbec0c..fc86907c144 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -29,12 +29,36 @@ import CacheYaml from './yaml_tests/positive_tests/cache.yml';
import FilterYaml from './yaml_tests/positive_tests/filter.yml';
import IncludeYaml from './yaml_tests/positive_tests/include.yml';
import RulesYaml from './yaml_tests/positive_tests/rules.yml';
+import ProjectPathYaml from './yaml_tests/positive_tests/project_path.yml';
+import VariablesYaml from './yaml_tests/positive_tests/variables.yml';
// YAML NEGATIVE TEST
import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
import CacheNegativeYaml from './yaml_tests/negative_tests/cache.yml';
import IncludeNegativeYaml from './yaml_tests/negative_tests/include.yml';
import RulesNegativeYaml from './yaml_tests/negative_tests/rules.yml';
+import VariablesNegativeYaml from './yaml_tests/negative_tests/variables.yml';
+
+import ProjectPathIncludeEmptyYaml from './yaml_tests/negative_tests/project_path/include/empty.yml';
+import ProjectPathIncludeInvalidVariableYaml from './yaml_tests/negative_tests/project_path/include/invalid_variable.yml';
+import ProjectPathIncludeLeadSlashYaml from './yaml_tests/negative_tests/project_path/include/leading_slash.yml';
+import ProjectPathIncludeNoSlashYaml from './yaml_tests/negative_tests/project_path/include/no_slash.yml';
+import ProjectPathIncludeTailSlashYaml from './yaml_tests/negative_tests/project_path/include/tailing_slash.yml';
+import ProjectPathTriggerIncludeEmptyYaml from './yaml_tests/negative_tests/project_path/trigger/include/empty.yml';
+import ProjectPathTriggerIncludeInvalidVariableYaml from './yaml_tests/negative_tests/project_path/trigger/include/invalid_variable.yml';
+import ProjectPathTriggerIncludeLeadSlashYaml from './yaml_tests/negative_tests/project_path/trigger/include/leading_slash.yml';
+import ProjectPathTriggerIncludeNoSlashYaml from './yaml_tests/negative_tests/project_path/trigger/include/no_slash.yml';
+import ProjectPathTriggerIncludeTailSlashYaml from './yaml_tests/negative_tests/project_path/trigger/include/tailing_slash.yml';
+import ProjectPathTriggerMinimalEmptyYaml from './yaml_tests/negative_tests/project_path/trigger/minimal/empty.yml';
+import ProjectPathTriggerMinimalInvalidVariableYaml from './yaml_tests/negative_tests/project_path/trigger/minimal/invalid_variable.yml';
+import ProjectPathTriggerMinimalLeadSlashYaml from './yaml_tests/negative_tests/project_path/trigger/minimal/leading_slash.yml';
+import ProjectPathTriggerMinimalNoSlashYaml from './yaml_tests/negative_tests/project_path/trigger/minimal/no_slash.yml';
+import ProjectPathTriggerMinimalTailSlashYaml from './yaml_tests/negative_tests/project_path/trigger/minimal/tailing_slash.yml';
+import ProjectPathTriggerProjectEmptyYaml from './yaml_tests/negative_tests/project_path/trigger/project/empty.yml';
+import ProjectPathTriggerProjectInvalidVariableYaml from './yaml_tests/negative_tests/project_path/trigger/project/invalid_variable.yml';
+import ProjectPathTriggerProjectLeadSlashYaml from './yaml_tests/negative_tests/project_path/trigger/project/leading_slash.yml';
+import ProjectPathTriggerProjectNoSlashYaml from './yaml_tests/negative_tests/project_path/trigger/project/no_slash.yml';
+import ProjectPathTriggerProjectTailSlashYaml from './yaml_tests/negative_tests/project_path/trigger/project/tailing_slash.yml';
const ajv = new Ajv({
strictTypes: false,
@@ -67,6 +91,8 @@ describe('positive tests', () => {
FilterYaml,
IncludeYaml,
RulesYaml,
+ VariablesYaml,
+ ProjectPathYaml,
}),
)('schema validates %s', (_, input) => {
expect(input).toValidateJsonSchema(schema);
@@ -90,6 +116,27 @@ describe('negative tests', () => {
CacheNegativeYaml,
IncludeNegativeYaml,
RulesNegativeYaml,
+ VariablesNegativeYaml,
+ ProjectPathIncludeEmptyYaml,
+ ProjectPathIncludeInvalidVariableYaml,
+ ProjectPathIncludeLeadSlashYaml,
+ ProjectPathIncludeNoSlashYaml,
+ ProjectPathIncludeTailSlashYaml,
+ ProjectPathTriggerIncludeEmptyYaml,
+ ProjectPathTriggerIncludeInvalidVariableYaml,
+ ProjectPathTriggerIncludeLeadSlashYaml,
+ ProjectPathTriggerIncludeNoSlashYaml,
+ ProjectPathTriggerIncludeTailSlashYaml,
+ ProjectPathTriggerMinimalEmptyYaml,
+ ProjectPathTriggerMinimalInvalidVariableYaml,
+ ProjectPathTriggerMinimalLeadSlashYaml,
+ ProjectPathTriggerMinimalNoSlashYaml,
+ ProjectPathTriggerMinimalTailSlashYaml,
+ ProjectPathTriggerProjectEmptyYaml,
+ ProjectPathTriggerProjectInvalidVariableYaml,
+ ProjectPathTriggerProjectLeadSlashYaml,
+ ProjectPathTriggerProjectNoSlashYaml,
+ ProjectPathTriggerProjectTailSlashYaml,
}),
)('schema validates %s', (_, input) => {
expect(input).not.toValidateJsonSchema(schema);
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/empty.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/empty.yml
new file mode 100644
index 00000000000..d9838fbb6fd
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/empty.yml
@@ -0,0 +1,3 @@
+include:
+ - project: ''
+ file: '/templates/.gitlab-ci-template.yml'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/invalid_variable.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/invalid_variable.yml
new file mode 100644
index 00000000000..32933f856c7
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/invalid_variable.yml
@@ -0,0 +1,3 @@
+include:
+ - project: 'slug#'
+ file: '/templates/.gitlab-ci-template.yml'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/leading_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/leading_slash.yml
new file mode 100644
index 00000000000..c463318be31
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/leading_slash.yml
@@ -0,0 +1,3 @@
+include:
+ - project: '/slug'
+ file: '/templates/.gitlab-ci-template.yml'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/no_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/no_slash.yml
new file mode 100644
index 00000000000..51194a1d40c
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/no_slash.yml
@@ -0,0 +1,3 @@
+include:
+ - project: 'slug'
+ file: '/templates/.gitlab-ci-template.yml'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/tailing_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/tailing_slash.yml
new file mode 100644
index 00000000000..91f258888d8
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/include/tailing_slash.yml
@@ -0,0 +1,3 @@
+include:
+ - project: 'slug/'
+ file: '/templates/.gitlab-ci-template.yml'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/empty.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/empty.yml
new file mode 100644
index 00000000000..ee2bb3e8ace
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/empty.yml
@@ -0,0 +1,5 @@
+trigger-include:
+ trigger:
+ include:
+ - file: '/path/to/child-pipeline.yml'
+ project: ''
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/invalid_variable.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/invalid_variable.yml
new file mode 100644
index 00000000000..770305be0dc
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/invalid_variable.yml
@@ -0,0 +1,5 @@
+trigger-include:
+ trigger:
+ include:
+ - file: '/path/to/child-pipeline.yml'
+ project: 'slug#'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/leading_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/leading_slash.yml
new file mode 100644
index 00000000000..82fd77cf0d3
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/leading_slash.yml
@@ -0,0 +1,5 @@
+trigger-include:
+ trigger:
+ include:
+ - file: '/path/to/child-pipeline.yml'
+ project: '/slug'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/no_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/no_slash.yml
new file mode 100644
index 00000000000..f4ea59c7945
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/no_slash.yml
@@ -0,0 +1,5 @@
+trigger-include:
+ trigger:
+ include:
+ - file: '/path/to/child-pipeline.yml'
+ project: 'slug'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/tailing_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/tailing_slash.yml
new file mode 100644
index 00000000000..a0195c03352
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/include/tailing_slash.yml
@@ -0,0 +1,5 @@
+trigger-include:
+ trigger:
+ include:
+ - file: '/path/to/child-pipeline.yml'
+ project: 'slug/'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/empty.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/empty.yml
new file mode 100644
index 00000000000..cad8dbbf430
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/empty.yml
@@ -0,0 +1,2 @@
+trigger-minimal:
+ trigger: ''
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/invalid_variable.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/invalid_variable.yml
new file mode 100644
index 00000000000..6ca37666d09
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/invalid_variable.yml
@@ -0,0 +1,2 @@
+trigger-minimal:
+ trigger: 'slug#'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/leading_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/leading_slash.yml
new file mode 100644
index 00000000000..9d7c6b44125
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/leading_slash.yml
@@ -0,0 +1,2 @@
+trigger-minimal:
+ trigger: '/slug'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/no_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/no_slash.yml
new file mode 100644
index 00000000000..acd047477c8
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/no_slash.yml
@@ -0,0 +1,2 @@
+trigger-minimal:
+ trigger: 'slug'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/tailing_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/tailing_slash.yml
new file mode 100644
index 00000000000..0fdd00da3de
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/minimal/tailing_slash.yml
@@ -0,0 +1,2 @@
+trigger-minimal:
+ trigger: 'slug/'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/empty.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/empty.yml
new file mode 100644
index 00000000000..0aa2330cecb
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/empty.yml
@@ -0,0 +1,3 @@
+trigger-project:
+ trigger:
+ project: ''
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/invalid_variable.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/invalid_variable.yml
new file mode 100644
index 00000000000..3c17ec62039
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/invalid_variable.yml
@@ -0,0 +1,3 @@
+trigger-project:
+ trigger:
+ project: 'slug#'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/leading_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/leading_slash.yml
new file mode 100644
index 00000000000..f9884603171
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/leading_slash.yml
@@ -0,0 +1,3 @@
+trigger-project:
+ trigger:
+ project: '/slug'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/no_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/no_slash.yml
new file mode 100644
index 00000000000..d89e09756eb
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/no_slash.yml
@@ -0,0 +1,3 @@
+trigger-project:
+ trigger:
+ project: 'slug'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/tailing_slash.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/tailing_slash.yml
new file mode 100644
index 00000000000..3c39d6be4cb
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/project_path/trigger/project/tailing_slash.yml
@@ -0,0 +1,3 @@
+trigger-project:
+ trigger:
+ project: 'slug/'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/variables.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/variables.yml
new file mode 100644
index 00000000000..a7f23cf0d73
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/variables.yml
@@ -0,0 +1,5 @@
+# invalid variable (unknown keyword is used)
+variables:
+ FOO:
+ value: BAR
+ desc: A single value variable
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/project_path.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/project_path.yml
new file mode 100644
index 00000000000..8a12cdf4f15
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/project_path.yml
@@ -0,0 +1,101 @@
+# Covers https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95469
+# Test cases:
+# - include file from project
+# - trigger pipeline from project, 3 forms (see schema at ci.json)
+#
+# Sub-cases - forms of project path:
+# - common case: group/project
+# - sub-group: group/sub-group/project
+# - variable: $FOO
+# - variable in string: group/$VAR/project
+# - invalid variable: $.
+# (testing regex, that does not validate variable names)
+
+# BEGIN CASE: include yml from project
+include:
+ - project: 'group/project'
+ file: '/templates/.gitlab-ci-template.yml'
+
+ - project: 'group/sub-group/project'
+ file: '/templates/.gitlab-ci-template.yml'
+
+ - project: '$FOO'
+ file: '/templates/.gitlab-ci-template.yml'
+
+ - project: 'group/$VAR/project'
+ file: '/templates/.gitlab-ci-template.yml'
+
+ - project: '$.'
+ file: '/templates/.gitlab-ci-template.yml'
+# END CASE
+
+# BEGIN CASE: trigger minimal
+trigger-minimal:
+ trigger: 'group/project'
+
+trigger-minimal-sub-group:
+ trigger: 'group/sub-group/project'
+
+trigger-minimal-variable:
+ trigger: '$FOO'
+
+trigger-minimal-variable-in-string:
+ trigger: 'group/$VAR/project'
+
+trigger-minimal-invalid-variable:
+ trigger: '$.'
+# END CASE
+
+# BEGIN CASE: trigger project
+trigger-project:
+ trigger:
+ project: 'group/project'
+
+trigger-project-sub-group:
+ trigger:
+ project: 'group/sub-group/project'
+
+trigger-project-variable:
+ trigger:
+ project: '$FOO'
+
+trigger-project-variable-in-string:
+ trigger:
+ project: 'group/$VAR/project'
+
+trigger-project-invalid-variable:
+ trigger:
+ project: '$.'
+# END CASE
+
+# BEGIN CASE: trigger file
+trigger-include:
+ trigger:
+ include:
+ - project: 'group/project'
+ file: '/path/to/child-pipeline.yml'
+
+trigger-include-sub-group:
+ trigger:
+ include:
+ - project: 'group/sub-group/project'
+ file: '/path/to/child-pipeline.yml'
+
+trigger-include-variable:
+ trigger:
+ include:
+ - project: '$FOO'
+ file: '/path/to/child-pipeline.yml'
+
+trigger-include-variable-in-string:
+ trigger:
+ include:
+ - project: 'group/$VAR/project'
+ file: '/path/to/child-pipeline.yml'
+
+trigger-include-invalid-variable:
+ trigger:
+ include:
+ - project: '$.'
+ file: '/path/to/child-pipeline.yml'
+# END CASE
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
index 37cae6b4264..ef604f707b5 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
@@ -15,7 +15,9 @@ rules:changes as array of strings:
# valid workflow:rules:exists
# valid rules:changes:path
+# valid workflow:name
workflow:
+ name: 'Pipeline name'
rules:
- changes:
paths:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/variables.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/variables.yml
new file mode 100644
index 00000000000..ee71087a72e
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/variables.yml
@@ -0,0 +1,8 @@
+variables:
+ TEST_VAR: "hello world!"
+ 123456: "123456"
+ FOO:
+ value: "BAR"
+ description: "A single value variable"
+ DEPLOY_ENVIRONMENT:
+ description: "A multi-value variable"
diff --git a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
index 9a14e1a55eb..21f8979f1a9 100644
--- a/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
+++ b/spec/frontend/editor/source_editor_ci_schema_ext_spec.js
@@ -1,4 +1,4 @@
-import { languages } from 'monaco-editor';
+import { setDiagnosticsOptions } from 'monaco-yaml';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import { CiSchemaExtension } from '~/editor/extensions/source_editor_ci_schema_ext';
@@ -52,16 +52,12 @@ describe('~/editor/editor_ci_config_ext', () => {
});
describe('registerCiSchema', () => {
- beforeEach(() => {
- jest.spyOn(languages.yaml.yamlDefaults, 'setDiagnosticsOptions');
- });
-
describe('register validations options with monaco for yaml language', () => {
const mockProjectNamespace = 'namespace1';
const mockProjectPath = 'project1';
const getConfiguredYmlSchema = () => {
- return languages.yaml.yamlDefaults.setDiagnosticsOptions.mock.calls[0][0].schemas[0];
+ return setDiagnosticsOptions.mock.calls[0][0].schemas[0];
};
it('with expected basic validation configuration', () => {
@@ -77,8 +73,8 @@ describe('~/editor/editor_ci_config_ext', () => {
completion: true,
};
- expect(languages.yaml.yamlDefaults.setDiagnosticsOptions).toHaveBeenCalledTimes(1);
- expect(languages.yaml.yamlDefaults.setDiagnosticsOptions).toHaveBeenCalledWith(
+ expect(setDiagnosticsOptions).toHaveBeenCalledTimes(1);
+ expect(setDiagnosticsOptions).toHaveBeenCalledWith(
expect.objectContaining(expectedOptions),
);
});
diff --git a/spec/frontend/editor/source_editor_instance_spec.js b/spec/frontend/editor/source_editor_instance_spec.js
index 20ba23d56ff..89b5ad27690 100644
--- a/spec/frontend/editor/source_editor_instance_spec.js
+++ b/spec/frontend/editor/source_editor_instance_spec.js
@@ -160,7 +160,7 @@ describe('Source Editor Instance', () => {
});
describe('public API', () => {
- it.each(['use', 'unuse'], 'provides "%s" as public method by default', (method) => {
+ it.each(['use', 'unuse'])('provides "%s" as public method by default', (method) => {
seInstance = new SourceEditorInstance();
expect(seInstance[method]).toBeDefined();
});
diff --git a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
index fe20c23e4d7..1ff351b6554 100644
--- a/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
+++ b/spec/frontend/editor/source_editor_markdown_livepreview_ext_spec.js
@@ -12,7 +12,7 @@ import {
} from '~/editor/constants';
import { EditorMarkdownPreviewExtension } from '~/editor/extensions/source_editor_markdown_livepreview_ext';
import SourceEditor from '~/editor/source_editor';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import syntaxHighlight from '~/syntax_highlight';
import { spyOnApi } from './helpers';
@@ -279,7 +279,7 @@ describe('Markdown Live Preview Extension for Source Editor', () => {
mockAxios.onPost().reply(500);
await fetchPreview();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/environments/delete_environment_modal_spec.js b/spec/frontend/environments/delete_environment_modal_spec.js
index 48e4f661c1d..cc18bf754eb 100644
--- a/spec/frontend/environments/delete_environment_modal_spec.js
+++ b/spec/frontend/environments/delete_environment_modal_spec.js
@@ -6,7 +6,7 @@ import { s__, sprintf } from '~/locale';
import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { resolvedEnvironment } from './graphql/mock_data';
jest.mock('~/flash');
@@ -57,7 +57,7 @@ describe('~/environments/components/delete_environment_modal.vue', () => {
await nextTick();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(deleteResolver).toHaveBeenCalledWith(
expect.anything(),
@@ -76,7 +76,7 @@ describe('~/environments/components/delete_environment_modal.vue', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: s__(
'Environments|An error occurred while deleting the environment. Check if the environment stopped; if not, stop it and try again.',
diff --git a/spec/frontend/environments/edit_environment_spec.js b/spec/frontend/environments/edit_environment_spec.js
index 0f2d6e95bf0..5ea23af4c16 100644
--- a/spec/frontend/environments/edit_environment_spec.js
+++ b/spec/frontend/environments/edit_environment_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import EditEnvironment from '~/environments/components/edit_environment.vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { visitUrl } from '~/lib/utils/url_utility';
@@ -85,7 +85,7 @@ describe('~/environments/components/edit.vue', () => {
await submitForm(expected, [400, { message: ['uh oh!'] }]);
- expect(createFlash).toHaveBeenCalledWith({ message: 'uh oh!' });
+ expect(createAlert).toHaveBeenCalledWith({ message: 'uh oh!' });
expect(showsLoading()).toBe(false);
});
diff --git a/spec/frontend/environments/empty_state_spec.js b/spec/frontend/environments/empty_state_spec.js
index 974afc6d032..02cf2dc3c68 100644
--- a/spec/frontend/environments/empty_state_spec.js
+++ b/spec/frontend/environments/empty_state_spec.js
@@ -4,10 +4,21 @@ import EmptyState from '~/environments/components/empty_state.vue';
import { ENVIRONMENTS_SCOPE } from '~/environments/constants';
const HELP_PATH = '/help';
+const NEW_PATH = '/new';
describe('~/environments/components/empty_state.vue', () => {
let wrapper;
+ const findNewEnvironmentLink = () =>
+ wrapper.findByRole('link', {
+ name: s__('Environments|New environment'),
+ });
+
+ const findDocsLink = () =>
+ wrapper.findByRole('link', {
+ name: s__('Environments|How do I create an environment?'),
+ });
+
const createWrapper = ({ propsData = {} } = {}) =>
mountExtended(EmptyState, {
propsData: {
@@ -15,6 +26,7 @@ describe('~/environments/components/empty_state.vue', () => {
helpPath: HELP_PATH,
...propsData,
},
+ provide: { newEnvironmentPath: NEW_PATH },
});
afterEach(() => {
@@ -44,10 +56,44 @@ describe('~/environments/components/empty_state.vue', () => {
it('shows a link to the the help path', () => {
wrapper = createWrapper();
- const link = wrapper.findByRole('link', {
- name: s__('Environments|How do I create an environment?'),
- });
+ const link = findDocsLink();
expect(link.attributes('href')).toBe(HELP_PATH);
});
+
+ it('hides a link to creating a new environment', () => {
+ const link = findNewEnvironmentLink();
+
+ expect(link.exists()).toBe(false);
+ });
+
+ describe('with search term', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ propsData: { hasTerm: true } });
+ });
+
+ it('should show text about searching', () => {
+ const header = wrapper.findByRole('heading', {
+ name: s__('Environments|No results found'),
+ });
+
+ expect(header.exists()).toBe(true);
+
+ const text = wrapper.findByText(s__('Environments|Edit your search and try again'));
+
+ expect(text.exists()).toBe(true);
+ });
+
+ it('hides the documentation link', () => {
+ const link = findDocsLink();
+
+ expect(link.exists()).toBe(false);
+ });
+
+ it('shows a link to create a new environment', () => {
+ const link = findNewEnvironmentLink();
+
+ expect(link.attributes('href')).toBe(NEW_PATH);
+ });
+ });
});
diff --git a/spec/frontend/environments/enable_review_app_modal_spec.js b/spec/frontend/environments/enable_review_app_modal_spec.js
index b6dac811ea6..7939bd600dc 100644
--- a/spec/frontend/environments/enable_review_app_modal_spec.js
+++ b/spec/frontend/environments/enable_review_app_modal_spec.js
@@ -1,7 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { GlModal } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import EnableReviewAppButton from '~/environments/components/enable_review_app_modal.vue';
+import EnableReviewAppModal from '~/environments/components/enable_review_app_modal.vue';
+import { REVIEW_APP_MODAL_I18N as i18n } from '~/environments/constants';
import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
// hardcode uniqueId for determinism
@@ -9,10 +10,12 @@ jest.mock('lodash/uniqueId', () => (x) => `${x}77`);
const EXPECTED_COPY_PRE_ID = 'enable-review-app-copy-string-77';
-describe('Enable Review App Button', () => {
+describe('Enable Review App Modal', () => {
let wrapper;
let modal;
+ const findInstructions = () => wrapper.findAll('ol li');
+ const findInstructionAt = (i) => wrapper.findAll('ol li').at(i);
const findCopyString = () => wrapper.find(`#${EXPECTED_COPY_PRE_ID}`);
afterEach(() => {
@@ -22,29 +25,31 @@ describe('Enable Review App Button', () => {
describe('renders the modal', () => {
beforeEach(() => {
wrapper = extendedWrapper(
- shallowMount(EnableReviewAppButton, {
+ shallowMount(EnableReviewAppModal, {
propsData: {
modalId: 'fake-id',
visible: true,
},
- provide: {
- defaultBranchName: 'main',
- },
}),
);
modal = wrapper.findComponent(GlModal);
});
- it('renders the defaultBranchName copy', () => {
- expect(findCopyString().text()).toContain('- main');
+ it('displays instructions', () => {
+ expect(findInstructions().length).toBe(7);
+ expect(findInstructionAt(0).text()).toContain(i18n.instructions.step1);
+ });
+
+ it('renders the snippet to copy', () => {
+ expect(findCopyString().text()).toBe(wrapper.vm.modalInfoCopyStr);
});
it('renders the copyToClipboard button', () => {
expect(wrapper.findComponent(ModalCopyButton).props()).toMatchObject({
modalId: 'fake-id',
target: `#${EXPECTED_COPY_PRE_ID}`,
- title: 'Copy snippet text',
+ title: i18n.copyToClipboardText,
});
});
diff --git a/spec/frontend/environments/environment_external_url_spec.js b/spec/frontend/environments/environment_external_url_spec.js
index 4c133665979..5966993166b 100644
--- a/spec/frontend/environments/environment_external_url_spec.js
+++ b/spec/frontend/environments/environment_external_url_spec.js
@@ -1,16 +1,35 @@
import { mount } from '@vue/test-utils';
+import { s__, __ } from '~/locale';
import ExternalUrlComp from '~/environments/components/environment_external_url.vue';
+import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
describe('External URL Component', () => {
let wrapper;
- const externalUrl = 'https://gitlab.com';
+ let externalUrl;
- beforeEach(() => {
- wrapper = mount(ExternalUrlComp, { propsData: { externalUrl } });
+ describe('with safe link', () => {
+ beforeEach(() => {
+ externalUrl = 'https://gitlab.com';
+ wrapper = mount(ExternalUrlComp, { propsData: { externalUrl } });
+ });
+
+ it('should link to the provided externalUrl prop', () => {
+ expect(wrapper.attributes('href')).toBe(externalUrl);
+ expect(wrapper.find('a').exists()).toBe(true);
+ });
});
- it('should link to the provided externalUrl prop', () => {
- expect(wrapper.attributes('href')).toEqual(externalUrl);
- expect(wrapper.find('a').exists()).toBe(true);
+ describe('with unsafe link', () => {
+ beforeEach(() => {
+ externalUrl = 'postgres://gitlab';
+ wrapper = mount(ExternalUrlComp, { propsData: { externalUrl } });
+ });
+
+ it('should show a copy button instead', () => {
+ const button = wrapper.findComponent(ModalCopyButton);
+ expect(button.props('text')).toBe(externalUrl);
+ expect(button.text()).toBe(__('Copy URL'));
+ expect(button.props('title')).toBe(s__('Environments|Copy live environment URL'));
+ });
});
});
diff --git a/spec/frontend/environments/environment_folder_spec.js b/spec/frontend/environments/environment_folder_spec.js
index 48624f2324b..a37515bc3f7 100644
--- a/spec/frontend/environments/environment_folder_spec.js
+++ b/spec/frontend/environments/environment_folder_spec.js
@@ -31,6 +31,7 @@ describe('~/environments/components/environments_folder.vue', () => {
apolloProvider,
propsData: {
scope: 'available',
+ search: '',
...propsData,
},
stubs: { transition: stubTransition() },
@@ -137,13 +138,26 @@ describe('~/environments/components/environments_folder.vue', () => {
expect(environmentFolderMock).toHaveBeenCalledTimes(1);
expect(environmentFolderMock).toHaveBeenCalledWith(
{},
- {
- environment: nestedEnvironment.latest,
- scope,
- },
+ expect.objectContaining({ scope }),
expect.anything(),
expect.anything(),
);
},
);
+
+ it('should query for the entered parameter', async () => {
+ const search = 'hello';
+
+ wrapper = createWrapper({ nestedEnvironment, search }, createApolloProvider());
+
+ await nextTick();
+ await waitForPromises();
+
+ expect(environmentFolderMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ search }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
});
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index aff54107d6b..65a9f2907d2 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -71,7 +71,7 @@ describe('~/environments/components/environments_app.vue', () => {
previousPage: 1,
__typename: 'LocalPageInfo',
},
- location = '?scope=available&page=2',
+ location = '?scope=available&page=2&search=prod',
}) => {
setWindowLocation(location);
environmentAppMock.mockReturnValue(environmentsApp);
@@ -104,7 +104,7 @@ describe('~/environments/components/environments_app.vue', () => {
await createWrapperWithMocked({
environmentsApp: resolvedEnvironmentsApp,
folder: resolvedFolder,
- location: '?scope=bad&page=2',
+ location: '?scope=bad&page=2&search=prod',
});
expect(environmentAppMock).toHaveBeenCalledWith(
@@ -350,7 +350,54 @@ describe('~/environments/components/environments_app.vue', () => {
next.trigger('click');
await nextTick();
- expect(window.location.search).toBe('?scope=available&page=3');
+ expect(window.location.search).toBe('?scope=available&page=3&search=prod');
+ });
+ });
+
+ describe('search', () => {
+ let searchBox;
+
+ const waitForDebounce = async () => {
+ await nextTick();
+ jest.runOnlyPendingTimers();
+ };
+
+ beforeEach(async () => {
+ await createWrapperWithMocked({
+ environmentsApp: resolvedEnvironmentsApp,
+ folder: resolvedFolder,
+ });
+ searchBox = wrapper.findByRole('searchbox', {
+ name: s__('Environments|Search by environment name'),
+ });
+ });
+
+ it('should sync the query params to the new search', async () => {
+ searchBox.setValue('hello');
+
+ await waitForDebounce();
+
+ expect(window.location.search).toBe('?scope=available&page=1&search=hello');
+ });
+
+ it('should query for the entered parameter', async () => {
+ const search = 'hello';
+
+ searchBox.setValue(search);
+
+ await waitForDebounce();
+ await waitForPromises();
+
+ expect(environmentAppMock).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({ search }),
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('should sync search term from query params on load', async () => {
+ expect(searchBox.element.value).toBe('prod');
});
});
});
diff --git a/spec/frontend/environments/environments_detail_header_spec.js b/spec/frontend/environments/environments_detail_header_spec.js
index 4687119127d..1f233c05fbf 100644
--- a/spec/frontend/environments/environments_detail_header_spec.js
+++ b/spec/frontend/environments/environments_detail_header_spec.js
@@ -1,10 +1,12 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { __, s__ } from '~/locale';
import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
import EnvironmentsDetailHeader from '~/environments/components/environments_detail_header.vue';
import StopEnvironmentModal from '~/environments/components/stop_environment_modal.vue';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
+import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
import { createEnvironment } from './mock_data';
describe('Environments detail header component', () => {
@@ -243,4 +245,23 @@ describe('Environments detail header component', () => {
expect(findDeleteEnvironmentModal().exists()).toBe(true);
});
});
+
+ describe('when the environment has an unsafe external url', () => {
+ const externalUrl = 'postgres://staging';
+
+ beforeEach(() => {
+ createWrapper({
+ props: {
+ environment: createEnvironment({ externalUrl }),
+ },
+ });
+ });
+
+ it('should show a copy button instead', () => {
+ const button = wrapper.findComponent(ModalCopyButton);
+ expect(button.props('title')).toBe(s__('Environments|Copy live environment URL'));
+ expect(button.props('text')).toBe(externalUrl);
+ expect(button.text()).toBe(__('Copy URL'));
+ });
+ });
});
diff --git a/spec/frontend/environments/graphql/resolvers_spec.js b/spec/frontend/environments/graphql/resolvers_spec.js
index 26f0659204a..7684cca2303 100644
--- a/spec/frontend/environments/graphql/resolvers_spec.js
+++ b/spec/frontend/environments/graphql/resolvers_spec.js
@@ -41,11 +41,16 @@ describe('~/frontend/environments/graphql/resolvers', () => {
it('should fetch environments and map them to frontend data', async () => {
const cache = { writeQuery: jest.fn() };
const scope = 'available';
+ const search = '';
mock
- .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } })
+ .onGet(ENDPOINT, { params: { nested: true, scope, page: 1, search } })
.reply(200, environmentsApp, {});
- const app = await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache });
+ const app = await mockResolvers.Query.environmentApp(
+ null,
+ { scope, page: 1, search },
+ { cache },
+ );
expect(app).toEqual(resolvedEnvironmentsApp);
expect(cache.writeQuery).toHaveBeenCalledWith({
query: pollIntervalQuery,
@@ -57,12 +62,12 @@ describe('~/frontend/environments/graphql/resolvers', () => {
const scope = 'stopped';
const interval = 3000;
mock
- .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } })
+ .onGet(ENDPOINT, { params: { nested: true, scope, page: 1, search: '' } })
.reply(200, environmentsApp, {
'poll-interval': interval,
});
- await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache });
+ await mockResolvers.Query.environmentApp(null, { scope, page: 1, search: '' }, { cache });
expect(cache.writeQuery).toHaveBeenCalledWith({
query: pollIntervalQuery,
data: { interval },
@@ -72,7 +77,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
const cache = { writeQuery: jest.fn() };
const scope = 'stopped';
mock
- .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } })
+ .onGet(ENDPOINT, { params: { nested: true, scope, page: 1, search: '' } })
.reply(200, environmentsApp, {
'x-next-page': '2',
'x-page': '1',
@@ -82,7 +87,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
'X-Total-Pages': '5',
});
- await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache });
+ await mockResolvers.Query.environmentApp(null, { scope, page: 1, search: '' }, { cache });
expect(cache.writeQuery).toHaveBeenCalledWith({
query: pageInfoQuery,
data: {
@@ -102,10 +107,10 @@ describe('~/frontend/environments/graphql/resolvers', () => {
const cache = { writeQuery: jest.fn() };
const scope = 'stopped';
mock
- .onGet(ENDPOINT, { params: { nested: true, scope, page: 1 } })
+ .onGet(ENDPOINT, { params: { nested: true, scope, page: 1, search: '' } })
.reply(200, environmentsApp, {});
- await mockResolvers.Query.environmentApp(null, { scope, page: 1 }, { cache });
+ await mockResolvers.Query.environmentApp(null, { scope, page: 1, search: '' }, { cache });
expect(cache.writeQuery).toHaveBeenCalledWith({
query: pageInfoQuery,
data: {
@@ -124,11 +129,14 @@ describe('~/frontend/environments/graphql/resolvers', () => {
});
describe('folder', () => {
it('should fetch the folder url passed to it', async () => {
- mock.onGet(ENDPOINT, { params: { per_page: 3, scope: 'available' } }).reply(200, folder);
+ mock
+ .onGet(ENDPOINT, { params: { per_page: 3, scope: 'available', search: '' } })
+ .reply(200, folder);
const environmentFolder = await mockResolvers.Query.folder(null, {
environment: { folderPath: ENDPOINT },
scope: 'available',
+ search: '',
});
expect(environmentFolder).toEqual(resolvedFolder);
diff --git a/spec/frontend/environments/new_environment_spec.js b/spec/frontend/environments/new_environment_spec.js
index 2405cb82eac..6dd4eea7437 100644
--- a/spec/frontend/environments/new_environment_spec.js
+++ b/spec/frontend/environments/new_environment_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import NewEnvironment from '~/environments/components/new_environment.vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { visitUrl } from '~/lib/utils/url_utility';
@@ -94,7 +94,7 @@ describe('~/environments/components/new.vue', () => {
await submitForm(expected, [400, { message: ['name taken'] }]);
- expect(createFlash).toHaveBeenCalledWith({ message: 'name taken' });
+ expect(createAlert).toHaveBeenCalledWith({ message: 'name taken' });
expect(showsLoading()).toBe(false);
});
});
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 732eff65495..9d6e46be8c4 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -18,7 +18,7 @@ import {
trackErrorDetailsViewsOptions,
trackErrorStatusUpdateOptions,
} from '~/error_tracking/utils';
-import createFlash from '~/flash';
+import { createAlert, VARIANT_WARNING } from '~/flash';
import { __ } from '~/locale';
import Tracking from '~/tracking';
@@ -144,7 +144,7 @@ describe('ErrorDetails', () => {
await nextTick();
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(mocks.$apollo.queries.error.stopPolling).not.toHaveBeenCalled();
});
@@ -156,9 +156,9 @@ describe('ErrorDetails', () => {
await nextTick();
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.findComponent(GlLink).exists()).toBe(false);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Could not connect to Sentry. Refresh the page to try again.',
- type: 'warning',
+ variant: VARIANT_WARNING,
});
expect(mocks.$apollo.queries.error.stopPolling).toHaveBeenCalled();
});
diff --git a/spec/frontend/error_tracking/store/actions_spec.js b/spec/frontend/error_tracking/store/actions_spec.js
index 6bac21341a7..8f085282f80 100644
--- a/spec/frontend/error_tracking/store/actions_spec.js
+++ b/spec/frontend/error_tracking/store/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/error_tracking/store/actions';
import * as types from '~/error_tracking/store/mutation_types';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { visitUrl } from '~/lib/utils/url_utility';
@@ -20,7 +20,7 @@ describe('Sentry common store actions', () => {
afterEach(() => {
mock.restore();
- createFlash.mockClear();
+ createAlert.mockClear();
});
const endpoint = '123/stacktrace';
const redirectUrl = '/list';
@@ -49,7 +49,7 @@ describe('Sentry common store actions', () => {
mock.onPut().reply(400, {});
await testAction(actions.updateStatus, params, {}, [], []);
expect(visitUrl).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
index a3a6f7cc309..1893d226270 100644
--- a/spec/frontend/error_tracking/store/details/actions_spec.js
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/error_tracking/store/details/actions';
import * as types from '~/error_tracking/store/details/mutation_types';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
@@ -19,7 +19,7 @@ describe('Sentry error details store actions', () => {
afterEach(() => {
mockedAdapter.restore();
- createFlash.mockClear();
+ createAlert.mockClear();
if (mockedRestart) {
mockedRestart.mockRestore();
mockedRestart = null;
@@ -53,7 +53,7 @@ describe('Sentry error details store actions', () => {
[{ type: types.SET_LOADING_STACKTRACE, payload: false }],
[],
);
- expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledTimes(1);
});
it('should not restart polling when receiving an empty 204 response', async () => {
diff --git a/spec/frontend/error_tracking/store/list/actions_spec.js b/spec/frontend/error_tracking/store/list/actions_spec.js
index 7173f68bb96..2809bbe834e 100644
--- a/spec/frontend/error_tracking/store/list/actions_spec.js
+++ b/spec/frontend/error_tracking/store/list/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/error_tracking/store/list/actions';
import * as types from '~/error_tracking/store/list/mutation_types';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
@@ -51,7 +51,7 @@ describe('error tracking actions', () => {
],
[],
);
- expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
index b87571830ca..22bac3fca15 100644
--- a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
+++ b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { dismiss } from '~/feature_highlight/feature_highlight_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
@@ -32,7 +32,7 @@ describe('feature highlight helper', () => {
await dismiss(endpoint, highlightId);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message:
'An error occurred while dismissing the feature highlight. Refresh the page and try dismissing again.',
});
diff --git a/spec/frontend/fixtures/namespaces.rb b/spec/frontend/fixtures/namespaces.rb
index b11f661fe09..a3f295f4e66 100644
--- a/spec/frontend/fixtures/namespaces.rb
+++ b/spec/frontend/fixtures/namespaces.rb
@@ -7,38 +7,43 @@ RSpec.describe 'Jobs (JavaScript fixtures)' do
include JavaScriptFixturesHelpers
include GraphqlHelpers
- describe GraphQL::Query, type: :request do
+ describe API::Projects, type: :request do
let_it_be(:user) { create(:user) }
- let_it_be(:groups) { create_list(:group, 4) }
- before_all do
- groups.each { |group| group.add_owner(user) }
- end
+ describe 'transfer_locations' do
+ let_it_be(:groups) { create_list(:group, 4) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
- query_name = 'search_namespaces_where_user_can_transfer_projects'
- query_extension = '.query.graphql'
+ before_all do
+ groups.each { |group| group.add_owner(user) }
+ end
- full_input_path = "projects/settings/graphql/queries/#{query_name}#{query_extension}"
- base_output_path = "graphql/projects/settings/#{query_name}"
+ it 'api/projects/transfer_locations_page_1.json' do
+ get api("/projects/#{project.id}/transfer_locations?per_page=2", user)
- it "#{base_output_path}_page_1#{query_extension}.json" do
- query = get_graphql_query_as_string(full_input_path)
+ expect(response).to be_successful
+ end
- post_graphql(query, current_user: user, variables: { first: 2 })
+ it 'api/projects/transfer_locations_page_2.json' do
+ get api("/projects/#{project.id}/transfer_locations?per_page=2&page=2", user)
- expect_graphql_errors_to_be_empty
+ expect(response).to be_successful
+ end
end
+ end
+
+ describe GraphQL::Query, type: :request do
+ let_it_be(:user) { create(:user) }
+
+ query_name = 'current_user_namespace.query.graphql'
- it "#{base_output_path}_page_2#{query_extension}.json" do
- query = get_graphql_query_as_string(full_input_path)
+ input_path = "projects/settings/graphql/queries/#{query_name}"
+ output_path = "graphql/projects/settings/#{query_name}.json"
- post_graphql(query, current_user: user, variables: { first: 2 })
+ it output_path do
+ query = get_graphql_query_as_string(input_path)
- post_graphql(
- query,
- current_user: user,
- variables: { first: 2, after: graphql_data_at('currentUser', 'groups', 'pageInfo', 'endCursor') }
- )
+ post_graphql(query, current_user: user)
expect_graphql_errors_to_be_empty
end
diff --git a/spec/frontend/fixtures/pipeline_schedules.rb b/spec/frontend/fixtures/pipeline_schedules.rb
index 5b7a445557e..4de0bd762f8 100644
--- a/spec/frontend/fixtures/pipeline_schedules.rb
+++ b/spec/frontend/fixtures/pipeline_schedules.rb
@@ -2,40 +2,74 @@
require 'spec_helper'
-RSpec.describe Projects::PipelineSchedulesController, '(JavaScript fixtures)', type: :controller do
+RSpec.describe 'Pipeline schedules (JavaScript fixtures)' do
+ include ApiHelpers
include JavaScriptFixturesHelpers
+ include GraphqlHelpers
let(:namespace) { create(:namespace, name: 'frontend-fixtures' ) }
let(:project) { create(:project, :public, :repository) }
let(:user) { project.first_owner }
let!(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+ let!(:pipeline_schedule_inactive) { create(:ci_pipeline_schedule, :inactive, project: project, owner: user) }
let!(:pipeline_schedule_populated) { create(:ci_pipeline_schedule, project: project, owner: user) }
let!(:pipeline_schedule_variable1) { create(:ci_pipeline_schedule_variable, key: 'foo', value: 'foovalue', pipeline_schedule: pipeline_schedule_populated) }
let!(:pipeline_schedule_variable2) { create(:ci_pipeline_schedule_variable, key: 'bar', value: 'barvalue', pipeline_schedule: pipeline_schedule_populated) }
- render_views
+ describe Projects::PipelineSchedulesController, type: :controller do
+ render_views
- before do
- sign_in(user)
- end
+ before do
+ sign_in(user)
+ stub_feature_flags(pipeline_schedules_vue: false)
+ end
+
+ it 'pipeline_schedules/edit.html' do
+ get :edit, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: pipeline_schedule.id
+ }
+
+ expect(response).to be_successful
+ end
- it 'pipeline_schedules/edit.html' do
- get :edit, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: pipeline_schedule.id
- }
+ it 'pipeline_schedules/edit_with_variables.html' do
+ get :edit, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: pipeline_schedule_populated.id
+ }
- expect(response).to be_successful
+ expect(response).to be_successful
+ end
end
- it 'pipeline_schedules/edit_with_variables.html' do
- get :edit, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: pipeline_schedule_populated.id
- }
+ describe GraphQL::Query, type: :request do
+ before do
+ pipeline_schedule.pipelines << build(:ci_pipeline, project: project)
+ end
+
+ fixtures_path = 'graphql/pipeline_schedules/'
+ get_pipeline_schedules_query = 'get_pipeline_schedules.query.graphql'
+
+ let_it_be(:query) do
+ get_graphql_query_as_string("pipeline_schedules/graphql/queries/#{get_pipeline_schedules_query}")
+ end
+
+ it "#{fixtures_path}#{get_pipeline_schedules_query}.json" do
+ post_graphql(query, current_user: user, variables: { projectPath: project.full_path })
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it "#{fixtures_path}#{get_pipeline_schedules_query}.as_guest.json" do
+ guest = create(:user)
+ project.add_guest(user)
+
+ post_graphql(query, current_user: guest, variables: { projectPath: project.full_path })
- expect(response).to be_successful
+ expect_graphql_errors_to_be_empty
+ end
end
end
diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js
index e26c52f0bf7..a809bf248bf 100644
--- a/spec/frontend/flash_spec.js
+++ b/spec/frontend/flash_spec.js
@@ -285,6 +285,13 @@ describe('Flash', () => {
expect(document.querySelector('.gl-alert')).toBeNull();
});
+ it('does not crash if calling .dismiss() twice', () => {
+ alert = createAlert({ message: mockMessage });
+
+ alert.dismiss();
+ expect(() => alert.dismiss()).not.toThrow();
+ });
+
it('calls onDismiss when dismissed', () => {
const dismissHandler = jest.fn();
diff --git a/spec/frontend/grafana_integration/components/grafana_integration_spec.js b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
index d2111194097..021a3aa41ed 100644
--- a/spec/frontend/grafana_integration/components/grafana_integration_spec.js
+++ b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import GrafanaIntegration from '~/grafana_integration/components/grafana_integration.vue';
import { createStore } from '~/grafana_integration/store';
import axios from '~/lib/utils/axios_utils';
@@ -30,7 +30,7 @@ describe('grafana integration component', () => {
afterEach(() => {
if (wrapper.destroy) {
wrapper.destroy();
- createFlash.mockReset();
+ createAlert.mockReset();
refreshCurrentPage.mockReset();
}
});
@@ -113,7 +113,7 @@ describe('grafana integration component', () => {
await nextTick();
await jest.runAllTicks();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `There was an error saving your changes. ${message}`,
});
});
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index a4a7530184d..091ec17d58e 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import appComponent from '~/groups/components/app.vue';
import groupFolderComponent from '~/groups/components/group_folder.vue';
import groupItemComponent from '~/groups/components/group_item.vue';
@@ -11,6 +11,7 @@ import eventHub from '~/groups/event_hub';
import GroupsService from '~/groups/service/groups_service';
import GroupsStore from '~/groups/store/groups_store';
import EmptyState from '~/groups/components/empty_state.vue';
+import GroupsComponent from '~/groups/components/groups.vue';
import axios from '~/lib/utils/axios_utils';
import * as urlUtilities from '~/lib/utils/url_utility';
import setWindowLocation from 'helpers/set_window_location_helper';
@@ -115,7 +116,7 @@ describe('AppComponent', () => {
return vm.fetchGroups({}).then(() => {
expect(vm.isLoading).toBe(false);
expect(window.scrollTo).toHaveBeenCalledWith({ behavior: 'smooth', top: 0 });
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred. Please try again.',
});
});
@@ -326,7 +327,7 @@ describe('AppComponent', () => {
expect(vm.service.leaveGroup).toHaveBeenCalledWith(childGroupItem.leavePath);
return waitForPromises().then(() => {
expect(vm.store.removeGroup).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message });
+ expect(createAlert).toHaveBeenCalledWith({ message });
expect(vm.targetGroup.isBeingRemoved).toBe(false);
});
});
@@ -341,7 +342,7 @@ describe('AppComponent', () => {
expect(vm.service.leaveGroup).toHaveBeenCalledWith(childGroupItem.leavePath);
return waitForPromises().then(() => {
expect(vm.store.removeGroup).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({ message });
+ expect(createAlert).toHaveBeenCalledWith({ message });
expect(vm.targetGroup.isBeingRemoved).toBe(false);
});
});
@@ -388,24 +389,27 @@ describe('AppComponent', () => {
});
describe.each`
- action | groups | fromSearch | renderEmptyState | expected
- ${'subgroups_and_projects'} | ${[]} | ${false} | ${true} | ${true}
- ${''} | ${[]} | ${false} | ${true} | ${false}
- ${'subgroups_and_projects'} | ${mockGroups} | ${false} | ${true} | ${false}
- ${'subgroups_and_projects'} | ${[]} | ${true} | ${true} | ${false}
+ action | groups | fromSearch | shouldRenderEmptyState | searchEmpty
+ ${'subgroups_and_projects'} | ${[]} | ${false} | ${true} | ${false}
+ ${''} | ${[]} | ${false} | ${false} | ${false}
+ ${'subgroups_and_projects'} | ${mockGroups} | ${false} | ${false} | ${false}
+ ${'subgroups_and_projects'} | ${[]} | ${true} | ${false} | ${true}
`(
- 'when `action` is $action, `groups` is $groups, `fromSearch` is $fromSearch, and `renderEmptyState` is $renderEmptyState',
- ({ action, groups, fromSearch, renderEmptyState, expected }) => {
- it(expected ? 'renders empty state' : 'does not render empty state', async () => {
+ 'when `action` is $action, `groups` is $groups, and `fromSearch` is $fromSearch',
+ ({ action, groups, fromSearch, shouldRenderEmptyState, searchEmpty }) => {
+ it(`${shouldRenderEmptyState ? 'renders' : 'does not render'} empty state`, async () => {
createShallowComponent({
- propsData: { action, renderEmptyState },
+ propsData: { action, renderEmptyState: true },
});
+ await waitForPromises();
+
vm.updateGroups(groups, fromSearch);
await nextTick();
- expect(wrapper.findComponent(EmptyState).exists()).toBe(expected);
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(shouldRenderEmptyState);
+ expect(wrapper.findComponent(GroupsComponent).props('searchEmpty')).toBe(searchEmpty);
});
},
);
@@ -440,18 +444,10 @@ describe('AppComponent', () => {
expect(eventHub.$on).toHaveBeenCalledWith('showLeaveGroupModal', expect.any(Function));
expect(eventHub.$on).toHaveBeenCalledWith('updatePagination', expect.any(Function));
expect(eventHub.$on).toHaveBeenCalledWith('updateGroups', expect.any(Function));
- });
-
- it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `false`', async () => {
- createShallowComponent();
- await nextTick();
- expect(vm.searchEmptyMessage).toBe('No groups or projects matched your search');
- });
-
- it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `true`', async () => {
- createShallowComponent({ propsData: { hideProjects: true } });
- await nextTick();
- expect(vm.searchEmptyMessage).toBe('No groups matched your search');
+ expect(eventHub.$on).toHaveBeenCalledWith(
+ 'fetchFilteredAndSortedGroups',
+ expect.any(Function),
+ );
});
});
@@ -468,6 +464,46 @@ describe('AppComponent', () => {
expect(eventHub.$off).toHaveBeenCalledWith('showLeaveGroupModal', expect.any(Function));
expect(eventHub.$off).toHaveBeenCalledWith('updatePagination', expect.any(Function));
expect(eventHub.$off).toHaveBeenCalledWith('updateGroups', expect.any(Function));
+ expect(eventHub.$off).toHaveBeenCalledWith(
+ 'fetchFilteredAndSortedGroups',
+ expect.any(Function),
+ );
+ });
+ });
+
+ describe('when `fetchFilteredAndSortedGroups` event is emitted', () => {
+ const search = 'Foo bar';
+ const sort = 'created_asc';
+ const emitFetchFilteredAndSortedGroups = () => {
+ eventHub.$emit('fetchFilteredAndSortedGroups', {
+ filterGroupsBy: search,
+ sortBy: sort,
+ });
+ };
+ let setPaginationInfoSpy;
+
+ beforeEach(() => {
+ setPaginationInfoSpy = jest.spyOn(GroupsStore.prototype, 'setPaginationInfo');
+ createShallowComponent();
+ });
+
+ it('renders loading icon', async () => {
+ emitFetchFilteredAndSortedGroups();
+ await nextTick();
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('calls API with expected params', () => {
+ emitFetchFilteredAndSortedGroups();
+
+ expect(getGroupsSpy).toHaveBeenCalledWith(undefined, undefined, search, sort, undefined);
+ });
+
+ it('updates pagination', () => {
+ emitFetchFilteredAndSortedGroups();
+
+ expect(setPaginationInfoSpy).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 3aa66644c19..4570aa33a6c 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -245,19 +245,14 @@ describe('GroupItemComponent', () => {
expect(vm.$el.querySelector('.group-list-tree')).toBeDefined();
});
});
+
describe('schema.org props', () => {
describe('when showSchemaMarkup is disabled on the group', () => {
- it.each(['itemprop', 'itemtype', 'itemscope'], 'it does not set %s', (attr) => {
+ it.each(['itemprop', 'itemtype', 'itemscope'])('does not set %s', (attr) => {
expect(wrapper.attributes(attr)).toBeUndefined();
});
- it.each(
- ['.js-group-avatar', '.js-group-name', '.js-group-description'],
- 'it does not set `itemprop` on sub-nodes',
- (selector) => {
- expect(wrapper.find(selector).attributes('itemprop')).toBeUndefined();
- },
- );
});
+
describe('when group has microdata', () => {
beforeEach(() => {
const group = withMicrodata({
diff --git a/spec/frontend/groups/components/groups_spec.js b/spec/frontend/groups/components/groups_spec.js
index 866868eff36..0cbb6cc8309 100644
--- a/spec/frontend/groups/components/groups_spec.js
+++ b/spec/frontend/groups/components/groups_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+import { GlEmptyState } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import GroupFolderComponent from '~/groups/components/group_folder.vue';
@@ -15,7 +16,6 @@ describe('GroupsComponent', () => {
const defaultPropsData = {
groups: mockGroups,
pageInfo: mockPageInfo,
- searchEmptyMessage: 'No matching results',
searchEmpty: false,
};
@@ -67,13 +67,16 @@ describe('GroupsComponent', () => {
expect(wrapper.findComponent(GroupFolderComponent).exists()).toBe(true);
expect(findPaginationLinks().exists()).toBe(true);
- expect(wrapper.findByText(defaultPropsData.searchEmptyMessage).exists()).toBe(false);
+ expect(wrapper.findComponent(GlEmptyState).exists()).toBe(false);
});
it('should render empty search message when `searchEmpty` is `true`', () => {
createComponent({ propsData: { searchEmpty: true } });
- expect(wrapper.findByText(defaultPropsData.searchEmptyMessage).exists()).toBe(true);
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
+ title: GroupsComponent.i18n.emptyStateTitle,
+ description: GroupsComponent.i18n.emptyStateDescription,
+ });
});
});
});
diff --git a/spec/frontend/groups/components/new_top_level_group_alert_spec.js b/spec/frontend/groups/components/new_top_level_group_alert_spec.js
new file mode 100644
index 00000000000..db9a5c7b16b
--- /dev/null
+++ b/spec/frontend/groups/components/new_top_level_group_alert_spec.js
@@ -0,0 +1,75 @@
+import { shallowMount } from '@vue/test-utils';
+import NewTopLevelGroupAlert from '~/groups/components/new_top_level_group_alert.vue';
+import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+describe('NewTopLevelGroupAlert', () => {
+ let wrapper;
+ let userCalloutDismissSpy;
+
+ const findAlert = () => wrapper.findComponent({ ref: 'newTopLevelAlert' });
+ const createSubGroupPath = '/groups/new?parent_id=1#create-group-pane';
+
+ const createComponent = ({ shouldShowCallout = true } = {}) => {
+ userCalloutDismissSpy = jest.fn();
+
+ wrapper = shallowMount(NewTopLevelGroupAlert, {
+ provide: {
+ createSubGroupPath,
+ },
+ stubs: {
+ UserCalloutDismisser: makeMockUserCalloutDismisser({
+ dismiss: userCalloutDismissSpy,
+ shouldShowCallout,
+ }),
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when the component is created', () => {
+ beforeEach(() => {
+ createComponent({
+ shouldShowCallout: true,
+ });
+ });
+
+ it('renders a button with a link to create a new sub-group', () => {
+ expect(findAlert().props('primaryButtonText')).toBe(
+ NewTopLevelGroupAlert.i18n.primaryBtnText,
+ );
+ expect(findAlert().props('primaryButtonLink')).toBe(
+ helpPagePath('user/group/subgroups/index'),
+ );
+ });
+ });
+
+ describe('dismissing the alert', () => {
+ beforeEach(() => {
+ findAlert().vm.$emit('dismiss');
+ });
+
+ it('calls the dismiss callback', () => {
+ expect(userCalloutDismissSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when the alert has been dismissed', () => {
+ beforeEach(() => {
+ createComponent({
+ shouldShowCallout: false,
+ });
+ });
+
+ it('does not show the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/groups/components/overview_tabs_spec.js b/spec/frontend/groups/components/overview_tabs_spec.js
index 352bf25b84f..93e087e10f2 100644
--- a/spec/frontend/groups/components/overview_tabs_spec.js
+++ b/spec/frontend/groups/components/overview_tabs_spec.js
@@ -1,28 +1,46 @@
-import { GlTab } from '@gitlab/ui';
+import { GlSorting, GlSortingItem, GlTab } from '@gitlab/ui';
import { nextTick } from 'vue';
+import { createLocalVue } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import OverviewTabs from '~/groups/components/overview_tabs.vue';
import GroupsApp from '~/groups/components/app.vue';
+import GroupFolderComponent from '~/groups/components/group_folder.vue';
import GroupsStore from '~/groups/store/groups_store';
import GroupsService from '~/groups/service/groups_service';
import { createRouter } from '~/groups/init_overview_tabs';
+import eventHub from '~/groups/event_hub';
import {
ACTIVE_TAB_SUBGROUPS_AND_PROJECTS,
ACTIVE_TAB_SHARED,
ACTIVE_TAB_ARCHIVED,
+ OVERVIEW_TABS_SORTING_ITEMS,
} from '~/groups/constants';
import axios from '~/lib/utils/axios_utils';
+const localVue = createLocalVue();
+localVue.component('GroupFolder', GroupFolderComponent);
const router = createRouter();
+const [SORTING_ITEM_NAME, , SORTING_ITEM_UPDATED] = OVERVIEW_TABS_SORTING_ITEMS;
describe('OverviewTabs', () => {
let wrapper;
+ let axiosMock;
- const endpoints = {
- subgroups_and_projects: '/groups/foobar/-/children.json',
- shared: '/groups/foobar/-/shared_projects.json',
- archived: '/groups/foobar/-/children.json?archived=only',
+ const defaultProvide = {
+ endpoints: {
+ subgroups_and_projects: '/groups/foobar/-/children.json',
+ shared: '/groups/foobar/-/shared_projects.json',
+ archived: '/groups/foobar/-/children.json?archived=only',
+ },
+ newSubgroupPath: '/groups/new',
+ newProjectPath: 'projects/new',
+ newSubgroupIllustration: '',
+ newProjectIllustration: '',
+ emptySubgroupIllustration: '',
+ canCreateSubgroups: false,
+ canCreateProjects: false,
+ initialSort: 'name_asc',
};
const routerMock = {
@@ -31,12 +49,15 @@ describe('OverviewTabs', () => {
const createComponent = async ({
route = { name: ACTIVE_TAB_SUBGROUPS_AND_PROJECTS, params: { group: 'foo/bar/baz' } },
+ provide = {},
} = {}) => {
wrapper = mountExtended(OverviewTabs, {
router,
provide: {
- endpoints,
+ ...defaultProvide,
+ ...provide,
},
+ localVue,
mocks: { $route: route, $router: routerMock },
});
@@ -47,13 +68,13 @@ describe('OverviewTabs', () => {
const findTab = (name) => wrapper.findByRole('tab', { name });
const findSelectedTab = () => wrapper.findByRole('tab', { selected: true });
- afterEach(() => {
- wrapper.destroy();
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
});
- beforeEach(async () => {
- // eslint-disable-next-line no-new
- new AxiosMockAdapter(axios);
+ afterEach(() => {
+ wrapper.destroy();
+ axiosMock.restore();
});
it('renders `Subgroups and projects` tab with `GroupsApp` component', async () => {
@@ -68,7 +89,7 @@ describe('OverviewTabs', () => {
expect(tabPanel.findComponent(GroupsApp).props()).toMatchObject({
action: ACTIVE_TAB_SUBGROUPS_AND_PROJECTS,
store: new GroupsStore({ showSchemaMarkup: true }),
- service: new GroupsService(endpoints[ACTIVE_TAB_SUBGROUPS_AND_PROJECTS]),
+ service: new GroupsService(defaultProvide.endpoints[ACTIVE_TAB_SUBGROUPS_AND_PROJECTS]),
hideProjects: false,
renderEmptyState: true,
});
@@ -89,7 +110,7 @@ describe('OverviewTabs', () => {
expect(tabPanel.findComponent(GroupsApp).props()).toMatchObject({
action: ACTIVE_TAB_SHARED,
store: new GroupsStore(),
- service: new GroupsService(endpoints[ACTIVE_TAB_SHARED]),
+ service: new GroupsService(defaultProvide.endpoints[ACTIVE_TAB_SHARED]),
hideProjects: false,
renderEmptyState: false,
});
@@ -112,7 +133,7 @@ describe('OverviewTabs', () => {
expect(tabPanel.findComponent(GroupsApp).props()).toMatchObject({
action: ACTIVE_TAB_ARCHIVED,
store: new GroupsStore(),
- service: new GroupsService(endpoints[ACTIVE_TAB_ARCHIVED]),
+ service: new GroupsService(defaultProvide.endpoints[ACTIVE_TAB_ARCHIVED]),
hideProjects: false,
renderEmptyState: false,
});
@@ -120,6 +141,14 @@ describe('OverviewTabs', () => {
expect(tabPanel.vm.$attrs.lazy).toBe(false);
});
+ it('sets `lazy` prop to `false` for initially active tab and `true` for all other tabs', async () => {
+ await createComponent({ route: { name: ACTIVE_TAB_SHARED, params: { group: 'foo/bar' } } });
+
+ expect(findTabPanels().at(0).vm.$attrs.lazy).toBe(true);
+ expect(findTabPanels().at(1).vm.$attrs.lazy).toBe(false);
+ expect(findTabPanels().at(2).vm.$attrs.lazy).toBe(true);
+ });
+
describe.each([
[
{ name: ACTIVE_TAB_SUBGROUPS_AND_PROJECTS, params: { group: 'foo/bar/baz' } },
@@ -184,4 +213,109 @@ describe('OverviewTabs', () => {
expect(routerMock.push).toHaveBeenCalledWith(expectedRoute);
});
});
+
+ describe('searching and sorting', () => {
+ const setup = async () => {
+ jest.spyOn(eventHub, '$emit');
+ await createComponent();
+
+ // Click through tabs so they are all loaded
+ await findTab(OverviewTabs.i18n[ACTIVE_TAB_SHARED]).trigger('click');
+ await findTab(OverviewTabs.i18n[ACTIVE_TAB_ARCHIVED]).trigger('click');
+ await findTab(OverviewTabs.i18n[ACTIVE_TAB_SUBGROUPS_AND_PROJECTS]).trigger('click');
+ };
+
+ const sharedAssertions = ({ search, sort }) => {
+ it('sets `lazy` prop to `true` for all of the non-active tabs so they are reloaded after sort or search is applied', () => {
+ expect(findTabPanels().at(0).vm.$attrs.lazy).toBe(false);
+ expect(findTabPanels().at(1).vm.$attrs.lazy).toBe(true);
+ expect(findTabPanels().at(2).vm.$attrs.lazy).toBe(true);
+ });
+
+ it('emits `fetchFilteredAndSortedGroups` event from `eventHub`', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith(
+ `${ACTIVE_TAB_SUBGROUPS_AND_PROJECTS}fetchFilteredAndSortedGroups`,
+ {
+ filterGroupsBy: search,
+ sortBy: sort,
+ },
+ );
+ });
+ };
+
+ describe('when search is typed in', () => {
+ const search = 'Foo bar';
+
+ beforeEach(async () => {
+ await setup();
+ await wrapper.findByPlaceholderText(OverviewTabs.i18n.searchPlaceholder).setValue(search);
+ });
+
+ it('updates query string with `filter` key', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({ query: { filter: search } });
+ });
+
+ sharedAssertions({ search, sort: defaultProvide.initialSort });
+ });
+
+ describe('when sort is changed', () => {
+ beforeEach(async () => {
+ await setup();
+ wrapper.findAllComponents(GlSortingItem).at(2).vm.$emit('click');
+ await nextTick();
+ });
+
+ it('updates query string with `sort` key', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({
+ query: { sort: SORTING_ITEM_UPDATED.asc },
+ });
+ });
+
+ sharedAssertions({ search: '', sort: SORTING_ITEM_UPDATED.asc });
+ });
+
+ describe('when sort direction is changed', () => {
+ beforeEach(async () => {
+ await setup();
+ await wrapper
+ .findByRole('button', { name: 'Sorting Direction: Ascending' })
+ .trigger('click');
+ });
+
+ it('updates query string with `sort` key', () => {
+ expect(routerMock.push).toHaveBeenCalledWith({
+ query: { sort: SORTING_ITEM_NAME.desc },
+ });
+ });
+
+ sharedAssertions({ search: '', sort: SORTING_ITEM_NAME.desc });
+ });
+
+ describe('when `filter` and `sort` query strings are set', () => {
+ beforeEach(async () => {
+ await createComponent({
+ route: {
+ name: ACTIVE_TAB_SUBGROUPS_AND_PROJECTS,
+ params: { group: 'foo/bar/baz' },
+ query: { filter: 'Foo bar', sort: SORTING_ITEM_UPDATED.desc },
+ },
+ });
+ });
+
+ it('sets value of search input', () => {
+ expect(
+ wrapper.findByPlaceholderText(OverviewTabs.i18n.searchPlaceholder).element.value,
+ ).toBe('Foo bar');
+ });
+
+ it('sets sort dropdown', () => {
+ expect(wrapper.findComponent(GlSorting).props()).toMatchObject({
+ text: SORTING_ITEM_UPDATED.label,
+ isAscending: false,
+ });
+
+ expect(wrapper.findAllComponents(GlSortingItem).at(2).vm.$attrs.active).toBe(true);
+ });
+ });
+ });
});
diff --git a/spec/frontend/groups/components/transfer_group_form_spec.js b/spec/frontend/groups/components/transfer_group_form_spec.js
index 8cfe8ce8e18..7cbe6e5bbab 100644
--- a/spec/frontend/groups/components/transfer_group_form_spec.js
+++ b/spec/frontend/groups/components/transfer_group_form_spec.js
@@ -2,7 +2,7 @@ import { GlAlert, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Component from '~/groups/components/transfer_group_form.vue';
import ConfirmDanger from '~/vue_shared/components/confirm_danger/confirm_danger.vue';
-import NamespaceSelect from '~/vue_shared/components/namespace_select/namespace_select.vue';
+import NamespaceSelect from '~/vue_shared/components/namespace_select/namespace_select_deprecated.vue';
describe('Transfer group form', () => {
let wrapper;
diff --git a/spec/frontend/groups/store/groups_store_spec.js b/spec/frontend/groups/store/groups_store_spec.js
index 8ac5d7099f1..ce1791d0062 100644
--- a/spec/frontend/groups/store/groups_store_spec.js
+++ b/spec/frontend/groups/store/groups_store_spec.js
@@ -16,13 +16,13 @@ describe('ProjectsStore', () => {
store = new GroupsStore();
expect(Object.keys(store.state).length).toBe(2);
- expect(Array.isArray(store.state.groups)).toBeTruthy();
+ expect(Array.isArray(store.state.groups)).toBe(true);
expect(Object.keys(store.state.pageInfo).length).toBe(0);
- expect(store.hideProjects).toBeFalsy();
+ expect(store.hideProjects).toBe(false);
store = new GroupsStore({ hideProjects: true });
- expect(store.hideProjects).toBeTruthy();
+ expect(store.hideProjects).toBe(true);
});
});
@@ -65,8 +65,8 @@ describe('ProjectsStore', () => {
expect(store.formatGroupItem).toHaveBeenCalledWith(expect.any(Object));
expect(mockParentGroupItem.children.length).toBe(1);
expect(Object.keys(mockParentGroupItem.children[0]).indexOf('fullName')).toBeGreaterThan(-1);
- expect(mockParentGroupItem.isOpen).toBeTruthy();
- expect(mockParentGroupItem.isChildrenLoading).toBeFalsy();
+ expect(mockParentGroupItem.isOpen).toBe(true);
+ expect(mockParentGroupItem.isChildrenLoading).toBe(false);
});
});
diff --git a/spec/frontend/header_search/components/app_spec.js b/spec/frontend/header_search/components/app_spec.js
index 6a138f9a247..b0bfe2b45f0 100644
--- a/spec/frontend/header_search/components/app_spec.js
+++ b/spec/frontend/header_search/components/app_spec.js
@@ -2,6 +2,7 @@ import { GlSearchBoxByType, GlToken, GlIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mockTracking } from 'helpers/tracking_helper';
import { s__, sprintf } from '~/locale';
import HeaderSearchApp from '~/header_search/components/app.vue';
import HeaderSearchAutocompleteItems from '~/header_search/components/header_search_autocomplete_items.vue';
@@ -360,22 +361,43 @@ describe('HeaderSearchApp', () => {
describe('Header Search Input', () => {
describe('when dropdown is closed', () => {
- it('onFocus opens dropdown', async () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ });
+
+ it('onFocus opens dropdown and triggers snowplow event', async () => {
expect(findHeaderSearchDropdown().exists()).toBe(false);
findHeaderSearchInput().vm.$emit('focus');
await nextTick();
expect(findHeaderSearchDropdown().exists()).toBe(true);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'focus_input', {
+ label: 'global_search',
+ property: 'top_navigation',
+ });
});
- it('onClick opens dropdown', async () => {
+ it('onClick opens dropdown and triggers snowplow event', async () => {
expect(findHeaderSearchDropdown().exists()).toBe(false);
findHeaderSearchInput().vm.$emit('click');
await nextTick();
expect(findHeaderSearchDropdown().exists()).toBe(true);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'focus_input', {
+ label: 'global_search',
+ property: 'top_navigation',
+ });
+ });
+
+ it('onClick followed by onFocus only triggers a single snowplow event', async () => {
+ findHeaderSearchInput().vm.$emit('click');
+ findHeaderSearchInput().vm.$emit('focus');
+
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/ide/components/commit_sidebar/actions_spec.js b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
index c9425f6c9cd..dc103fec5d0 100644
--- a/spec/frontend/ide/components/commit_sidebar/actions_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
@@ -1,7 +1,7 @@
import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import { projectData, branches } from 'jest/ide/mock_data';
-import commitActions from '~/ide/components/commit_sidebar/actions.vue';
+import CommitActions from '~/ide/components/commit_sidebar/actions.vue';
import { createStore } from '~/ide/stores';
import {
COMMIT_TO_NEW_BRANCH,
@@ -18,32 +18,27 @@ const BRANCH_REGULAR_NO_ACCESS = 'regular/no-access';
describe('IDE commit sidebar actions', () => {
let store;
- let vm;
+ let wrapper;
const createComponent = ({ hasMR = false, currentBranchId = 'main', emptyRepo = false } = {}) => {
- const Component = Vue.extend(commitActions);
-
- vm = createComponentWithStore(Component, store);
-
- vm.$store.state.currentBranchId = currentBranchId;
- vm.$store.state.currentProjectId = 'abcproject';
+ store.state.currentBranchId = currentBranchId;
+ store.state.currentProjectId = 'abcproject';
const proj = { ...projectData };
proj.branches[currentBranchId] = branches.find((branch) => branch.name === currentBranchId);
proj.empty_repo = emptyRepo;
- Vue.set(vm.$store.state.projects, 'abcproject', proj);
+ Vue.set(store.state.projects, 'abcproject', proj);
if (hasMR) {
- vm.$store.state.currentMergeRequestId = '1';
- vm.$store.state.projects[store.state.currentProjectId].mergeRequests[
+ store.state.currentMergeRequestId = '1';
+ store.state.projects[store.state.currentProjectId].mergeRequests[
store.state.currentMergeRequestId
] = { foo: 'bar' };
}
- vm.$mount();
-
- return vm;
+ wrapper = mount(CommitActions, { store });
+ return wrapper;
};
beforeEach(() => {
@@ -52,17 +47,16 @@ describe('IDE commit sidebar actions', () => {
});
afterEach(() => {
- vm.$destroy();
- vm = null;
+ wrapper.destroy();
});
- const findText = () => vm.$el.textContent;
- const findRadios = () => Array.from(vm.$el.querySelectorAll('input[type="radio"]'));
+ const findText = () => wrapper.text();
+ const findRadios = () => wrapper.findAll('input[type="radio"]');
it('renders 2 groups', () => {
createComponent();
- expect(findRadios().length).toBe(2);
+ expect(findRadios()).toHaveLength(2);
});
it('renders current branch text', () => {
@@ -79,41 +73,38 @@ describe('IDE commit sidebar actions', () => {
expect(findText()).not.toContain('Create a new branch and merge request');
});
- describe('currentBranchText', () => {
- it('escapes current branch', () => {
- const injectedSrc = '<img src="x" />';
- createComponent({ currentBranchId: injectedSrc });
+ it('escapes current branch name', () => {
+ const injectedSrc = '<img src="x" />';
+ const escapedSrc = '&lt;img src=&quot;x&quot; /&gt;';
+ createComponent({ currentBranchId: injectedSrc });
- expect(vm.currentBranchText).not.toContain(injectedSrc);
- });
+ expect(wrapper.text()).not.toContain(injectedSrc);
+ expect(wrapper.text()).not.toContain(escapedSrc);
});
describe('updateSelectedCommitAction', () => {
it('does not return anything if currentBranch does not exist', () => {
createComponent({ currentBranchId: null });
- expect(vm.$store.dispatch).not.toHaveBeenCalled();
+ expect(store.dispatch).not.toHaveBeenCalled();
});
it('is not called on mount if there is already a selected commitAction', () => {
store.state.commitAction = '1';
createComponent({ currentBranchId: null });
- expect(vm.$store.dispatch).not.toHaveBeenCalled();
+ expect(store.dispatch).not.toHaveBeenCalled();
});
it('calls again after staged changes', async () => {
createComponent({ currentBranchId: null });
- vm.$store.state.currentBranchId = 'main';
- vm.$store.state.changedFiles.push({});
- vm.$store.state.stagedFiles.push({});
+ store.state.currentBranchId = 'main';
+ store.state.changedFiles.push({});
+ store.state.stagedFiles.push({});
await nextTick();
- expect(vm.$store.dispatch).toHaveBeenCalledWith(
- ACTION_UPDATE_COMMIT_ACTION,
- expect.anything(),
- );
+ expect(store.dispatch).toHaveBeenCalledWith(ACTION_UPDATE_COMMIT_ACTION, expect.anything());
});
it.each`
@@ -133,9 +124,7 @@ describe('IDE commit sidebar actions', () => {
({ input, expectedOption }) => {
createComponent(input);
- expect(vm.$store.dispatch.mock.calls).toEqual([
- [ACTION_UPDATE_COMMIT_ACTION, expectedOption],
- ]);
+ expect(store.dispatch.mock.calls).toEqual([[ACTION_UPDATE_COMMIT_ACTION, expectedOption]]);
},
);
});
diff --git a/spec/frontend/ide/components/commit_sidebar/list_item_spec.js b/spec/frontend/ide/components/commit_sidebar/list_item_spec.js
index dea920ecb5e..c9571d39acb 100644
--- a/spec/frontend/ide/components/commit_sidebar/list_item_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/list_item_spec.js
@@ -1,133 +1,136 @@
+import { mount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import listItem from '~/ide/components/commit_sidebar/list_item.vue';
+import ListItem from '~/ide/components/commit_sidebar/list_item.vue';
import { createRouter } from '~/ide/ide_router';
import { createStore } from '~/ide/stores';
import { file } from '../../helpers';
describe('Multi-file editor commit sidebar list item', () => {
- let vm;
- let f;
+ let wrapper;
+ let testFile;
let findPathEl;
let store;
let router;
beforeEach(() => {
store = createStore();
- router = createRouter(store);
+ jest.spyOn(store, 'dispatch');
- const Component = Vue.extend(listItem);
+ router = createRouter(store);
- f = file('test-file');
+ testFile = file('test-file');
- store.state.entries[f.path] = f;
+ store.state.entries[testFile.path] = testFile;
- vm = createComponentWithStore(Component, store, {
- file: f,
- activeFileKey: `staged-${f.key}`,
- }).$mount();
+ wrapper = mount(ListItem, {
+ store,
+ propsData: {
+ file: testFile,
+ activeFileKey: `staged-${testFile.key}`,
+ },
+ });
- findPathEl = vm.$el.querySelector('.multi-file-commit-list-path');
+ findPathEl = wrapper.find('.multi-file-commit-list-path');
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- const findPathText = () => trimText(findPathEl.textContent);
+ const findPathText = () => trimText(findPathEl.text());
it('renders file path', () => {
- expect(findPathText()).toContain(f.path);
+ expect(findPathText()).toContain(testFile.path);
});
it('correctly renders renamed entries', async () => {
- Vue.set(vm.file, 'prevName', 'Old name');
-
+ Vue.set(testFile, 'prevName', 'Old name');
await nextTick();
- expect(findPathText()).toEqual(`Old name → ${f.name}`);
+
+ expect(findPathText()).toEqual(`Old name → ${testFile.name}`);
});
it('correctly renders entry, the name of which did not change after rename (as within a folder)', async () => {
- Vue.set(vm.file, 'prevName', f.name);
-
+ Vue.set(testFile, 'prevName', testFile.name);
await nextTick();
- expect(findPathText()).toEqual(f.name);
+
+ expect(findPathText()).toEqual(testFile.name);
});
it('opens a closed file in the editor when clicking the file path', async () => {
- jest.spyOn(vm, 'openPendingTab');
jest.spyOn(router, 'push').mockImplementation(() => {});
- findPathEl.click();
-
- await nextTick();
+ await findPathEl.trigger('click');
- expect(vm.openPendingTab).toHaveBeenCalled();
+ expect(store.dispatch).toHaveBeenCalledWith('openPendingTab', expect.anything());
expect(router.push).toHaveBeenCalled();
});
it('calls updateViewer with diff when clicking file', async () => {
- jest.spyOn(vm, 'openFileInEditor');
- jest.spyOn(vm, 'updateViewer');
jest.spyOn(router, 'push').mockImplementation(() => {});
- findPathEl.click();
-
+ await findPathEl.trigger('click');
await waitForPromises();
- expect(vm.updateViewer).toHaveBeenCalledWith('diff');
+ expect(store.dispatch).toHaveBeenCalledWith('updateViewer', 'diff');
});
- describe('computed', () => {
- describe('iconName', () => {
- it('returns modified when not a tempFile', () => {
- expect(vm.iconName).toBe('file-modified');
- });
+ describe('icon name', () => {
+ const getIconName = () => wrapper.findComponent(GlIcon).props('name');
+
+ it('is modified when not a tempFile', () => {
+ expect(getIconName()).toBe('file-modified');
+ });
- it('returns addition when not a tempFile', () => {
- f.tempFile = true;
+ it('is addition when is a tempFile', async () => {
+ testFile.tempFile = true;
+ await nextTick();
- expect(vm.iconName).toBe('file-addition');
- });
+ expect(getIconName()).toBe('file-addition');
+ });
- it('returns deletion', () => {
- f.deleted = true;
+ it('is deletion when is deleted', async () => {
+ testFile.deleted = true;
+ await nextTick();
- expect(vm.iconName).toBe('file-deletion');
- });
+ expect(getIconName()).toBe('file-deletion');
});
+ });
- describe('iconClass', () => {
- it('returns modified when not a tempFile', () => {
- expect(vm.iconClass).toContain('ide-file-modified');
- });
+ describe('icon class', () => {
+ const getIconClass = () => wrapper.findComponent(GlIcon).classes();
- it('returns addition when not a tempFile', () => {
- f.tempFile = true;
+ it('is modified when not a tempFile', () => {
+ expect(getIconClass()).toContain('ide-file-modified');
+ });
- expect(vm.iconClass).toContain('ide-file-addition');
- });
+ it('is addition when is a tempFile', async () => {
+ testFile.tempFile = true;
+ await nextTick();
- it('returns deletion', () => {
- f.deleted = true;
+ expect(getIconClass()).toContain('ide-file-addition');
+ });
- expect(vm.iconClass).toContain('ide-file-deletion');
- });
+ it('returns deletion when is deleted', async () => {
+ testFile.deleted = true;
+ await nextTick();
+
+ expect(getIconClass()).toContain('ide-file-deletion');
});
});
describe('is active', () => {
it('does not add active class when dont keys match', () => {
- expect(vm.$el.querySelector('.is-active')).toBe(null);
+ expect(wrapper.find('.is-active').exists()).toBe(false);
});
it('adds active class when keys match', async () => {
- vm.keyPrefix = 'staged';
+ await wrapper.setProps({ keyPrefix: 'staged' });
- await nextTick();
- expect(vm.$el.querySelector('.is-active')).not.toBe(null);
+ expect(wrapper.find('.is-active').exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/ide/components/commit_sidebar/message_field_spec.js b/spec/frontend/ide/components/commit_sidebar/message_field_spec.js
index ace266aec5e..c2ef29c1059 100644
--- a/spec/frontend/ide/components/commit_sidebar/message_field_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/message_field_spec.js
@@ -1,135 +1,121 @@
-import Vue, { nextTick } from 'vue';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import createComponent from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import CommitMessageField from '~/ide/components/commit_sidebar/message_field.vue';
describe('IDE commit message field', () => {
- const Component = Vue.extend(CommitMessageField);
- let vm;
+ let wrapper;
beforeEach(() => {
- setHTMLFixture('<div id="app"></div>');
-
- vm = createComponent(
- Component,
- {
+ wrapper = mount(CommitMessageField, {
+ propsData: {
text: '',
placeholder: 'testing',
},
- '#app',
- );
+ attachTo: document.body,
+ });
});
afterEach(() => {
- vm.$destroy();
-
- resetHTMLFixture();
+ wrapper.destroy();
});
+ const findMessage = () => wrapper.find('textarea');
+ const findHighlights = () => wrapper.findAll('.highlights span');
+ const findMarks = () => wrapper.findAll('mark');
+
it('adds is-focused class on focus', async () => {
- vm.$el.querySelector('textarea').focus();
+ await findMessage().trigger('focus');
- await nextTick();
- expect(vm.$el.querySelector('.is-focused')).not.toBeNull();
+ expect(wrapper.find('.is-focused').exists()).toBe(true);
});
it('removed is-focused class on blur', async () => {
- vm.$el.querySelector('textarea').focus();
+ await findMessage().trigger('focus');
- await nextTick();
- expect(vm.$el.querySelector('.is-focused')).not.toBeNull();
+ expect(wrapper.find('.is-focused').exists()).toBe(true);
- vm.$el.querySelector('textarea').blur();
+ await findMessage().trigger('blur');
- await nextTick();
- expect(vm.$el.querySelector('.is-focused')).toBeNull();
+ expect(wrapper.find('.is-focused').exists()).toBe(false);
});
- it('emits input event on input', () => {
- jest.spyOn(vm, '$emit').mockImplementation();
-
- const textarea = vm.$el.querySelector('textarea');
- textarea.value = 'testing';
-
- textarea.dispatchEvent(new Event('input'));
+ it('emits input event on input', async () => {
+ await findMessage().setValue('testing');
- expect(vm.$emit).toHaveBeenCalledWith('input', 'testing');
+ expect(wrapper.emitted('input')[0]).toStrictEqual(['testing']);
});
describe('highlights', () => {
describe('subject line', () => {
it('does not highlight less than 50 characters', async () => {
- vm.text = 'text less than 50 chars';
+ await wrapper.setProps({ text: 'text less than 50 chars' });
- await nextTick();
- expect(vm.$el.querySelector('.highlights span').textContent).toContain(
- 'text less than 50 chars',
- );
+ expect(findHighlights()).toHaveLength(1);
+ expect(findHighlights().at(0).text()).toContain('text less than 50 chars');
- expect(vm.$el.querySelector('mark').style.display).toBe('none');
+ expect(findMarks()).toHaveLength(1);
+ expect(findMarks().at(0).isVisible()).toBe(false);
});
it('highlights characters over 50 length', async () => {
- vm.text =
- 'text less than 50 chars that should not highlighted. text more than 50 should be highlighted';
+ await wrapper.setProps({
+ text:
+ 'text less than 50 chars that should not highlighted. text more than 50 should be highlighted',
+ });
- await nextTick();
- expect(vm.$el.querySelector('.highlights span').textContent).toContain(
+ expect(findHighlights()).toHaveLength(1);
+ expect(findHighlights().at(0).text()).toContain(
'text less than 50 chars that should not highlighte',
);
- expect(vm.$el.querySelector('mark').style.display).not.toBe('none');
- expect(vm.$el.querySelector('mark').textContent).toBe(
- 'd. text more than 50 should be highlighted',
- );
+ expect(findMarks()).toHaveLength(1);
+ expect(findMarks().at(0).isVisible()).toBe(true);
+ expect(findMarks().at(0).text()).toBe('d. text more than 50 should be highlighted');
});
});
describe('body text', () => {
it('does not highlight body text less tan 72 characters', async () => {
- vm.text = 'subject line\nbody content';
+ await wrapper.setProps({ text: 'subject line\nbody content' });
- await nextTick();
- expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2);
- expect(vm.$el.querySelectorAll('mark')[1].style.display).toBe('none');
+ expect(findHighlights()).toHaveLength(2);
+ expect(findMarks().at(1).isVisible()).toBe(false);
});
it('highlights body text more than 72 characters', async () => {
- vm.text =
- 'subject line\nbody content that will be highlighted when it is more than 72 characters in length';
-
- await nextTick();
- expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2);
- expect(vm.$el.querySelectorAll('mark')[1].style.display).not.toBe('none');
- expect(vm.$el.querySelectorAll('mark')[1].textContent).toBe(' in length');
+ await wrapper.setProps({
+ text:
+ 'subject line\nbody content that will be highlighted when it is more than 72 characters in length',
+ });
+
+ expect(findHighlights()).toHaveLength(2);
+ expect(findMarks().at(1).isVisible()).toBe(true);
+ expect(findMarks().at(1).text()).toBe('in length');
});
it('highlights body text & subject line', async () => {
- vm.text =
- 'text less than 50 chars that should not highlighted\nbody content that will be highlighted when it is more than 72 characters in length';
+ await wrapper.setProps({
+ text:
+ 'text less than 50 chars that should not highlighted\nbody content that will be highlighted when it is more than 72 characters in length',
+ });
- await nextTick();
- expect(vm.$el.querySelectorAll('.highlights span').length).toBe(2);
- expect(vm.$el.querySelectorAll('mark').length).toBe(2);
+ expect(findHighlights()).toHaveLength(2);
+ expect(findMarks()).toHaveLength(2);
- expect(vm.$el.querySelectorAll('mark')[0].textContent).toContain('d');
- expect(vm.$el.querySelectorAll('mark')[1].textContent).toBe(' in length');
+ expect(findMarks().at(0).text()).toContain('d');
+ expect(findMarks().at(1).text()).toBe('in length');
});
});
});
describe('scrolling textarea', () => {
it('updates transform of highlights', async () => {
- vm.text = 'subject line\n\n\n\n\n\n\n\n\n\n\nbody content';
+ await wrapper.setProps({ text: 'subject line\n\n\n\n\n\n\n\n\n\n\nbody content' });
+ findMessage().element.scrollTo(0, 50);
await nextTick();
- vm.$el.querySelector('textarea').scrollTo(0, 50);
- vm.handleScroll();
-
- await nextTick();
- expect(vm.scrollTop).toBe(50);
- expect(vm.$el.querySelector('.highlights').style.transform).toBe('translate3d(0, -50px, 0)');
+ expect(wrapper.find('.highlights').element.style.transform).toBe('translate3d(0, -50px, 0)');
});
});
});
diff --git a/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js b/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
index ee6ed694285..a3fa03a4aa5 100644
--- a/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
@@ -1,123 +1,116 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { GlFormRadioGroup } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import RadioGroup from '~/ide/components/commit_sidebar/radio_group.vue';
import { createStore } from '~/ide/stores';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
describe('IDE commit sidebar radio group', () => {
- let vm;
+ let wrapper;
let store;
- beforeEach(async () => {
+ const createComponent = (config = {}) => {
store = createStore();
- const Component = Vue.extend(RadioGroup);
-
store.state.commit.commitAction = '2';
+ store.state.commit.newBranchName = 'test-123';
- vm = createComponentWithStore(Component, store, {
- value: '1',
- label: 'test',
- checked: true,
+ wrapper = mount(RadioGroup, {
+ store,
+ propsData: config.props,
+ slots: config.slots,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
});
-
- vm.$mount();
-
- await nextTick();
- });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('uses label if present', () => {
- expect(vm.$el.textContent).toContain('test');
- });
+ describe('without input', () => {
+ const props = {
+ value: '1',
+ label: 'test',
+ checked: true,
+ };
- it('uses slot if label is not present', async () => {
- vm.$destroy();
+ it('uses label if present', () => {
+ createComponent({ props });
- vm = new Vue({
- components: {
- RadioGroup,
- },
- store,
- render: (createElement) =>
- createElement('radio-group', { props: { value: '1' } }, 'Testing slot'),
+ expect(wrapper.text()).toContain('test');
});
- vm.$mount();
+ it('uses slot if label is not present', () => {
+ createComponent({ props: { value: '1', checked: true }, slots: { default: 'Testing slot' } });
- await nextTick();
- expect(vm.$el.textContent).toContain('Testing slot');
- });
+ expect(wrapper.text()).toContain('Testing slot');
+ });
- it('updates store when changing radio button', async () => {
- vm.$el.querySelector('input').dispatchEvent(new Event('change'));
+ it('updates store when changing radio button', async () => {
+ createComponent({ props });
- await nextTick();
- expect(store.state.commit.commitAction).toBe('1');
+ await wrapper.find('input').trigger('change');
+
+ expect(store.state.commit.commitAction).toBe('1');
+ });
});
describe('with input', () => {
- beforeEach(async () => {
- vm.$destroy();
-
- const Component = Vue.extend(RadioGroup);
-
- store.state.commit.commitAction = '1';
- store.state.commit.newBranchName = 'test-123';
-
- vm = createComponentWithStore(Component, store, {
- value: '1',
- label: 'test',
- checked: true,
- showInput: true,
- });
-
- vm.$mount();
-
- await nextTick();
- });
+ const props = {
+ value: '2',
+ label: 'test',
+ checked: true,
+ showInput: true,
+ };
it('renders input box when commitAction matches value', () => {
- expect(vm.$el.querySelector('.form-control')).not.toBeNull();
+ createComponent({ props: { ...props, value: '2' } });
+
+ expect(wrapper.find('.form-control').exists()).toBe(true);
});
- it('hides input when commitAction doesnt match value', async () => {
- store.state.commit.commitAction = '2';
+ it('hides input when commitAction doesnt match value', () => {
+ createComponent({ props: { ...props, value: '1' } });
- await nextTick();
- expect(vm.$el.querySelector('.form-control')).toBeNull();
+ expect(wrapper.find('.form-control').exists()).toBe(false);
});
it('updates branch name in store on input', async () => {
- const input = vm.$el.querySelector('.form-control');
- input.value = 'testing-123';
- input.dispatchEvent(new Event('input'));
+ createComponent({ props });
+
+ await wrapper.find('.form-control').setValue('testing-123');
- await nextTick();
expect(store.state.commit.newBranchName).toBe('testing-123');
});
it('renders newBranchName if present', () => {
- const input = vm.$el.querySelector('.form-control');
+ createComponent({ props });
- expect(input.value).toBe('test-123');
+ const input = wrapper.find('.form-control');
+
+ expect(input.element.value).toBe('test-123');
});
});
describe('tooltipTitle', () => {
it('returns title when disabled', () => {
- vm.title = 'test title';
- vm.disabled = true;
+ createComponent({
+ props: { value: '1', label: 'test', disabled: true, title: 'test title' },
+ });
- expect(vm.tooltipTitle).toBe('test title');
+ const tooltip = getBinding(wrapper.findComponent(GlFormRadioGroup).element, 'gl-tooltip');
+ expect(tooltip.value).toBe('test title');
});
it('returns blank when not disabled', () => {
- vm.title = 'test title';
+ createComponent({
+ props: { value: '1', label: 'test', title: 'test title' },
+ });
+
+ const tooltip = getBinding(wrapper.findComponent(GlFormRadioGroup).element, 'gl-tooltip');
- expect(vm.tooltipTitle).not.toBe('test title');
+ expect(tooltip.value).toBe('');
});
});
});
diff --git a/spec/frontend/ide/components/file_row_extra_spec.js b/spec/frontend/ide/components/file_row_extra_spec.js
index 5a7a1fe7db0..281c549a1b4 100644
--- a/spec/frontend/ide/components/file_row_extra_spec.js
+++ b/spec/frontend/ide/components/file_row_extra_spec.js
@@ -1,146 +1,146 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import Vuex from 'vuex';
+import { mount } from '@vue/test-utils';
import FileRowExtra from '~/ide/components/file_row_extra.vue';
-import { createStore } from '~/ide/stores';
+import { createStoreOptions } from '~/ide/stores';
import { file } from '../helpers';
describe('IDE extra file row component', () => {
- let Component;
- let vm;
+ let wrapper;
+ let store;
let unstagedFilesCount = 0;
let stagedFilesCount = 0;
let changesCount = 0;
- beforeAll(() => {
- Component = Vue.extend(FileRowExtra);
- });
+ const createComponent = (fileProps) => {
+ const storeConfig = createStoreOptions();
- beforeEach(() => {
- vm = createComponentWithStore(Component, createStore(), {
- file: {
- ...file('test'),
+ store = new Vuex.Store({
+ ...storeConfig,
+ getters: {
+ getUnstagedFilesCountForPath: () => () => unstagedFilesCount,
+ getStagedFilesCountForPath: () => () => stagedFilesCount,
+ getChangesInFolder: () => () => changesCount,
},
- dropdownOpen: false,
});
- jest.spyOn(vm, 'getUnstagedFilesCountForPath', 'get').mockReturnValue(() => unstagedFilesCount);
- jest.spyOn(vm, 'getStagedFilesCountForPath', 'get').mockReturnValue(() => stagedFilesCount);
- jest.spyOn(vm, 'getChangesInFolder', 'get').mockReturnValue(() => changesCount);
-
- vm.$mount();
- });
+ wrapper = mount(FileRowExtra, {
+ store,
+ propsData: {
+ file: {
+ ...file('test'),
+ type: 'tree',
+ ...fileProps,
+ },
+ dropdownOpen: false,
+ },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
stagedFilesCount = 0;
unstagedFilesCount = 0;
changesCount = 0;
});
- describe('folderChangesTooltip', () => {
- it('returns undefined when changes count is 0', () => {
- changesCount = 0;
-
- expect(vm.folderChangesTooltip).toBe(undefined);
- });
-
+ describe('folder changes tooltip', () => {
[
{ input: 1, output: '1 changed file' },
{ input: 2, output: '2 changed files' },
].forEach(({ input, output }) => {
- it('returns changed files count if changes count is not 0', () => {
+ it('shows changed files count if changes count is not 0', () => {
changesCount = input;
+ createComponent();
- expect(vm.folderChangesTooltip).toBe(output);
+ expect(wrapper.find('.ide-file-modified').attributes('title')).toBe(output);
});
});
});
describe('show tree changes count', () => {
+ const findTreeChangesCount = () => wrapper.find('.ide-tree-changes');
+
it('does not show for blobs', () => {
- vm.file.type = 'blob';
+ createComponent({ type: 'blob' });
- expect(vm.$el.querySelector('.ide-tree-changes')).toBe(null);
+ expect(findTreeChangesCount().exists()).toBe(false);
});
it('does not show when changes count is 0', () => {
- vm.file.type = 'tree';
+ createComponent({ type: 'tree' });
- expect(vm.$el.querySelector('.ide-tree-changes')).toBe(null);
+ expect(findTreeChangesCount().exists()).toBe(false);
});
- it('does not show when tree is open', async () => {
- vm.file.type = 'tree';
- vm.file.opened = true;
+ it('does not show when tree is open', () => {
changesCount = 1;
+ createComponent({ type: 'tree', opened: true });
- await nextTick();
- expect(vm.$el.querySelector('.ide-tree-changes')).toBe(null);
+ expect(findTreeChangesCount().exists()).toBe(false);
});
- it('shows for trees with changes', async () => {
- vm.file.type = 'tree';
- vm.file.opened = false;
+ it('shows for trees with changes', () => {
changesCount = 1;
+ createComponent({ type: 'tree', opened: false });
- await nextTick();
- expect(vm.$el.querySelector('.ide-tree-changes')).not.toBe(null);
+ expect(findTreeChangesCount().exists()).toBe(true);
});
});
describe('changes file icon', () => {
+ const findChangedFileIcon = () => wrapper.find('.file-changed-icon');
+
it('hides when file is not changed', () => {
- expect(vm.$el.querySelector('.file-changed-icon')).toBe(null);
+ createComponent();
+
+ expect(findChangedFileIcon().exists()).toBe(false);
});
- it('shows when file is changed', async () => {
- vm.file.changed = true;
+ it('shows when file is changed', () => {
+ createComponent({ type: 'blob', changed: true });
- await nextTick();
- expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
+ expect(findChangedFileIcon().exists()).toBe(true);
});
- it('shows when file is staged', async () => {
- vm.file.staged = true;
+ it('shows when file is staged', () => {
+ createComponent({ type: 'blob', staged: true });
- await nextTick();
- expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
+ expect(findChangedFileIcon().exists()).toBe(true);
});
- it('shows when file is a tempFile', async () => {
- vm.file.tempFile = true;
+ it('shows when file is a tempFile', () => {
+ createComponent({ type: 'blob', tempFile: true });
- await nextTick();
- expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
+ expect(findChangedFileIcon().exists()).toBe(true);
});
- it('shows when file is renamed', async () => {
- vm.file.prevPath = 'original-file';
+ it('shows when file is renamed', () => {
+ createComponent({ type: 'blob', prevPath: 'original-file' });
- await nextTick();
- expect(vm.$el.querySelector('.file-changed-icon')).not.toBe(null);
+ expect(findChangedFileIcon().exists()).toBe(true);
});
- it('hides when file is renamed', async () => {
- vm.file.prevPath = 'original-file';
- vm.file.type = 'tree';
+ it('hides when tree is renamed', () => {
+ createComponent({ type: 'tree', prevPath: 'original-path' });
- await nextTick();
- expect(vm.$el.querySelector('.file-changed-icon')).toBe(null);
+ expect(findChangedFileIcon().exists()).toBe(false);
});
});
describe('merge request icon', () => {
+ const findMergeRequestIcon = () => wrapper.find('[data-testid="git-merge-icon"]');
+
it('hides when not a merge request change', () => {
- expect(vm.$el.querySelector('[data-testid="git-merge-icon"]')).toBe(null);
+ createComponent();
+
+ expect(findMergeRequestIcon().exists()).toBe(false);
});
- it('shows when a merge request change', async () => {
- vm.file.mrChange = true;
+ it('shows when a merge request change', () => {
+ createComponent({ mrChange: true });
- await nextTick();
- expect(vm.$el.querySelector('[data-testid="git-merge-icon"]')).not.toBe(null);
+ expect(findMergeRequestIcon().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/ide/components/file_templates/bar_spec.js b/spec/frontend/ide/components/file_templates/bar_spec.js
index aaf9c17ccbf..60f37260393 100644
--- a/spec/frontend/ide/components/file_templates/bar_spec.js
+++ b/spec/frontend/ide/components/file_templates/bar_spec.js
@@ -1,19 +1,16 @@
-import Vue, { nextTick } from 'vue';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import Bar from '~/ide/components/file_templates/bar.vue';
import { createStore } from '~/ide/stores';
import { file } from '../../helpers';
describe('IDE file templates bar component', () => {
- let Component;
- let vm;
-
- beforeAll(() => {
- Component = Vue.extend(Bar);
- });
+ let wrapper;
+ let store;
beforeEach(() => {
- const store = createStore();
+ store = createStore();
+ jest.spyOn(store, 'dispatch').mockImplementation();
store.state.openFiles.push({
...file('file'),
@@ -21,24 +18,22 @@ describe('IDE file templates bar component', () => {
active: true,
});
- vm = mountComponentWithStore(Component, { store });
+ wrapper = mount(Bar, { store });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('template type dropdown', () => {
it('renders dropdown component', () => {
- expect(vm.$el.querySelector('.dropdown').textContent).toContain('Choose a type');
+ expect(wrapper.find('.dropdown').text()).toContain('Choose a type');
});
- it('calls setSelectedTemplateType when clicking item', () => {
- jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation();
-
- vm.$el.querySelector('.dropdown-menu button').click();
+ it('calls setSelectedTemplateType when clicking item', async () => {
+ await wrapper.find('.dropdown-menu button').trigger('click');
- expect(vm.setSelectedTemplateType).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/setSelectedTemplateType', {
name: '.gitlab-ci.yml',
key: 'gitlab_ci_ymls',
});
@@ -46,60 +41,52 @@ describe('IDE file templates bar component', () => {
});
describe('template dropdown', () => {
- beforeEach(async () => {
- vm.$store.state.fileTemplates.templates = [
+ beforeEach(() => {
+ store.state.fileTemplates.templates = [
{
name: 'test',
},
];
- vm.$store.state.fileTemplates.selectedTemplateType = {
+ store.state.fileTemplates.selectedTemplateType = {
name: '.gitlab-ci.yml',
key: 'gitlab_ci_ymls',
};
-
- await nextTick();
});
it('renders dropdown component', () => {
- expect(vm.$el.querySelectorAll('.dropdown')[1].textContent).toContain('Choose a template');
+ expect(wrapper.findAll('.dropdown').at(1).text()).toContain('Choose a template');
});
- it('calls fetchTemplate on dropdown open', () => {
- jest.spyOn(vm, 'fetchTemplate').mockImplementation();
-
- vm.$el.querySelectorAll('.dropdown-menu')[1].querySelector('button').click();
+ it('calls fetchTemplate on dropdown open', async () => {
+ await wrapper.findAll('.dropdown-menu').at(1).find('button').trigger('click');
- expect(vm.fetchTemplate).toHaveBeenCalledWith({
+ expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/fetchTemplate', {
name: 'test',
});
});
});
+ const findUndoButton = () => wrapper.find('.btn-default-secondary');
it('shows undo button if updateSuccess is true', async () => {
- vm.$store.state.fileTemplates.updateSuccess = true;
-
+ store.state.fileTemplates.updateSuccess = true;
await nextTick();
- expect(vm.$el.querySelector('.btn-default').style.display).not.toBe('none');
- });
- it('calls undoFileTemplate when clicking undo button', () => {
- jest.spyOn(vm, 'undoFileTemplate').mockImplementation();
+ expect(findUndoButton().isVisible()).toBe(true);
+ });
- vm.$el.querySelector('.btn-default-secondary').click();
+ it('calls undoFileTemplate when clicking undo button', async () => {
+ await findUndoButton().trigger('click');
- expect(vm.undoFileTemplate).toHaveBeenCalled();
+ expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/undoFileTemplate', undefined);
});
it('calls setSelectedTemplateType if activeFile name matches a template', async () => {
const fileName = '.gitlab-ci.yml';
-
- jest.spyOn(vm, 'setSelectedTemplateType').mockImplementation(() => {});
- vm.$store.state.openFiles[0].name = fileName;
-
- vm.setInitialType();
+ store.state.openFiles = [{ ...file(fileName), opened: true, active: true }];
await nextTick();
- expect(vm.setSelectedTemplateType).toHaveBeenCalledWith({
+
+ expect(store.dispatch).toHaveBeenCalledWith('fileTemplates/setSelectedTemplateType', {
name: fileName,
key: 'gitlab_ci_ymls',
});
diff --git a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
deleted file mode 100644
index 45444166a50..00000000000
--- a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
+++ /dev/null
@@ -1,60 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`IDE pipeline stage renders stage details & icon 1`] = `
-<div
- class="ide-stage card gl-mt-3"
->
- <div
- class="card-header"
- >
- <ci-icon-stub
- cssclasses=""
- size="24"
- status="[object Object]"
- />
-
- <strong
- class="gl-ml-3 text-truncate"
- data-container="body"
- >
-
- build
-
- </strong>
-
- <div
- class="gl-mr-3 gl-ml-2"
- >
- <gl-badge-stub
- size="md"
- variant="muted"
- >
- 4
- </gl-badge-stub>
- </div>
-
- <gl-icon-stub
- class="ide-stage-collapse-icon"
- name="chevron-lg-down"
- size="16"
- />
- </div>
-
- <div
- class="card-body p-0"
- >
- <item-stub
- job="[object Object]"
- />
- <item-stub
- job="[object Object]"
- />
- <item-stub
- job="[object Object]"
- />
- <item-stub
- job="[object Object]"
- />
- </div>
-</div>
-`;
diff --git a/spec/frontend/ide/components/jobs/detail/description_spec.js b/spec/frontend/ide/components/jobs/detail/description_spec.js
index 128ccff6568..629c4424314 100644
--- a/spec/frontend/ide/components/jobs/detail/description_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/description_spec.js
@@ -1,44 +1,43 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
import Description from '~/ide/components/jobs/detail/description.vue';
import { jobs } from '../../../mock_data';
describe('IDE job description', () => {
- const Component = Vue.extend(Description);
- let vm;
+ let wrapper;
beforeEach(() => {
- vm = mountComponent(Component, {
- job: jobs[0],
+ wrapper = mount(Description, {
+ propsData: {
+ job: jobs[0],
+ },
});
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders job details', () => {
- expect(vm.$el.textContent).toContain('#1');
- expect(vm.$el.textContent).toContain('test');
+ expect(wrapper.text()).toContain('#1');
+ expect(wrapper.text()).toContain('test');
});
it('renders CI icon', () => {
- expect(
- vm.$el.querySelector('.ci-status-icon [data-testid="status_success_borderless-icon"]'),
- ).not.toBe(null);
+ expect(wrapper.find('.ci-status-icon').findComponent(GlIcon).exists()).toBe(true);
});
it('renders a borderless CI icon', () => {
- expect(
- vm.$el.querySelector('.borderless [data-testid="status_success_borderless-icon"]'),
- ).not.toBe(null);
+ expect(wrapper.find('.borderless').findComponent(GlIcon).exists()).toBe(true);
});
it('renders bridge job details without the job link', () => {
- vm = mountComponent(Component, {
- job: { ...jobs[0], path: undefined },
+ wrapper = mount(Description, {
+ propsData: {
+ job: { ...jobs[0], path: undefined },
+ },
});
- expect(vm.$el.querySelector('[data-testid="description-detail-link"]')).toBe(null);
+ expect(wrapper.find('[data-testid="description-detail-link"]').exists()).toBe(false);
});
});
diff --git a/spec/frontend/ide/components/jobs/detail_spec.js b/spec/frontend/ide/components/jobs/detail_spec.js
index 9122471d421..bf2be3aa595 100644
--- a/spec/frontend/ide/components/jobs/detail_spec.js
+++ b/spec/frontend/ide/components/jobs/detail_spec.js
@@ -1,15 +1,17 @@
-import Vue, { nextTick } from 'vue';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
+
import { TEST_HOST } from 'helpers/test_constants';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import JobDetail from '~/ide/components/jobs/detail.vue';
import { createStore } from '~/ide/stores';
import { jobs } from '../../mock_data';
describe('IDE jobs detail view', () => {
- let vm;
+ let wrapper;
+ let store;
const createComponent = () => {
- const store = createStore();
+ store = createStore();
store.state.pipelines.detailJob = {
...jobs[0],
@@ -18,163 +20,129 @@ describe('IDE jobs detail view', () => {
rawPath: `${TEST_HOST}/raw`,
};
- return createComponentWithStore(Vue.extend(JobDetail), store);
+ jest.spyOn(store, 'dispatch');
+ store.dispatch.mockResolvedValue();
+
+ wrapper = mount(JobDetail, { store });
};
- beforeEach(() => {
- vm = createComponent();
+ const findBuildJobLog = () => wrapper.find('pre');
+ const findScrollToBottomButton = () => wrapper.find('button[aria-label="Scroll to bottom"]');
+ const findScrollToTopButton = () => wrapper.find('button[aria-label="Scroll to top"]');
- jest.spyOn(vm, 'fetchJobLogs').mockResolvedValue();
+ beforeEach(() => {
+ createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('mounted', () => {
- beforeEach(() => {
- vm = vm.$mount();
- });
+ const findJobOutput = () => wrapper.find('.bash');
+ const findBuildLoaderAnimation = () => wrapper.find('.build-loader-animation');
it('calls fetchJobLogs', () => {
- expect(vm.fetchJobLogs).toHaveBeenCalled();
+ expect(store.dispatch).toHaveBeenCalledWith('pipelines/fetchJobLogs', undefined);
});
it('scrolls to bottom', () => {
- expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalled();
+ expect(findBuildJobLog().element.scrollTo).toHaveBeenCalled();
});
it('renders job output', () => {
- expect(vm.$el.querySelector('.bash').textContent).toContain('testing');
+ expect(findJobOutput().text()).toContain('testing');
});
it('renders empty message output', async () => {
- vm.$store.state.pipelines.detailJob.output = '';
-
+ store.state.pipelines.detailJob.output = '';
await nextTick();
- expect(vm.$el.querySelector('.bash').textContent).toContain('No messages were logged');
+
+ expect(findJobOutput().text()).toContain('No messages were logged');
});
it('renders loading icon', () => {
- expect(vm.$el.querySelector('.build-loader-animation')).not.toBe(null);
- expect(vm.$el.querySelector('.build-loader-animation').style.display).toBe('');
+ expect(findBuildLoaderAnimation().exists()).toBe(true);
+ expect(findBuildLoaderAnimation().isVisible()).toBe(true);
});
it('hides output when loading', () => {
- expect(vm.$el.querySelector('.bash')).not.toBe(null);
- expect(vm.$el.querySelector('.bash').style.display).toBe('none');
+ expect(findJobOutput().exists()).toBe(true);
+ expect(findJobOutput().isVisible()).toBe(false);
});
it('hide loading icon when isLoading is false', async () => {
- vm.$store.state.pipelines.detailJob.isLoading = false;
-
+ store.state.pipelines.detailJob.isLoading = false;
await nextTick();
- expect(vm.$el.querySelector('.build-loader-animation').style.display).toBe('none');
- });
- it('resets detailJob when clicking header button', () => {
- jest.spyOn(vm, 'setDetailJob').mockImplementation();
+ expect(findBuildLoaderAnimation().isVisible()).toBe(false);
+ });
- vm.$el.querySelector('.btn').click();
+ it('resets detailJob when clicking header button', async () => {
+ await wrapper.find('.btn').trigger('click');
- expect(vm.setDetailJob).toHaveBeenCalledWith(null);
+ expect(store.dispatch).toHaveBeenCalledWith('pipelines/setDetailJob', null);
});
it('renders raw path link', () => {
- expect(vm.$el.querySelector('.controllers-buttons').getAttribute('href')).toBe(
- `${TEST_HOST}/raw`,
- );
+ expect(wrapper.find('.controllers-buttons').attributes('href')).toBe(`${TEST_HOST}/raw`);
});
});
describe('scroll buttons', () => {
beforeEach(() => {
- vm = createComponent();
- jest.spyOn(vm, 'fetchJobLogs').mockResolvedValue();
- });
-
- afterEach(() => {
- vm.$destroy();
+ createComponent();
});
it.each`
- fnName | btnName | scrollPos
- ${'scrollDown'} | ${'down'} | ${0}
- ${'scrollUp'} | ${'up'} | ${1}
- `('triggers $fnName when clicking $btnName button', async ({ fnName, scrollPos }) => {
- jest.spyOn(vm, fnName).mockImplementation();
-
- vm = vm.$mount();
+ fnName | btnName | scrollPos | targetScrollPos
+ ${'scroll down'} | ${'down'} | ${0} | ${200}
+ ${'scroll up'} | ${'up'} | ${200} | ${0}
+ `('triggers $fnName when clicking $btnName button', async ({ scrollPos, targetScrollPos }) => {
+ jest.spyOn(findBuildJobLog().element, 'offsetHeight', 'get').mockReturnValue(0);
+ jest.spyOn(findBuildJobLog().element, 'scrollHeight', 'get').mockReturnValue(200);
+ jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(scrollPos);
+ findBuildJobLog().element.scrollTo.mockReset();
- vm.scrollPos = scrollPos;
-
- await nextTick();
- vm.$el.querySelector('.btn-scroll:not([disabled])').click();
- expect(vm[fnName]).toHaveBeenCalled();
- });
- });
-
- describe('scrollDown', () => {
- beforeEach(() => {
- vm = vm.$mount();
-
- jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
- });
-
- it('scrolls build trace to bottom', () => {
- jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(1000);
-
- vm.scrollDown();
-
- expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 1000);
- });
- });
-
- describe('scrollUp', () => {
- beforeEach(() => {
- vm = vm.$mount();
-
- jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
- });
+ await findBuildJobLog().trigger('scroll'); // trigger button updates
- it('scrolls build trace to top', () => {
- vm.scrollUp();
+ await wrapper.find('.controllers button:not(:disabled)').trigger('click');
- expect(vm.$refs.buildJobLog.scrollTo).toHaveBeenCalledWith(0, 0);
+ expect(findBuildJobLog().element.scrollTo).toHaveBeenCalledWith(0, targetScrollPos);
});
});
- describe('scrollBuildLog', () => {
+ describe('scrolling build log', () => {
beforeEach(() => {
- vm = vm.$mount();
- jest.spyOn(vm.$refs.buildJobLog, 'scrollTo').mockImplementation();
- jest.spyOn(vm.$refs.buildJobLog, 'offsetHeight', 'get').mockReturnValue(100);
- jest.spyOn(vm.$refs.buildJobLog, 'scrollHeight', 'get').mockReturnValue(200);
+ jest.spyOn(findBuildJobLog().element, 'offsetHeight', 'get').mockReturnValue(100);
+ jest.spyOn(findBuildJobLog().element, 'scrollHeight', 'get').mockReturnValue(200);
});
- it('sets scrollPos to bottom when at the bottom', () => {
- jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(100);
+ it('keeps scroll at bottom when already at the bottom', async () => {
+ jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(100);
- vm.scrollBuildLog();
+ await findBuildJobLog().trigger('scroll');
- expect(vm.scrollPos).toBe(1);
+ expect(findScrollToBottomButton().attributes('disabled')).toBe('disabled');
+ expect(findScrollToTopButton().attributes('disabled')).not.toBe('disabled');
});
- it('sets scrollPos to top when at the top', () => {
- jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(0);
- vm.scrollPos = 1;
+ it('keeps scroll at top when already at top', async () => {
+ jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(0);
- vm.scrollBuildLog();
+ await findBuildJobLog().trigger('scroll');
- expect(vm.scrollPos).toBe(0);
+ expect(findScrollToBottomButton().attributes('disabled')).not.toBe('disabled');
+ expect(findScrollToTopButton().attributes('disabled')).toBe('disabled');
});
- it('resets scrollPos when not at top or bottom', () => {
- jest.spyOn(vm.$refs.buildJobLog, 'scrollTop', 'get').mockReturnValue(10);
+ it('resets scroll when not at top or bottom', async () => {
+ jest.spyOn(findBuildJobLog().element, 'scrollTop', 'get').mockReturnValue(10);
- vm.scrollBuildLog();
+ await findBuildJobLog().trigger('scroll');
- expect(vm.scrollPos).toBe('');
+ expect(findScrollToBottomButton().attributes('disabled')).not.toBe('disabled');
+ expect(findScrollToTopButton().attributes('disabled')).not.toBe('disabled');
});
});
});
diff --git a/spec/frontend/ide/components/jobs/item_spec.js b/spec/frontend/ide/components/jobs/item_spec.js
index c76760a5522..32e27333e42 100644
--- a/spec/frontend/ide/components/jobs/item_spec.js
+++ b/spec/frontend/ide/components/jobs/item_spec.js
@@ -1,36 +1,38 @@
-import Vue, { nextTick } from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+
import JobItem from '~/ide/components/jobs/item.vue';
import { jobs } from '../../mock_data';
describe('IDE jobs item', () => {
- const Component = Vue.extend(JobItem);
const job = jobs[0];
- let vm;
+ let wrapper;
beforeEach(() => {
- vm = mountComponent(Component, {
- job,
- });
+ wrapper = mount(JobItem, { propsData: { job } });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders job details', () => {
- expect(vm.$el.textContent).toContain(job.name);
- expect(vm.$el.textContent).toContain(`#${job.id}`);
+ expect(wrapper.text()).toContain(job.name);
+ expect(wrapper.text()).toContain(`#${job.id}`);
});
it('renders CI icon', () => {
- expect(vm.$el.querySelector('[data-testid="status_success_borderless-icon"]')).not.toBe(null);
+ expect(wrapper.find('[data-testid="status_success_borderless-icon"]').exists()).toBe(true);
});
it('does not render view logs button if not started', async () => {
- vm.job.started = false;
+ await wrapper.setProps({
+ job: {
+ ...jobs[0],
+ started: false,
+ },
+ });
- await nextTick();
- expect(vm.$el.querySelector('.btn')).toBe(null);
+ expect(wrapper.findComponent(GlButton).exists()).toBe(false);
});
});
diff --git a/spec/frontend/ide/components/jobs/stage_spec.js b/spec/frontend/ide/components/jobs/stage_spec.js
index 1d5e5743a4d..52fbff2f497 100644
--- a/spec/frontend/ide/components/jobs/stage_spec.js
+++ b/spec/frontend/ide/components/jobs/stage_spec.js
@@ -18,8 +18,9 @@ describe('IDE pipeline stage', () => {
},
};
- const findHeader = () => wrapper.findComponent({ ref: 'cardHeader' });
- const findJobList = () => wrapper.findComponent({ ref: 'jobList' });
+ const findHeader = () => wrapper.find('[data-testid="card-header"]');
+ const findJobList = () => wrapper.find('[data-testid="job-list"]');
+ const findStageTitle = () => wrapper.find('[data-testid="stage-title"]');
const createComponent = (props) => {
wrapper = shallowMount(Stage, {
@@ -65,9 +66,9 @@ describe('IDE pipeline stage', () => {
expect(wrapper.emitted().clickViewLog[0][0]).toBe(job);
});
- it('renders stage details & icon', () => {
+ it('renders stage title', () => {
createComponent();
- expect(wrapper.element).toMatchSnapshot();
+ expect(findStageTitle().isVisible()).toBe(true);
});
describe('when collapsed', () => {
diff --git a/spec/frontend/ide/components/new_dropdown/button_spec.js b/spec/frontend/ide/components/new_dropdown/button_spec.js
index 298d7b810e1..a9cfdfd20c1 100644
--- a/spec/frontend/ide/components/new_dropdown/button_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/button_spec.js
@@ -1,59 +1,60 @@
-import Vue, { nextTick } from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import Button from '~/ide/components/new_dropdown/button.vue';
describe('IDE new entry dropdown button component', () => {
- let Component;
- let vm;
-
- beforeAll(() => {
- Component = Vue.extend(Button);
- });
-
- beforeEach(() => {
- vm = mountComponent(Component, {
- label: 'Testing',
- icon: 'doc-new',
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(Button, {
+ propsData: {
+ label: 'Testing',
+ icon: 'doc-new',
+ ...props,
+ },
});
-
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders button with label', () => {
- expect(vm.$el.textContent).toContain('Testing');
+ createComponent();
+
+ expect(wrapper.text()).toContain('Testing');
});
it('renders icon', () => {
- expect(vm.$el.querySelector('[data-testid="doc-new-icon"]')).not.toBe(null);
+ createComponent();
+
+ expect(wrapper.find('[data-testid="doc-new-icon"]').exists()).toBe(true);
});
- it('emits click event', () => {
- vm.$el.click();
+ it('emits click event', async () => {
+ createComponent();
- expect(vm.$emit).toHaveBeenCalledWith('click');
+ await wrapper.trigger('click');
+
+ expect(wrapper.emitted('click')).toHaveLength(1);
});
- it('hides label if showLabel is false', async () => {
- vm.showLabel = false;
+ it('hides label if showLabel is false', () => {
+ createComponent({ showLabel: false });
- await nextTick();
- expect(vm.$el.textContent).not.toContain('Testing');
+ expect(wrapper.text()).not.toContain('Testing');
});
- describe('tooltipTitle', () => {
+ describe('tooltip title', () => {
it('returns empty string when showLabel is true', () => {
- expect(vm.tooltipTitle).toBe('');
+ createComponent({ showLabel: true });
+
+ expect(wrapper.attributes('title')).toBe('');
});
- it('returns label', async () => {
- vm.showLabel = false;
+ it('returns label', () => {
+ createComponent({ showLabel: false });
- await nextTick();
- expect(vm.tooltipTitle).toBe('Testing');
+ expect(wrapper.attributes('title')).toBe('Testing');
});
});
});
diff --git a/spec/frontend/ide/components/new_dropdown/modal_spec.js b/spec/frontend/ide/components/new_dropdown/modal_spec.js
index 68cc08d2ebc..c6f9fd0c4ea 100644
--- a/spec/frontend/ide/components/new_dropdown/modal_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/modal_spec.js
@@ -1,6 +1,6 @@
import { GlButton, GlModal } from '@gitlab/ui';
import { nextTick } from 'vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import Modal from '~/ide/components/new_dropdown/modal.vue';
import { createStore } from '~/ide/stores';
import { stubComponent } from 'helpers/stub_component';
@@ -341,7 +341,7 @@ describe('new file modal component', () => {
});
it('does not trigger flash', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -360,7 +360,7 @@ describe('new file modal component', () => {
});
it('does not trigger flash', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
});
});
@@ -380,7 +380,7 @@ describe('new file modal component', () => {
});
it('creates flash', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'The name "src" is already taken in this directory.',
fadeTransition: false,
addBodyClass: true,
@@ -405,7 +405,7 @@ describe('new file modal component', () => {
});
it('does not create flash', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
it('dispatches event', () => {
diff --git a/spec/frontend/ide/components/new_dropdown/upload_spec.js b/spec/frontend/ide/components/new_dropdown/upload_spec.js
index 3eafe9e7ccb..fc643589d51 100644
--- a/spec/frontend/ide/components/new_dropdown/upload_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/upload_spec.js
@@ -1,39 +1,34 @@
-import Vue from 'vue';
-import createComponent from 'helpers/vue_mount_component_helper';
-import upload from '~/ide/components/new_dropdown/upload.vue';
+import { mount } from '@vue/test-utils';
+import Upload from '~/ide/components/new_dropdown/upload.vue';
describe('new dropdown upload', () => {
- let vm;
+ let wrapper;
beforeEach(() => {
- const Component = Vue.extend(upload);
-
- vm = createComponent(Component, {
- path: '',
+ wrapper = mount(Upload, {
+ propsData: {
+ path: '',
+ },
});
-
- vm.entryName = 'testing';
-
- jest.spyOn(vm, '$emit');
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('openFile', () => {
it('calls for each file', () => {
const files = ['test', 'test2', 'test3'];
- jest.spyOn(vm, 'readFile').mockImplementation(() => {});
- jest.spyOn(vm.$refs.fileUpload, 'files', 'get').mockReturnValue(files);
+ jest.spyOn(wrapper.vm, 'readFile').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm.$refs.fileUpload, 'files', 'get').mockReturnValue(files);
- vm.openFile();
+ wrapper.vm.openFile();
- expect(vm.readFile.mock.calls.length).toBe(3);
+ expect(wrapper.vm.readFile.mock.calls.length).toBe(3);
files.forEach((file, i) => {
- expect(vm.readFile.mock.calls[i]).toEqual([file]);
+ expect(wrapper.vm.readFile.mock.calls[i]).toEqual([file]);
});
});
});
@@ -48,7 +43,7 @@ describe('new dropdown upload', () => {
type: 'images/png',
};
- vm.readFile(file);
+ wrapper.vm.readFile(file);
expect(FileReader.prototype.readAsDataURL).toHaveBeenCalledWith(file);
});
@@ -71,35 +66,39 @@ describe('new dropdown upload', () => {
it('calls readAsText and creates file in plain text (without encoding) if the file content is plain text', async () => {
const waitForCreate = new Promise((resolve) => {
- vm.$on('create', resolve);
+ wrapper.vm.$on('create', resolve);
});
- vm.createFile(textTarget, textFile);
+ wrapper.vm.createFile(textTarget, textFile);
expect(FileReader.prototype.readAsText).toHaveBeenCalledWith(textFile);
await waitForCreate;
- expect(vm.$emit).toHaveBeenCalledWith('create', {
- name: textFile.name,
- type: 'blob',
- content: 'plain text',
- rawPath: '',
- mimeType: 'test/mime-text',
- });
+ expect(wrapper.emitted('create')[0]).toStrictEqual([
+ {
+ name: textFile.name,
+ type: 'blob',
+ content: 'plain text',
+ rawPath: '',
+ mimeType: 'test/mime-text',
+ },
+ ]);
});
it('creates a blob URL for the content if binary', () => {
- vm.createFile(binaryTarget, binaryFile);
+ wrapper.vm.createFile(binaryTarget, binaryFile);
expect(FileReader.prototype.readAsText).not.toHaveBeenCalled();
- expect(vm.$emit).toHaveBeenCalledWith('create', {
- name: binaryFile.name,
- type: 'blob',
- content: 'ðððð',
- rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
- mimeType: 'test/mime-binary',
- });
+ expect(wrapper.emitted('create')[0]).toStrictEqual([
+ {
+ name: binaryFile.name,
+ type: 'blob',
+ content: 'ðððð',
+ rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
+ mimeType: 'test/mime-binary',
+ },
+ ]);
});
});
});
diff --git a/spec/frontend/ide/components/shared/tokened_input_spec.js b/spec/frontend/ide/components/shared/tokened_input_spec.js
index 2efef9918b1..b70c9659e46 100644
--- a/spec/frontend/ide/components/shared/tokened_input_spec.js
+++ b/spec/frontend/ide/components/shared/tokened_input_spec.js
@@ -1,5 +1,4 @@
-import Vue, { nextTick } from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import TokenedInput from '~/ide/components/shared/tokened_input.vue';
const TEST_PLACEHOLDER = 'Searching in test';
@@ -10,120 +9,106 @@ const TEST_TOKENS = [
];
const TEST_VALUE = 'lorem';
-function getTokenElements(vm) {
- return Array.from(vm.$el.querySelectorAll('.filtered-search-token button'));
-}
-
-function createBackspaceEvent() {
- const e = new Event('keyup');
- e.keyCode = 8;
- e.which = e.keyCode;
- e.altKey = false;
- e.ctrlKey = true;
- e.shiftKey = false;
- e.metaKey = false;
- return e;
+function getTokenElements(wrapper) {
+ return wrapper.findAll('.filtered-search-token button');
}
describe('IDE shared/TokenedInput', () => {
- const Component = Vue.extend(TokenedInput);
- let vm;
-
- beforeEach(() => {
- vm = mountComponent(Component, {
- tokens: TEST_TOKENS,
- placeholder: TEST_PLACEHOLDER,
- value: TEST_VALUE,
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(TokenedInput, {
+ propsData: {
+ tokens: TEST_TOKENS,
+ placeholder: TEST_PLACEHOLDER,
+ value: TEST_VALUE,
+ ...props,
+ },
+ attachTo: document.body,
});
-
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders tokens', () => {
- const renderedTokens = getTokenElements(vm).map((x) => x.textContent.trim());
+ createComponent();
+ const renderedTokens = getTokenElements(wrapper).wrappers.map((w) => w.text());
expect(renderedTokens).toEqual(TEST_TOKENS.map((x) => x.label));
});
it('renders input', () => {
- expect(vm.$refs.input).toBeInstanceOf(HTMLInputElement);
- expect(vm.$refs.input).toHaveValue(TEST_VALUE);
- });
-
- it('renders placeholder, when tokens are empty', async () => {
- vm.tokens = [];
+ createComponent();
- await nextTick();
- expect(vm.$refs.input).toHaveAttr('placeholder', TEST_PLACEHOLDER);
+ expect(wrapper.find('input').element).toBeInstanceOf(HTMLInputElement);
+ expect(wrapper.find('input').element).toHaveValue(TEST_VALUE);
});
- it('triggers "removeToken" on token click', () => {
- getTokenElements(vm)[0].click();
+ it('renders placeholder, when tokens are empty', () => {
+ createComponent({ tokens: [] });
- expect(vm.$emit).toHaveBeenCalledWith('removeToken', TEST_TOKENS[0]);
+ expect(wrapper.find('input').attributes('placeholder')).toBe(TEST_PLACEHOLDER);
});
- it('when input triggers backspace event, it calls "onBackspace"', () => {
- jest.spyOn(vm, 'onBackspace').mockImplementation(() => {});
+ it('triggers "removeToken" on token click', async () => {
+ createComponent();
+ await getTokenElements(wrapper).at(0).trigger('click');
- vm.$refs.input.dispatchEvent(createBackspaceEvent());
- vm.$refs.input.dispatchEvent(createBackspaceEvent());
-
- expect(vm.onBackspace).toHaveBeenCalledTimes(2);
+ expect(wrapper.emitted('removeToken')[0]).toStrictEqual([TEST_TOKENS[0]]);
});
- it('triggers "removeToken" on backspaces when value is empty', () => {
- vm.value = '';
-
- vm.onBackspace();
+ it('removes token on backspace when value is empty', async () => {
+ createComponent({ value: '' });
- expect(vm.$emit).not.toHaveBeenCalled();
- expect(vm.backspaceCount).toEqual(1);
+ expect(wrapper.emitted('removeToken')).toBeUndefined();
- vm.onBackspace();
+ await wrapper.find('input').trigger('keyup.delete');
+ await wrapper.find('input').trigger('keyup.delete');
- expect(vm.$emit).toHaveBeenCalledWith('removeToken', TEST_TOKENS[TEST_TOKENS.length - 1]);
- expect(vm.backspaceCount).toEqual(0);
+ expect(wrapper.emitted('removeToken')[0]).toStrictEqual([TEST_TOKENS[TEST_TOKENS.length - 1]]);
});
- it('does not trigger "removeToken" on backspaces when value is not empty', () => {
- vm.onBackspace();
- vm.onBackspace();
+ it('does not trigger "removeToken" on backspaces when value is not empty', async () => {
+ createComponent({ value: 'SOMETHING' });
+
+ await wrapper.find('input').trigger('keyup.delete');
+ await wrapper.find('input').trigger('keyup.delete');
- expect(vm.backspaceCount).toEqual(0);
- expect(vm.$emit).not.toHaveBeenCalled();
+ expect(wrapper.emitted('removeToken')).toBeUndefined();
});
- it('does not trigger "removeToken" on backspaces when tokens are empty', () => {
- vm.tokens = [];
+ it('does not trigger "removeToken" on backspaces when tokens are empty', async () => {
+ createComponent({ value: '', tokens: [] });
- vm.onBackspace();
- vm.onBackspace();
+ await wrapper.find('input').trigger('keyup.delete');
+ await wrapper.find('input').trigger('keyup.delete');
- expect(vm.backspaceCount).toEqual(0);
- expect(vm.$emit).not.toHaveBeenCalled();
+ expect(wrapper.emitted('removeToken')).toBeUndefined();
});
- it('triggers "focus" on input focus', () => {
- vm.$refs.input.dispatchEvent(new Event('focus'));
+ it('triggers "focus" on input focus', async () => {
+ createComponent();
- expect(vm.$emit).toHaveBeenCalledWith('focus');
+ await wrapper.find('input').trigger('focus');
+
+ expect(wrapper.emitted('focus')).toHaveLength(1);
});
- it('triggers "blur" on input blur', () => {
- vm.$refs.input.dispatchEvent(new Event('blur'));
+ it('triggers "blur" on input blur', async () => {
+ createComponent();
+
+ await wrapper.find('input').trigger('blur');
- expect(vm.$emit).toHaveBeenCalledWith('blur');
+ expect(wrapper.emitted('blur')).toHaveLength(1);
});
- it('triggers "input" with value on input change', () => {
- vm.$refs.input.value = 'something-else';
- vm.$refs.input.dispatchEvent(new Event('input'));
+ it('triggers "input" with value on input change', async () => {
+ createComponent();
+
+ await wrapper.find('input').setValue('something-else');
- expect(vm.$emit).toHaveBeenCalledWith('input', 'something-else');
+ expect(wrapper.emitted('input')[0]).toStrictEqual(['something-else']);
});
});
diff --git a/spec/frontend/ide/components/terminal/terminal_spec.js b/spec/frontend/ide/components/terminal/terminal_spec.js
index 4da3e1910e9..0d22f7f73fe 100644
--- a/spec/frontend/ide/components/terminal/terminal_spec.js
+++ b/spec/frontend/ide/components/terminal/terminal_spec.js
@@ -171,7 +171,7 @@ describe('IDE Terminal', () => {
it('creates the terminal', () => {
expect(GLTerminal).toHaveBeenCalledWith(wrapper.vm.$refs.terminal);
- expect(wrapper.vm.glterminal).toBeTruthy();
+ expect(wrapper.vm.glterminal).toBeInstanceOf(GLTerminal);
});
describe('scroll listener', () => {
diff --git a/spec/frontend/ide/init_gitlab_web_ide_spec.js b/spec/frontend/ide/init_gitlab_web_ide_spec.js
index ec8559f1b56..067da25cb52 100644
--- a/spec/frontend/ide/init_gitlab_web_ide_spec.js
+++ b/spec/frontend/ide/init_gitlab_web_ide_spec.js
@@ -6,7 +6,7 @@ jest.mock('@gitlab/web-ide');
const ROOT_ELEMENT_ID = 'ide';
const TEST_NONCE = 'test123nonce';
-const TEST_PROJECT = { path_with_namespace: 'group1/project1' };
+const TEST_PROJECT_PATH = 'group1/project1';
const TEST_BRANCH_NAME = '12345-foo-patch';
const TEST_GITLAB_URL = 'https://test-gitlab/';
const TEST_GITLAB_WEB_IDE_PUBLIC_PATH = 'test/webpack/assets/gitlab-web-ide/public/path';
@@ -18,7 +18,7 @@ describe('ide/init_gitlab_web_ide', () => {
el.id = ROOT_ELEMENT_ID;
// why: We'll test that this class is removed later
el.classList.add('ide-loading');
- el.dataset.project = JSON.stringify(TEST_PROJECT);
+ el.dataset.projectPath = TEST_PROJECT_PATH;
el.dataset.cspNonce = TEST_NONCE;
el.dataset.branchName = TEST_BRANCH_NAME;
@@ -43,7 +43,7 @@ describe('ide/init_gitlab_web_ide', () => {
it('calls start with element', () => {
expect(start).toHaveBeenCalledWith(findRootElement(), {
baseUrl: `${TEST_HOST}/${TEST_GITLAB_WEB_IDE_PUBLIC_PATH}`,
- projectPath: TEST_PROJECT.path_with_namespace,
+ projectPath: TEST_PROJECT_PATH,
ref: TEST_BRANCH_NAME,
gitlabUrl: TEST_GITLAB_URL,
nonce: TEST_NONCE,
diff --git a/spec/frontend/ide/stores/actions/merge_request_spec.js b/spec/frontend/ide/stores/actions/merge_request_spec.js
index abc3ba5b0a2..f1b2a7b881a 100644
--- a/spec/frontend/ide/stores/actions/merge_request_spec.js
+++ b/spec/frontend/ide/stores/actions/merge_request_spec.js
@@ -3,7 +3,7 @@ import { range } from 'lodash';
import { stubPerformanceWebAPI } from 'helpers/performance';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { leftSidebarViews, PERMISSION_READ_MR, MAX_MR_FILES_AUTO_OPEN } from '~/ide/constants';
import service from '~/ide/services';
import { createStore } from '~/ide/stores';
@@ -139,8 +139,8 @@ describe('IDE store merge request actions', () => {
branchId: 'bar',
})
.catch(() => {
- expect(createFlash).toHaveBeenCalled();
- expect(createFlash.mock.calls[0][0].message).toBe(
+ expect(createAlert).toHaveBeenCalled();
+ expect(createAlert.mock.calls[0][0].message).toBe(
'Error fetching merge requests for bar',
);
});
@@ -520,7 +520,7 @@ describe('IDE store merge request actions', () => {
store.dispatch.mockRejectedValue();
return openMergeRequest(store, mr).catch(() => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: expect.any(String),
});
});
diff --git a/spec/frontend/ide/stores/actions/project_spec.js b/spec/frontend/ide/stores/actions/project_spec.js
index cc7d39b4d43..5a5ead4c544 100644
--- a/spec/frontend/ide/stores/actions/project_spec.js
+++ b/spec/frontend/ide/stores/actions/project_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import testAction from 'helpers/vuex_action_helper';
import api from '~/api';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import service from '~/ide/services';
import { createStore } from '~/ide/stores';
import {
@@ -97,7 +97,7 @@ describe('IDE store project actions', () => {
});
afterEach(() => {
- createFlash.mockRestore();
+ createAlert.mockRestore();
});
it.each`
@@ -122,7 +122,7 @@ describe('IDE store project actions', () => {
if (!responseSuccess) {
expect(logError).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
}
});
});
diff --git a/spec/frontend/ide/stores/actions_spec.js b/spec/frontend/ide/stores/actions_spec.js
index f6d54491d77..fd2c3d18813 100644
--- a/spec/frontend/ide/stores/actions_spec.js
+++ b/spec/frontend/ide/stores/actions_spec.js
@@ -4,6 +4,7 @@ import testAction from 'helpers/vuex_action_helper';
import eventHub from '~/ide/eventhub';
import { createRouter } from '~/ide/ide_router';
import { createStore } from '~/ide/stores';
+import { createAlert } from '~/flash';
import {
init,
stageAllChanges,
@@ -29,6 +30,7 @@ jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
joinPaths: jest.requireActual('~/lib/utils/url_utility').joinPaths,
}));
+jest.mock('~/flash');
describe('Multi-file store actions', () => {
let store;
@@ -138,7 +140,7 @@ describe('Multi-file store actions', () => {
name: 'testing/test',
type: 'tree',
});
- expect(tree.tree[0].tempFile).toBeTruthy();
+ expect(tree.tree[0].tempFile).toBe(true);
expect(tree.tree[0].name).toBe('test');
expect(tree.tree[0].type).toBe('tree');
});
@@ -158,7 +160,7 @@ describe('Multi-file store actions', () => {
type: 'tree',
});
expect(store.state.entries[tree.path].tempFile).toEqual(false);
- expect(document.querySelector('.flash-alert')).not.toBeNull();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -173,7 +175,7 @@ describe('Multi-file store actions', () => {
});
const f = store.state.entries[name];
- expect(f.tempFile).toBeTruthy();
+ expect(f.tempFile).toBe(true);
expect(f.mimeType).toBe('test/mime');
expect(store.state.trees['abcproject/mybranch'].tree.length).toBe(1);
});
@@ -216,8 +218,10 @@ describe('Multi-file store actions', () => {
name: 'test',
type: 'blob',
});
- expect(document.querySelector('.flash-alert')?.textContent.trim()).toEqual(
- `The name "${f.name}" is already taken in this directory.`,
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: `The name "${f.name}" is already taken in this directory.`,
+ }),
);
});
});
@@ -930,7 +934,7 @@ describe('Multi-file store actions', () => {
);
expect(dispatch.mock.calls).toHaveLength(0);
- expect(document.querySelector('.flash-alert')).not.toBeNull();
+ expect(createAlert).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/ide/stores/modules/commit/mutations_spec.js b/spec/frontend/ide/stores/modules/commit/mutations_spec.js
index 50342832d75..d277157e737 100644
--- a/spec/frontend/ide/stores/modules/commit/mutations_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/mutations_spec.js
@@ -37,7 +37,7 @@ describe('IDE commit module mutations', () => {
it('updates submitCommitLoading', () => {
mutations.UPDATE_LOADING(state, true);
- expect(state.submitCommitLoading).toBeTruthy();
+ expect(state.submitCommitLoading).toBe(true);
});
});
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
index ecda7f304ba..f48797415df 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import * as actions from '~/ide/stores/modules/terminal/actions/session_controls';
import { STARTING, PENDING, STOPPING, STOPPED } from '~/ide/stores/modules/terminal/constants';
import * as messages from '~/ide/stores/modules/terminal/messages';
@@ -89,7 +89,7 @@ describe('IDE store terminal session controls actions', () => {
it('flashes message', () => {
actions.receiveStartSessionError({ dispatch });
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: messages.UNEXPECTED_ERROR_STARTING,
});
});
@@ -163,7 +163,7 @@ describe('IDE store terminal session controls actions', () => {
it('flashes message', () => {
actions.receiveStopSessionError({ dispatch });
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: messages.UNEXPECTED_ERROR_STOPPING,
});
});
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
index eabc69b23aa..fe2328f25c2 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import * as actions from '~/ide/stores/modules/terminal/actions/session_status';
import { PENDING, RUNNING, STOPPING, STOPPED } from '~/ide/stores/modules/terminal/constants';
import * as messages from '~/ide/stores/modules/terminal/messages';
@@ -115,7 +115,7 @@ describe('IDE store terminal session controls actions', () => {
it('flashes message', () => {
actions.receiveSessionStatusError({ dispatch });
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: messages.UNEXPECTED_ERROR_STATUS,
});
});
diff --git a/spec/frontend/ide/stores/mutations/tree_spec.js b/spec/frontend/ide/stores/mutations/tree_spec.js
index 6935e57578f..a8c0d7ba2c8 100644
--- a/spec/frontend/ide/stores/mutations/tree_spec.js
+++ b/spec/frontend/ide/stores/mutations/tree_spec.js
@@ -17,11 +17,11 @@ describe('Multi-file store tree mutations', () => {
it('toggles tree open', () => {
mutations.TOGGLE_TREE_OPEN(localState, localTree.path);
- expect(localTree.opened).toBeTruthy();
+ expect(localTree.opened).toBe(true);
mutations.TOGGLE_TREE_OPEN(localState, localTree.path);
- expect(localTree.opened).toBeFalsy();
+ expect(localTree.opened).toBe(false);
});
});
diff --git a/spec/frontend/ide/stores/mutations_spec.js b/spec/frontend/ide/stores/mutations_spec.js
index 4602a0837e0..4117f2648bd 100644
--- a/spec/frontend/ide/stores/mutations_spec.js
+++ b/spec/frontend/ide/stores/mutations_spec.js
@@ -30,13 +30,13 @@ describe('Multi-file store mutations', () => {
entry,
});
- expect(entry.loading).toBeTruthy();
+ expect(entry.loading).toBe(true);
mutations.TOGGLE_LOADING(localState, {
entry,
});
- expect(entry.loading).toBeFalsy();
+ expect(entry.loading).toBe(false);
});
it('toggles loading of entry and sets specific value', () => {
@@ -44,14 +44,14 @@ describe('Multi-file store mutations', () => {
entry,
});
- expect(entry.loading).toBeTruthy();
+ expect(entry.loading).toBe(true);
mutations.TOGGLE_LOADING(localState, {
entry,
forceValue: true,
});
- expect(entry.loading).toBeTruthy();
+ expect(entry.loading).toBe(true);
});
});
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index fd9d481251d..4efc0ac6028 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -1,4 +1,5 @@
import { languages } from 'monaco-editor';
+import { setDiagnosticsOptions as yamlDiagnosticsOptions } from 'monaco-yaml';
import {
isTextFile,
registerLanguages,
@@ -203,7 +204,6 @@ describe('WebIDE utils', () => {
};
jest.spyOn(languages.json.jsonDefaults, 'setDiagnosticsOptions');
- jest.spyOn(languages.yaml.yamlDefaults, 'setDiagnosticsOptions');
});
it('registers the given schemas with monaco for both json and yaml languages', () => {
@@ -212,7 +212,7 @@ describe('WebIDE utils', () => {
expect(languages.json.jsonDefaults.setDiagnosticsOptions).toHaveBeenCalledWith(
expect.objectContaining({ schemas: [schema] }),
);
- expect(languages.yaml.yamlDefaults.setDiagnosticsOptions).toHaveBeenCalledWith(
+ expect(yamlDiagnosticsOptions).toHaveBeenCalledWith(
expect.objectContaining({ schemas: [schema] }),
);
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index f97ea046cbe..a0115cb9349 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -5,13 +5,14 @@ import VueApollo from 'vue-apollo';
import MockAdapter from 'axios-mock-adapter';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import httpStatus from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
import { STATUSES } from '~/import_entities/constants';
import { i18n, ROOT_NAMESPACE } from '~/import_entities/import_groups/constants';
import ImportTable from '~/import_entities/import_groups/components/import_table.vue';
import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
+import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import { availableNamespacesFixture, generateFakeEntry } from '../graphql/fixtures';
@@ -246,7 +247,7 @@ describe('import table', () => {
await findImportButtons()[0].trigger('click');
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: i18n.ERROR_IMPORT,
}),
@@ -528,6 +529,17 @@ describe('import table', () => {
});
});
+ it('renders pagination bar with storage key', async () => {
+ createComponent({
+ bulkImportSourceGroups: () => new Promise(() => {}),
+ });
+ await waitForPromises();
+
+ expect(wrapper.getComponent(PaginationBar).props('storageKey')).toBe(
+ ImportTable.LOCAL_STORAGE_KEY,
+ );
+ });
+
describe('unavailable features warning', () => {
it('renders alert when there are unavailable features', async () => {
createComponent({
diff --git a/spec/frontend/import_entities/import_groups/services/status_poller_spec.js b/spec/frontend/import_entities/import_groups/services/status_poller_spec.js
index 01f976562c6..13d2a95ca14 100644
--- a/spec/frontend/import_entities/import_groups/services/status_poller_spec.js
+++ b/spec/frontend/import_entities/import_groups/services/status_poller_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import Visibility from 'visibilityjs';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { STATUSES } from '~/import_entities/constants';
import { StatusPoller } from '~/import_entities/import_groups/services/status_poller';
import axios from '~/lib/utils/axios_utils';
@@ -83,7 +83,7 @@ describe('Bulk import status poller', () => {
it('when error occurs shows flash with error', () => {
const [[pollConfig]] = Poll.mock.calls;
pollConfig.errorCallback();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('when success response arrives updates relevant group status', () => {
diff --git a/spec/frontend/import_entities/import_projects/components/advanced_settings_spec.js b/spec/frontend/import_entities/import_projects/components/advanced_settings_spec.js
new file mode 100644
index 00000000000..68716600592
--- /dev/null
+++ b/spec/frontend/import_entities/import_projects/components/advanced_settings_spec.js
@@ -0,0 +1,60 @@
+import { mount } from '@vue/test-utils';
+import { GlFormCheckbox } from '@gitlab/ui';
+import AdvancedSettingsPanel from '~/import_entities/import_projects/components/advanced_settings.vue';
+
+describe('Import Advanced Settings', () => {
+ let wrapper;
+ const OPTIONAL_STAGES = [
+ { name: 'stage1', label: 'Stage 1' },
+ { name: 'stage2', label: 'Stage 2', details: 'Extra details' },
+ ];
+
+ const createComponent = () => {
+ wrapper = mount(AdvancedSettingsPanel, {
+ propsData: {
+ stages: OPTIONAL_STAGES,
+ value: {
+ stage1: false,
+ stage2: false,
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders GLFormCheckbox for each optional stage', () => {
+ expect(wrapper.findAllComponents(GlFormCheckbox)).toHaveLength(OPTIONAL_STAGES.length);
+ });
+
+ it('renders label for each optional stage', () => {
+ wrapper.findAllComponents(GlFormCheckbox).wrappers.forEach((w, idx) => {
+ expect(w.text()).toContain(OPTIONAL_STAGES[idx].label);
+ });
+ });
+
+ it('renders details for stage with details', () => {
+ expect(wrapper.findAllComponents(GlFormCheckbox).at(1).text()).toContain(
+ OPTIONAL_STAGES[1].details,
+ );
+ });
+
+ it('emits new stages selection state when checkbox is changed', () => {
+ const firstCheckbox = wrapper.findComponent(GlFormCheckbox);
+
+ firstCheckbox.vm.$emit('change', true);
+
+ expect(wrapper.emitted('input')[0]).toStrictEqual([
+ {
+ stage1: true,
+ stage2: false,
+ },
+ ]);
+ });
+});
diff --git a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
index c0ae4294e3d..53807167fe8 100644
--- a/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/import_projects_table_spec.js
@@ -5,6 +5,7 @@ import Vuex from 'vuex';
import { STATUSES } from '~/import_entities/constants';
import ImportProjectsTable from '~/import_entities/import_projects/components/import_projects_table.vue';
import ProviderRepoTableRow from '~/import_entities/import_projects/components/provider_repo_table_row.vue';
+import AdvancedSettingsPanel from '~/import_entities/import_projects/components/advanced_settings.vue';
import * as getters from '~/import_entities/import_projects/store/getters';
import state from '~/import_entities/import_projects/store/state';
@@ -45,6 +46,7 @@ describe('ImportProjectsTable', () => {
slots,
filterable,
paginatable,
+ optionalStages,
} = {}) {
Vue.use(Vuex);
@@ -71,6 +73,7 @@ describe('ImportProjectsTable', () => {
providerTitle,
filterable,
paginatable,
+ optionalStages,
},
slots,
stubs: {
@@ -271,4 +274,23 @@ describe('ImportProjectsTable', () => {
expect(wrapper.text().includes(INCOMPATIBLE_TEXT)).toBe(shouldRenderSlot);
},
);
+
+ it('should not render advanced settings panel when no optional steps are passed', () => {
+ createComponent({ state: { providerRepos: [providerRepo] } });
+
+ expect(wrapper.findComponent(AdvancedSettingsPanel).exists()).toBe(false);
+ });
+
+ it('should render advanced settings panel when no optional steps are passed', () => {
+ const OPTIONAL_STAGES = [{ name: 'step1', label: 'Step 1' }];
+ createComponent({ state: { providerRepos: [providerRepo] }, optionalStages: OPTIONAL_STAGES });
+
+ expect(wrapper.findComponent(AdvancedSettingsPanel).exists()).toBe(true);
+ expect(wrapper.findComponent(AdvancedSettingsPanel).props('stages')).toStrictEqual(
+ OPTIONAL_STAGES,
+ );
+ expect(wrapper.findComponent(AdvancedSettingsPanel).props('value')).toStrictEqual({
+ step1: false,
+ });
+ });
});
diff --git a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
index 17a07b1e9f9..40934e90b78 100644
--- a/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
+++ b/spec/frontend/import_entities/import_projects/components/provider_repo_table_row_spec.js
@@ -44,7 +44,7 @@ describe('ProviderRepoTableRow', () => {
wrapper = shallowMount(ProviderRepoTableRow, {
store,
- propsData: { availableNamespaces, userNamespace, ...props },
+ propsData: { availableNamespaces, userNamespace, optionalStages: {}, ...props },
});
}
@@ -92,10 +92,24 @@ describe('ProviderRepoTableRow', () => {
await nextTick();
- const { calls } = fetchImport.mock;
+ expect(fetchImport).toHaveBeenCalledWith(expect.anything(), {
+ repoId: repo.importSource.id,
+ optionalStages: {},
+ });
+ });
+
+ it('includes optionalStages to import', async () => {
+ const OPTIONAL_STAGES = { stage1: true, stage2: false };
+ await wrapper.setProps({ optionalStages: OPTIONAL_STAGES });
+
+ findImportButton().vm.$emit('click');
+
+ await nextTick();
- expect(calls).toHaveLength(1);
- expect(calls[0][1]).toBe(repo.importSource.id);
+ expect(fetchImport).toHaveBeenCalledWith(expect.anything(), {
+ repoId: repo.importSource.id,
+ optionalStages: OPTIONAL_STAGES,
+ });
});
});
diff --git a/spec/frontend/import_entities/import_projects/store/actions_spec.js b/spec/frontend/import_entities/import_projects/store/actions_spec.js
index 0ebe8525b5a..e154863f339 100644
--- a/spec/frontend/import_entities/import_projects/store/actions_spec.js
+++ b/spec/frontend/import_entities/import_projects/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { STATUSES } from '~/import_entities/constants';
import actionsFactory from '~/import_entities/import_projects/store/actions';
import { getImportTarget } from '~/import_entities/import_projects/store/getters';
@@ -155,7 +155,7 @@ describe('import_projects store actions', () => {
[],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Provider rate limit exceeded. Try again later',
});
});
@@ -198,7 +198,7 @@ describe('import_projects store actions', () => {
return testAction(
fetchImport,
- importRepoId,
+ { repoId: importRepoId, optionalStages: {} },
localState,
[
{
@@ -222,7 +222,7 @@ describe('import_projects store actions', () => {
await testAction(
fetchImport,
- importRepoId,
+ { repoId: importRepoId, optionalStages: {} },
localState,
[
{
@@ -234,7 +234,7 @@ describe('import_projects store actions', () => {
[],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Importing the project failed',
});
});
@@ -245,7 +245,7 @@ describe('import_projects store actions', () => {
await testAction(
fetchImport,
- importRepoId,
+ { repoId: importRepoId, optionalStages: {} },
localState,
[
{
@@ -257,7 +257,7 @@ describe('import_projects store actions', () => {
[],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `Importing the project failed: ${ERROR_MESSAGE}`,
});
});
@@ -358,7 +358,7 @@ describe('import_projects store actions', () => {
[],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Requesting namespaces failed',
});
});
@@ -366,14 +366,22 @@ describe('import_projects store actions', () => {
describe('importAll', () => {
it('dispatches multiple fetchImport actions', async () => {
+ const OPTIONAL_STAGES = { stage1: true, stage2: false };
+
await testAction(
importAll,
- null,
+ { optionalStages: OPTIONAL_STAGES },
localState,
[],
[
- { type: 'fetchImport', payload: importRepoId },
- { type: 'fetchImport', payload: otherImportRepoId },
+ {
+ type: 'fetchImport',
+ payload: { repoId: importRepoId, optionalStages: OPTIONAL_STAGES },
+ },
+ {
+ type: 'fetchImport',
+ payload: { repoId: otherImportRepoId, optionalStages: OPTIONAL_STAGES },
+ },
],
);
});
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index 21e57a2e33c..0a3beee0507 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -18,6 +18,7 @@ import {
integrationLevels,
I18N_SUCCESSFUL_CONNECTION_MESSAGE,
I18N_DEFAULT_ERROR_MESSAGE,
+ INTEGRATION_FORM_TYPE_SLACK,
billingPlans,
billingPlanNames,
} from '~/integrations/constants';
@@ -88,6 +89,7 @@ describe('IntegrationForm', () => {
const findConnectionSection = () => findAllSections().at(0);
const findConnectionSectionComponent = () =>
findConnectionSection().findComponent(IntegrationSectionConnection);
+ const findHelpHtml = () => wrapper.findByTestId('help-html');
beforeEach(() => {
mockAxios = new MockAdapter(axios);
@@ -712,5 +714,48 @@ describe('IntegrationForm', () => {
expect(refreshCurrentPage).toHaveBeenCalledTimes(1);
});
});
+
+ describe('Help and sections rendering', () => {
+ const dummyHelp = 'Foo Help';
+
+ it.each`
+ integration | flagIsOn | helpHtml | sections | shouldShowSections | shouldShowHelp
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${false} | ${''} | ${[]} | ${false} | ${false}
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${false} | ${dummyHelp} | ${[]} | ${false} | ${true}
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${false} | ${undefined} | ${[mockSectionConnection]} | ${false} | ${false}
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${false} | ${dummyHelp} | ${[mockSectionConnection]} | ${false} | ${true}
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${true} | ${''} | ${[]} | ${false} | ${false}
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${true} | ${dummyHelp} | ${[]} | ${false} | ${true}
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${true} | ${undefined} | ${[mockSectionConnection]} | ${true} | ${false}
+ ${INTEGRATION_FORM_TYPE_SLACK} | ${true} | ${dummyHelp} | ${[mockSectionConnection]} | ${true} | ${true}
+ ${'foo'} | ${false} | ${''} | ${[]} | ${false} | ${false}
+ ${'foo'} | ${false} | ${dummyHelp} | ${[]} | ${false} | ${true}
+ ${'foo'} | ${false} | ${undefined} | ${[mockSectionConnection]} | ${true} | ${false}
+ ${'foo'} | ${false} | ${dummyHelp} | ${[mockSectionConnection]} | ${true} | ${false}
+ ${'foo'} | ${true} | ${''} | ${[]} | ${false} | ${false}
+ ${'foo'} | ${true} | ${dummyHelp} | ${[]} | ${false} | ${true}
+ ${'foo'} | ${true} | ${undefined} | ${[mockSectionConnection]} | ${true} | ${false}
+ ${'foo'} | ${true} | ${dummyHelp} | ${[mockSectionConnection]} | ${true} | ${false}
+ `(
+ '$sections sections, and "$helpHtml" helpHtml when the FF is "$flagIsOn" for "$integration" integration',
+ ({ integration, flagIsOn, helpHtml, sections, shouldShowSections, shouldShowHelp }) => {
+ createComponent({
+ provide: {
+ helpHtml,
+ glFeatures: { integrationSlackAppNotifications: flagIsOn },
+ },
+ customStateProps: {
+ sections,
+ type: integration,
+ },
+ });
+ expect(findAllSections().length > 0).toEqual(shouldShowSections);
+ expect(findHelpHtml().exists()).toBe(shouldShowHelp);
+ if (shouldShowHelp) {
+ expect(findHelpHtml().html()).toContain(helpHtml);
+ }
+ },
+ );
+ });
});
});
diff --git a/spec/frontend/issuable/bulk_update_sidebar/components/status_select_spec.js b/spec/frontend/issuable/bulk_update_sidebar/components/status_dropdown_spec.js
index 8ecbf41ce56..2f281cb88f9 100644
--- a/spec/frontend/issuable/bulk_update_sidebar/components/status_select_spec.js
+++ b/spec/frontend/issuable/bulk_update_sidebar/components/status_dropdown_spec.js
@@ -1,9 +1,9 @@
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import StatusSelect from '~/issuable/bulk_update_sidebar/components/status_select.vue';
-import { ISSUE_STATUS_SELECT_OPTIONS } from '~/issuable/bulk_update_sidebar/constants';
+import StatusDropdown from '~/issuable/bulk_update_sidebar/components/status_dropdown.vue';
+import { statusDropdownOptions } from '~/issuable/bulk_update_sidebar/constants';
-describe('StatusSelect', () => {
+describe('SubscriptionsDropdown component', () => {
let wrapper;
const findDropdown = () => wrapper.findComponent(GlDropdown);
@@ -11,7 +11,7 @@ describe('StatusSelect', () => {
const findHiddenInput = () => wrapper.find('input');
function createComponent() {
- wrapper = shallowMount(StatusSelect);
+ wrapper = shallowMount(StatusDropdown);
}
afterEach(() => {
@@ -45,14 +45,12 @@ describe('StatusSelect', () => {
it('updates value of the hidden input', () => {
expect(findHiddenInput().attributes('value')).toBe(
- ISSUE_STATUS_SELECT_OPTIONS[selectItemAtIndex].value,
+ statusDropdownOptions[selectItemAtIndex].value,
);
});
it('updates the dropdown text prop', () => {
- expect(findDropdown().props('text')).toBe(
- ISSUE_STATUS_SELECT_OPTIONS[selectItemAtIndex].text,
- );
+ expect(findDropdown().props('text')).toBe(statusDropdownOptions[selectItemAtIndex].text);
});
it('sets dropdown item `is-checked` prop to `true`', () => {
diff --git a/spec/frontend/issuable/bulk_update_sidebar/components/subscriptions_dropdown_spec.js b/spec/frontend/issuable/bulk_update_sidebar/components/subscriptions_dropdown_spec.js
new file mode 100644
index 00000000000..56ef7a1ed39
--- /dev/null
+++ b/spec/frontend/issuable/bulk_update_sidebar/components/subscriptions_dropdown_spec.js
@@ -0,0 +1,76 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import SubscriptionsDropdown from '~/issuable/bulk_update_sidebar/components/subscriptions_dropdown.vue';
+import { subscriptionsDropdownOptions } from '~/issuable/bulk_update_sidebar/constants';
+
+describe('SubscriptionsDropdown component', () => {
+ let wrapper;
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findHiddenInput = () => wrapper.find('input');
+
+ function createComponent() {
+ wrapper = shallowMount(SubscriptionsDropdown);
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('with no value selected', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('hidden input value is undefined', () => {
+ expect(findHiddenInput().attributes('value')).toBeUndefined();
+ });
+
+ it('renders default text', () => {
+ expect(findDropdown().props('text')).toBe(SubscriptionsDropdown.i18n.defaultDropdownText);
+ });
+
+ it('renders dropdown items with `is-checked` prop set to `false`', () => {
+ const dropdownItems = findAllDropdownItems();
+
+ expect(dropdownItems.at(0).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ });
+ });
+
+ describe('when selecting a value', () => {
+ beforeEach(() => {
+ createComponent();
+ findAllDropdownItems().at(0).vm.$emit('click');
+ });
+
+ it('updates value of the hidden input', () => {
+ expect(findHiddenInput().attributes('value')).toBe(subscriptionsDropdownOptions[0].value);
+ });
+
+ it('updates the dropdown text prop', () => {
+ expect(findDropdown().props('text')).toBe(subscriptionsDropdownOptions[0].text);
+ });
+
+ it('sets dropdown item `is-checked` prop to `true`', () => {
+ const dropdownItems = findAllDropdownItems();
+
+ expect(dropdownItems.at(0).props('isChecked')).toBe(true);
+ expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ });
+
+ describe('when selecting the value that is already selected', () => {
+ it('clears dropdown selection', async () => {
+ findAllDropdownItems().at(0).vm.$emit('click');
+ await nextTick();
+ const dropdownItems = findAllDropdownItems();
+
+ expect(dropdownItems.at(0).props('isChecked')).toBe(false);
+ expect(dropdownItems.at(1).props('isChecked')).toBe(false);
+ expect(findDropdown().props('text')).toBe(SubscriptionsDropdown.i18n.defaultDropdownText);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
index b518d2fbdec..680dbd68493 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
@@ -7,7 +7,7 @@ import {
issuable1,
issuable2,
} from 'jest/issuable/components/related_issuable_mock_data';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { linkedIssueTypesMap } from '~/related_issues/constants';
import RelatedIssuesBlock from '~/related_issues/components/related_issues_block.vue';
@@ -136,7 +136,7 @@ describe('RelatedIssuesRoot', () => {
await createComponent();
jest.spyOn(wrapper.vm, 'processAllReferences');
jest.spyOn(wrapper.vm.service, 'addRelatedIssues');
- createFlash.mockClear();
+ createAlert.mockClear();
});
it('processes references before submitting', () => {
@@ -207,12 +207,12 @@ describe('RelatedIssuesRoot', () => {
mock.onPost(defaultProps.endpoint).reply(409, { message });
wrapper.vm.store.setPendingReferences([issuable1.reference, issuable2.reference]);
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
findRelatedIssuesBlock().vm.$emit('addIssuableFormSubmit', input);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message });
+ expect(createAlert).toHaveBeenCalledWith({ message });
});
});
diff --git a/spec/frontend/issues/show/components/edited_spec.js b/spec/frontend/issues/show/components/edited_spec.js
index 8a240c38b5f..aa6e0a9dceb 100644
--- a/spec/frontend/issues/show/components/edited_spec.js
+++ b/spec/frontend/issues/show/components/edited_spec.js
@@ -1,7 +1,10 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
+import { getTimeago } from '~/lib/utils/datetime_utility';
import Edited from '~/issues/show/components/edited.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+const timeago = getTimeago();
+
describe('Edited component', () => {
let wrapper;
@@ -9,7 +12,8 @@ describe('Edited component', () => {
const findTimeAgoTooltip = () => wrapper.findComponent(TimeAgoTooltip);
const formatText = (text) => text.trim().replace(/\s\s+/g, ' ');
- const mountComponent = (propsData) => shallowMount(Edited, { propsData });
+ const mountComponent = (propsData) => mount(Edited, { propsData });
+ const updatedAt = '2017-05-15T12:31:04.428Z';
afterEach(() => {
wrapper.destroy();
@@ -17,12 +21,12 @@ describe('Edited component', () => {
it('renders an edited at+by string', () => {
wrapper = mountComponent({
- updatedAt: '2017-05-15T12:31:04.428Z',
+ updatedAt,
updatedByName: 'Some User',
updatedByPath: '/some_user',
});
- expect(formatText(wrapper.text())).toBe('Edited by Some User');
+ expect(formatText(wrapper.text())).toBe(`Edited ${timeago.format(updatedAt)} by Some User`);
expect(findAuthorLink().attributes('href')).toBe('/some_user');
expect(findTimeAgoTooltip().exists()).toBe(true);
});
@@ -40,10 +44,10 @@ describe('Edited component', () => {
it('if no updatedByName and updatedByPath is provided, no user element will be rendered', () => {
wrapper = mountComponent({
- updatedAt: '2017-05-15T12:31:04.428Z',
+ updatedAt,
});
- expect(formatText(wrapper.text())).toBe('Edited');
+ expect(formatText(wrapper.text())).toBe(`Edited ${timeago.format(updatedAt)}`);
expect(findAuthorLink().exists()).toBe(false);
expect(findTimeAgoTooltip().exists()).toBe(true);
});
diff --git a/spec/frontend/issues/show/components/fields/description_spec.js b/spec/frontend/issues/show/components/fields/description_spec.js
index 61433607a2b..cd4d422583b 100644
--- a/spec/frontend/issues/show/components/fields/description_spec.js
+++ b/spec/frontend/issues/show/components/fields/description_spec.js
@@ -2,13 +2,15 @@ import { shallowMount } from '@vue/test-utils';
import DescriptionField from '~/issues/show/components/fields/description.vue';
import eventHub from '~/issues/show/event_hub';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
describe('Description field component', () => {
let wrapper;
const findTextarea = () => wrapper.findComponent({ ref: 'textarea' });
+ const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
- const mountComponent = (description = 'test') =>
+ const mountComponent = ({ description = 'test', contentEditorOnIssues = false } = {}) =>
shallowMount(DescriptionField, {
attachTo: document.body,
propsData: {
@@ -17,6 +19,11 @@ describe('Description field component', () => {
quickActionsDocsPath: '/',
value: description,
},
+ provide: {
+ glFeatures: {
+ contentEditorOnIssues,
+ },
+ },
stubs: {
MarkdownField,
},
@@ -40,7 +47,7 @@ describe('Description field component', () => {
it('renders markdown field with a markdown description', () => {
const markdown = '**test**';
- wrapper = mountComponent(markdown);
+ wrapper = mountComponent({ description: markdown });
expect(findTextarea().element.value).toBe(markdown);
});
@@ -66,4 +73,52 @@ describe('Description field component', () => {
expect(eventHub.$emit).toHaveBeenCalledWith('update.issuable');
});
+
+ describe('when contentEditorOnIssues feature flag is on', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ contentEditorOnIssues: true });
+ });
+
+ it('uses the MarkdownEditor component to edit markdown', () => {
+ expect(findMarkdownEditor().props()).toEqual(
+ expect.objectContaining({
+ value: 'test',
+ renderMarkdownPath: '/',
+ markdownDocsPath: '/',
+ quickActionsDocsPath: expect.any(String),
+ initOnAutofocus: true,
+ supportsQuickActions: true,
+ enableAutocomplete: true,
+ }),
+ );
+ });
+
+ it('triggers update with meta+enter', () => {
+ findMarkdownEditor().vm.$emit('keydown', {
+ type: 'keydown',
+ keyCode: 13,
+ metaKey: true,
+ });
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('update.issuable');
+ });
+
+ it('triggers update with ctrl+enter', () => {
+ findMarkdownEditor().vm.$emit('keydown', {
+ type: 'keydown',
+ keyCode: 13,
+ ctrlKey: true,
+ });
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('update.issuable');
+ });
+
+ it('emits input event when MarkdownEditor emits input event', () => {
+ const markdown = 'markdown';
+
+ findMarkdownEditor().vm.$emit('input', markdown);
+
+ expect(wrapper.emitted('input')).toEqual([[markdown]]);
+ });
+ });
});
diff --git a/spec/frontend/issues/show/components/form_spec.js b/spec/frontend/issues/show/components/form_spec.js
index 5c0fe991b22..aedb974cbd0 100644
--- a/spec/frontend/issues/show/components/form_spec.js
+++ b/spec/frontend/issues/show/components/form_spec.js
@@ -1,14 +1,16 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import Autosave from '~/autosave';
+import { getDraft, updateDraft, clearDraft, getLockVersion } from '~/lib/utils/autosave';
import DescriptionTemplate from '~/issues/show/components/fields/description_template.vue';
+import IssuableTitleField from '~/issues/show/components/fields/title.vue';
+import DescriptionField from '~/issues/show/components/fields/description.vue';
import IssueTypeField from '~/issues/show/components/fields/type.vue';
import formComponent from '~/issues/show/components/form.vue';
import LockedWarning from '~/issues/show/components/locked_warning.vue';
import eventHub from '~/issues/show/event_hub';
-jest.mock('~/autosave');
+jest.mock('~/lib/utils/autosave');
describe('Inline edit form component', () => {
let wrapper;
@@ -38,9 +40,14 @@ describe('Inline edit form component', () => {
...defaultProps,
...props,
},
+ stubs: {
+ DescriptionField,
+ },
});
};
+ const findTitleField = () => wrapper.findComponent(IssuableTitleField);
+ const findDescriptionField = () => wrapper.findComponent(DescriptionField);
const findDescriptionTemplate = () => wrapper.findComponent(DescriptionTemplate);
const findIssuableTypeField = () => wrapper.findComponent(IssueTypeField);
const findLockedWarning = () => wrapper.findComponent(LockedWarning);
@@ -108,16 +115,34 @@ describe('Inline edit form component', () => {
});
describe('autosave', () => {
- let spy;
-
beforeEach(() => {
- spy = jest.spyOn(Autosave.prototype, 'reset');
+ getDraft.mockImplementation((autosaveKey) => {
+ return autosaveKey[autosaveKey.length - 1];
+ });
});
- it('initialized Autosave on mount', () => {
+ it('initializes title and description fields with saved drafts', () => {
createComponent();
- expect(Autosave).toHaveBeenCalledTimes(2);
+ expect(findTitleField().props().value).toBe('title');
+ expect(findDescriptionField().props().value).toBe('description');
+ });
+
+ it('updates local storage drafts when title and description change', () => {
+ const updatedTitle = 'updated title';
+ const updatedDescription = 'updated description';
+
+ createComponent();
+
+ findTitleField().vm.$emit('input', updatedTitle);
+ findDescriptionField().vm.$emit('input', updatedDescription);
+
+ expect(updateDraft).toHaveBeenCalledWith(expect.any(Array), updatedTitle);
+ expect(updateDraft).toHaveBeenCalledWith(
+ expect.any(Array),
+ updatedDescription,
+ defaultProps.formState.lock_version,
+ );
});
it('calls reset on autosave when eventHub emits appropriate events', () => {
@@ -125,33 +150,60 @@ describe('Inline edit form component', () => {
eventHub.$emit('close.form');
- expect(spy).toHaveBeenCalledTimes(2);
+ expect(clearDraft).toHaveBeenCalledTimes(2);
eventHub.$emit('delete.issuable');
- expect(spy).toHaveBeenCalledTimes(4);
+ expect(clearDraft).toHaveBeenCalledTimes(4);
eventHub.$emit('update.issuable');
- expect(spy).toHaveBeenCalledTimes(6);
+ expect(clearDraft).toHaveBeenCalledTimes(6);
});
describe('outdated description', () => {
+ const clientSideMockVersion = 'lock version from local storage';
+ const serverSideMockVersion = 'lock version from server';
+
+ const mockGetLockVersion = () => getLockVersion.mockResolvedValue(clientSideMockVersion);
+
it('does not show warning if lock version from server is the same as the local lock version', () => {
createComponent();
expect(findAlert().exists()).toBe(false);
});
it('shows warning if lock version from server differs than the local lock version', async () => {
- Autosave.prototype.getSavedLockVersion.mockResolvedValue('lock version from local storage');
+ mockGetLockVersion();
createComponent({
- formState: { ...defaultProps.formState, lock_version: 'lock version from server' },
+ formState: { ...defaultProps.formState, lock_version: serverSideMockVersion },
});
await nextTick();
expect(findAlert().exists()).toBe(true);
});
+
+ describe('when saved draft is discarded', () => {
+ beforeEach(async () => {
+ mockGetLockVersion();
+
+ createComponent({
+ formState: { ...defaultProps.formState, lock_version: serverSideMockVersion },
+ });
+
+ await nextTick();
+
+ findAlert().vm.$emit('secondaryAction');
+ });
+
+ it('hides the warning alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('clears the description draft', () => {
+ expect(clearDraft).toHaveBeenCalledWith(expect.any(Array));
+ });
+ });
});
});
});
diff --git a/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
index d92aeabba0f..458c1c3f858 100644
--- a/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
+++ b/spec/frontend/issues/show/components/incidents/incident_tabs_spec.js
@@ -5,7 +5,6 @@ import { trackIncidentDetailsViewsOptions } from '~/incidents/constants';
import DescriptionComponent from '~/issues/show/components/description.vue';
import HighlightBar from '~/issues/show/components/incidents/highlight_bar.vue';
import IncidentTabs from '~/issues/show/components/incidents/incident_tabs.vue';
-import TimelineTab from '~/issues/show/components/incidents/timeline_events_tab.vue';
import INVALID_URL from '~/lib/utils/invalid_url';
import Tracking from '~/tracking';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
@@ -38,7 +37,6 @@ describe('Incident Tabs component', () => {
projectId: '',
issuableId: '',
uploadMetricsFeatureAvailable: true,
- glFeatures: { incidentTimeline: true },
},
data() {
return { alert: mockAlert, ...data };
@@ -67,7 +65,6 @@ describe('Incident Tabs component', () => {
const findAlertDetailsComponent = () => wrapper.findComponent(AlertDetailsTable);
const findDescriptionComponent = () => wrapper.findComponent(DescriptionComponent);
const findHighlightBarComponent = () => wrapper.findComponent(HighlightBar);
- const findTimelineTab = () => wrapper.findComponent(TimelineTab);
describe('empty state', () => {
beforeEach(() => {
@@ -128,20 +125,4 @@ describe('Incident Tabs component', () => {
expect(Tracking.event).toHaveBeenCalledWith(category, action);
});
});
-
- describe('incident timeline tab', () => {
- beforeEach(() => {
- mountComponent();
- });
-
- it('renders the timeline tab when feature flag is enabled', () => {
- expect(findTimelineTab().exists()).toBe(true);
- });
-
- it('does not render timeline tab when feature flag is disabled', () => {
- mountComponent({}, { provide: { glFeatures: { incidentTimeline: false } } });
-
- expect(findTimelineTab().exists()).toBe(false);
- });
- });
});
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
index 7f086a276f7..2e7449974e5 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
@@ -22,12 +22,15 @@ describe('Timeline events form', () => {
useFakeDate(fakeDate);
let wrapper;
- const mountComponent = ({ mountMethod = shallowMountExtended }) => {
+ const mountComponent = ({ mountMethod = shallowMountExtended } = {}) => {
wrapper = mountMethod(TimelineEventsForm, {
propsData: {
showSaveAndAdd: true,
isEventProcessed: false,
},
+ stubs: {
+ GlButton: true,
+ },
});
};
@@ -48,17 +51,18 @@ describe('Timeline events form', () => {
findHourInput().setValue(5);
findMinuteInput().setValue(45);
};
+ const findTextarea = () => wrapper.findByTestId('input-note');
const submitForm = async () => {
- findSubmitButton().trigger('click');
+ findSubmitButton().vm.$emit('click');
await waitForPromises();
};
const submitFormAndAddAnother = async () => {
- findSubmitAndAddButton().trigger('click');
+ findSubmitAndAddButton().vm.$emit('click');
await waitForPromises();
};
const cancelForm = async () => {
- findCancelButton().trigger('click');
+ findCancelButton().vm.$emit('click');
await waitForPromises();
};
@@ -118,5 +122,17 @@ describe('Timeline events form', () => {
expect(findHourInput().element.value).toBe('0');
expect(findMinuteInput().element.value).toBe('0');
});
+
+ it('should disable the save buttons when event content does not exist', async () => {
+ expect(findSubmitButton().props('disabled')).toBe(true);
+ expect(findSubmitAndAddButton().props('disabled')).toBe(true);
+ });
+
+ it('should enable the save buttons when event content exists', async () => {
+ await findTextarea().setValue('hello');
+
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ expect(findSubmitAndAddButton().props('disabled')).toBe(false);
+ });
});
});
diff --git a/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js b/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
index cc8346253ee..d41031f9eaa 100644
--- a/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
+++ b/spec/frontend/jira_connect/branches/components/new_branch_form_spec.js
@@ -238,7 +238,7 @@ describe('NewBranchForm', () => {
scenario | mutation | alertTitle | alertText
${'with errors-as-data'} | ${mockCreateBranchMutationWithErrors} | ${CREATE_BRANCH_ERROR_WITH_CONTEXT} | ${mockCreateBranchMutationResponseWithErrors.data.createBranch.errors[0]}
${'top-level error'} | ${mockCreateBranchMutationFailed} | ${''} | ${CREATE_BRANCH_ERROR_GENERIC}
- `('', ({ mutation, alertTitle, alertText }) => {
+ `('given $scenario', ({ mutation, alertTitle, alertText }) => {
beforeEach(async () => {
createComponent({
mockApollo: createMockApolloProvider({
diff --git a/spec/frontend/jira_connect/subscriptions/pkce_spec.js b/spec/frontend/jira_connect/subscriptions/pkce_spec.js
index 4ee88059b7a..671922c36d8 100644
--- a/spec/frontend/jira_connect/subscriptions/pkce_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/pkce_spec.js
@@ -1,11 +1,7 @@
import crypto from 'crypto';
-import { TextEncoder, TextDecoder } from 'util';
import { createCodeVerifier, createCodeChallenge } from '~/jira_connect/subscriptions/pkce';
-global.TextEncoder = TextEncoder;
-global.TextDecoder = TextDecoder;
-
describe('pkce', () => {
beforeAll(() => {
Object.defineProperty(global.self, 'crypto', {
diff --git a/spec/frontend/jobs/components/table/job_table_app_spec.js b/spec/frontend/jobs/components/table/job_table_app_spec.js
index 8c724a8030b..109cef6f817 100644
--- a/spec/frontend/jobs/components/table/job_table_app_spec.js
+++ b/spec/frontend/jobs/components/table/job_table_app_spec.js
@@ -12,7 +12,7 @@ import { s__ } from '~/locale';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import getJobsQuery from '~/jobs/components/table/graphql/queries/get_jobs.query.graphql';
import JobsTable from '~/jobs/components/table/jobs_table.vue';
import JobsTableApp from '~/jobs/components/table/jobs_table_app.vue';
@@ -229,7 +229,7 @@ describe('Job table app', () => {
await findFilteredSearch().vm.$emit('filterJobsBySearch', ['raw text']);
- expect(createFlash).toHaveBeenCalledWith(expectedWarning);
+ expect(createAlert).toHaveBeenCalledWith(expectedWarning);
expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
});
diff --git a/spec/frontend/labels/components/promote_label_modal_spec.js b/spec/frontend/labels/components/promote_label_modal_spec.js
index 8cfaba6f98a..8953e3cbcd8 100644
--- a/spec/frontend/labels/components/promote_label_modal_spec.js
+++ b/spec/frontend/labels/components/promote_label_modal_spec.js
@@ -1,98 +1,100 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlModal, GlSprintf } from '@gitlab/ui';
+import AxiosMockAdapter from 'axios-mock-adapter';
+
import { TEST_HOST } from 'helpers/test_constants';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { stubComponent } from 'helpers/stub_component';
+
import axios from '~/lib/utils/axios_utils';
-import promoteLabelModal from '~/labels/components/promote_label_modal.vue';
+import PromoteLabelModal from '~/labels/components/promote_label_modal.vue';
import eventHub from '~/labels/event_hub';
describe('Promote label modal', () => {
- let vm;
- const Component = Vue.extend(promoteLabelModal);
+ let wrapper;
+ let axiosMock;
+
const labelMockData = {
labelTitle: 'Documentation',
- labelColor: '#5cb85c',
- labelTextColor: '#ffffff',
+ labelColor: 'rgb(92, 184, 92)',
+ labelTextColor: 'rgb(255, 255, 255)',
url: `${TEST_HOST}/dummy/promote/labels`,
groupName: 'group',
};
- describe('Modal title and description', () => {
- beforeEach(() => {
- vm = mountComponent(Component, labelMockData);
+ const createComponent = () => {
+ wrapper = shallowMount(PromoteLabelModal, {
+ propsData: labelMockData,
+ stubs: {
+ GlSprintf,
+ GlModal: stubComponent(GlModal, {
+ template: `<div><slot name="modal-title"></slot><slot></slot></div>`,
+ }),
+ },
});
+ };
- afterEach(() => {
- vm.$destroy();
- });
+ beforeEach(() => {
+ axiosMock = new AxiosMockAdapter(axios);
+ createComponent();
+ });
+ afterEach(() => {
+ axiosMock.reset();
+ wrapper.destroy();
+ });
+
+ describe('Modal title and description', () => {
it('contains the proper description', () => {
- expect(vm.text).toContain(
+ expect(wrapper.text()).toContain(
`Promoting ${labelMockData.labelTitle} will make it available for all projects inside ${labelMockData.groupName}`,
);
});
it('contains a label span with the color', () => {
- expect(vm.labelColor).not.toBe(null);
- expect(vm.labelColor).toBe(labelMockData.labelColor);
- expect(vm.labelTitle).toBe(labelMockData.labelTitle);
+ const label = wrapper.find('.modal-title-with-label .label');
+
+ expect(label.element.style.backgroundColor).toBe(labelMockData.labelColor);
+ expect(label.element.style.color).toBe(labelMockData.labelTextColor);
+ expect(label.text()).toBe(labelMockData.labelTitle);
});
});
describe('When requesting a label promotion', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- ...labelMockData,
- });
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
});
- afterEach(() => {
- vm.$destroy();
- });
-
- it('redirects when a label is promoted', () => {
+ it('redirects when a label is promoted', async () => {
const responseURL = `${TEST_HOST}/dummy/endpoint`;
- jest.spyOn(axios, 'post').mockImplementation((url) => {
- expect(url).toBe(labelMockData.url);
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'promoteLabelModal.requestStarted',
- labelMockData.url,
- );
- return Promise.resolve({
- request: {
- responseURL,
- },
- });
- });
+ axiosMock.onPost(labelMockData.url).reply(200, { url: responseURL });
- return vm.onSubmit().then(() => {
- expect(eventHub.$emit).toHaveBeenCalledWith('promoteLabelModal.requestFinished', {
- labelUrl: labelMockData.url,
- successful: true,
- });
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith(
+ 'promoteLabelModal.requestStarted',
+ labelMockData.url,
+ );
+
+ await axios.waitForAll();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('promoteLabelModal.requestFinished', {
+ labelUrl: labelMockData.url,
+ successful: true,
});
});
- it('displays an error if promoting a label failed', () => {
+ it('displays an error if promoting a label failed', async () => {
const dummyError = new Error('promoting label failed');
dummyError.response = { status: 500 };
+ axiosMock.onPost(labelMockData.url).reply(500, { error: dummyError });
- jest.spyOn(axios, 'post').mockImplementation((url) => {
- expect(url).toBe(labelMockData.url);
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'promoteLabelModal.requestStarted',
- labelMockData.url,
- );
+ wrapper.findComponent(GlModal).vm.$emit('primary');
- return Promise.reject(dummyError);
- });
+ await axios.waitForAll();
- return vm.onSubmit().catch((error) => {
- expect(error).toBe(dummyError);
- expect(eventHub.$emit).toHaveBeenCalledWith('promoteLabelModal.requestFinished', {
- labelUrl: labelMockData.url,
- successful: false,
- });
+ expect(eventHub.$emit).toHaveBeenCalledWith('promoteLabelModal.requestFinished', {
+ labelUrl: labelMockData.url,
+ successful: false,
});
});
});
diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js
index 5523cc0606e..412408ce377 100644
--- a/spec/frontend/lib/dompurify_spec.js
+++ b/spec/frontend/lib/dompurify_spec.js
@@ -1,4 +1,4 @@
-import { sanitize } from '~/lib/dompurify';
+import { sanitize, defaultConfig } from '~/lib/dompurify';
// GDK
const rootGon = {
@@ -45,7 +45,7 @@ const invalidProtocolUrls = [
/* eslint-enable no-script-url */
const validProtocolUrls = ['slack://open', 'x-devonthink-item://90909', 'x-devonthink-item:90909'];
-const forbiddenDataAttrs = ['data-remote', 'data-url', 'data-type', 'data-method'];
+const forbiddenDataAttrs = defaultConfig.FORBID_ATTR;
const acceptedDataAttrs = ['data-random', 'data-custom'];
describe('~/lib/dompurify', () => {
diff --git a/spec/frontend/lib/utils/autosave_spec.js b/spec/frontend/lib/utils/autosave_spec.js
index 12e97f6cdec..afb49dd6db4 100644
--- a/spec/frontend/lib/utils/autosave_spec.js
+++ b/spec/frontend/lib/utils/autosave_spec.js
@@ -1,32 +1,42 @@
-import { clearDraft, getDraft, updateDraft } from '~/lib/utils/autosave';
+import { clearDraft, getDraft, updateDraft, getLockVersion } from '~/lib/utils/autosave';
describe('autosave utils', () => {
const autosaveKey = 'dummy-autosave-key';
const text = 'some dummy text';
+ const lockVersion = '2';
+ const normalizedAutosaveKey = `autosave/${autosaveKey}`;
+ const lockVersionKey = `autosave/${autosaveKey}/lockVersion`;
describe('clearDraft', () => {
beforeEach(() => {
- localStorage.setItem(`autosave/${autosaveKey}`, text);
+ localStorage.setItem(normalizedAutosaveKey, text);
+ localStorage.setItem(lockVersionKey, lockVersion);
});
afterEach(() => {
- localStorage.removeItem(`autosave/${autosaveKey}`);
+ localStorage.removeItem(normalizedAutosaveKey);
});
it('removes the draft from localStorage', () => {
clearDraft(autosaveKey);
- expect(localStorage.getItem(`autosave/${autosaveKey}`)).toBe(null);
+ expect(localStorage.getItem(normalizedAutosaveKey)).toBe(null);
+ });
+
+ it('removes the lockVersion from localStorage', () => {
+ clearDraft(autosaveKey);
+
+ expect(localStorage.getItem(lockVersionKey)).toBe(null);
});
});
describe('getDraft', () => {
beforeEach(() => {
- localStorage.setItem(`autosave/${autosaveKey}`, text);
+ localStorage.setItem(normalizedAutosaveKey, text);
});
afterEach(() => {
- localStorage.removeItem(`autosave/${autosaveKey}`);
+ localStorage.removeItem(normalizedAutosaveKey);
});
it('returns the draft from localStorage', () => {
@@ -36,7 +46,7 @@ describe('autosave utils', () => {
});
it('returns null if no entry exists in localStorage', () => {
- localStorage.removeItem(`autosave/${autosaveKey}`);
+ localStorage.removeItem(normalizedAutosaveKey);
const result = getDraft(autosaveKey);
@@ -46,19 +56,44 @@ describe('autosave utils', () => {
describe('updateDraft', () => {
beforeEach(() => {
- localStorage.setItem(`autosave/${autosaveKey}`, text);
+ localStorage.setItem(normalizedAutosaveKey, text);
});
afterEach(() => {
- localStorage.removeItem(`autosave/${autosaveKey}`);
+ localStorage.removeItem(normalizedAutosaveKey);
});
- it('removes the draft from localStorage', () => {
+ it('updates the stored draft', () => {
const newText = 'new text';
updateDraft(autosaveKey, newText);
- expect(localStorage.getItem(`autosave/${autosaveKey}`)).toBe(newText);
+ expect(localStorage.getItem(normalizedAutosaveKey)).toBe(newText);
+ });
+
+ describe('when lockVersion is provided', () => {
+ it('updates the stored lockVersion', () => {
+ const newText = 'new text';
+ const newLockVersion = '2';
+
+ updateDraft(autosaveKey, newText, lockVersion);
+
+ expect(localStorage.getItem(lockVersionKey)).toBe(newLockVersion);
+ });
+ });
+ });
+
+ describe('getLockVersion', () => {
+ beforeEach(() => {
+ localStorage.setItem(lockVersionKey, lockVersion);
+ });
+
+ afterEach(() => {
+ localStorage.removeItem(lockVersionKey);
+ });
+
+ it('returns the lockVersion from localStorage', () => {
+ expect(getLockVersion(autosaveKey)).toBe(lockVersion);
});
});
});
diff --git a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
index 018ae12c908..2e0bb6a8dcd 100644
--- a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
@@ -145,3 +145,22 @@ describe('durationTimeFormatted', () => {
expect(utils.durationTimeFormatted(duration)).toBe(expectedOutput);
});
});
+
+describe('formatUtcOffset', () => {
+ it.each`
+ offset | expected
+ ${-32400} | ${'- 9'}
+ ${'-12600'} | ${'- 3.5'}
+ ${0} | ${'0'}
+ ${'10800'} | ${'+ 3'}
+ ${19800} | ${'+ 5.5'}
+ ${0} | ${'0'}
+ ${[]} | ${'0'}
+ ${{}} | ${'0'}
+ ${true} | ${'0'}
+ ${null} | ${'0'}
+ ${undefined} | ${'0'}
+ `('returns $expected given $offset', ({ offset, expected }) => {
+ expect(utils.formatUtcOffset(offset)).toEqual(expected);
+ });
+});
diff --git a/spec/frontend/lib/utils/text_markdown_spec.js b/spec/frontend/lib/utils/text_markdown_spec.js
index 8d179baa505..9fbb3d0a660 100644
--- a/spec/frontend/lib/utils/text_markdown_spec.js
+++ b/spec/frontend/lib/utils/text_markdown_spec.js
@@ -4,15 +4,30 @@ import {
keypressNoteText,
compositionStartNoteText,
compositionEndNoteText,
+ updateTextForToolbarBtn,
} from '~/lib/utils/text_markdown';
import '~/lib/utils/jquery_at_who';
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
describe('init markdown', () => {
+ let mdArea;
let textArea;
+ let indentButton;
+ let outdentButton;
beforeAll(() => {
- textArea = document.createElement('textarea');
- document.querySelector('body').appendChild(textArea);
+ setHTMLFixture(
+ `<div class='md-area'>
+ <textarea></textarea>
+ <button data-md-command="indentLines" id="indentButton"></button>
+ <button data-md-command="outdentLines" id="outdentButton"></button>
+ </div>`,
+ );
+ mdArea = document.querySelector('.md-area');
+ textArea = mdArea.querySelector('textarea');
+ indentButton = mdArea.querySelector('#indentButton');
+ outdentButton = mdArea.querySelector('#outdentButton');
+
textArea.focus();
// needed for the underlying insertText to work
@@ -20,7 +35,7 @@ describe('init markdown', () => {
});
afterAll(() => {
- textArea.parentNode.removeChild(textArea);
+ resetHTMLFixture();
});
describe('insertMarkdownText', () => {
@@ -183,6 +198,7 @@ describe('init markdown', () => {
textArea.addEventListener('keydown', keypressNoteText);
textArea.addEventListener('compositionstart', compositionStartNoteText);
textArea.addEventListener('compositionend', compositionEndNoteText);
+ gon.markdown_automatic_lists = true;
});
it.each`
@@ -302,19 +318,22 @@ describe('init markdown', () => {
expect(textArea.value).toEqual(expected);
expect(textArea.selectionStart).toBe(expected.length);
});
- });
- });
- describe('shifting selected lines left or right', () => {
- const indentEvent = new KeyboardEvent('keydown', { key: ']', metaKey: true });
- const outdentEvent = new KeyboardEvent('keydown', { key: '[', metaKey: true });
+ it('does nothing if user preference disabled', () => {
+ const text = '- test';
- beforeEach(() => {
- textArea.addEventListener('keydown', keypressNoteText);
- textArea.addEventListener('compositionstart', compositionStartNoteText);
- textArea.addEventListener('compositionend', compositionEndNoteText);
+ gon.markdown_automatic_lists = false;
+
+ textArea.value = text;
+ textArea.setSelectionRange(text.length, text.length);
+ textArea.dispatchEvent(enterEvent);
+
+ expect(textArea.value).toEqual(text);
+ });
});
+ });
+ describe('shifting selected lines left or right', () => {
it.each`
selectionStart | selectionEnd | expected | expectedSelectionStart | expectedSelectionEnd
${0} | ${0} | ${' 012\n456\n89'} | ${2} | ${2}
@@ -338,7 +357,7 @@ describe('init markdown', () => {
textArea.value = text;
textArea.setSelectionRange(selectionStart, selectionEnd);
- textArea.dispatchEvent(indentEvent);
+ updateTextForToolbarBtn($(indentButton));
expect(textArea.value).toEqual(expected);
expect(textArea.selectionStart).toEqual(expectedSelectionStart);
@@ -350,7 +369,7 @@ describe('init markdown', () => {
textArea.value = '012\n\n89';
textArea.setSelectionRange(4, 4);
- textArea.dispatchEvent(indentEvent);
+ updateTextForToolbarBtn($(indentButton));
expect(textArea.value).toEqual('012\n \n89');
expect(textArea.selectionStart).toEqual(6);
@@ -381,7 +400,7 @@ describe('init markdown', () => {
textArea.value = text;
textArea.setSelectionRange(selectionStart, selectionEnd);
- textArea.dispatchEvent(outdentEvent);
+ updateTextForToolbarBtn($(outdentButton));
expect(textArea.value).toEqual(expected);
expect(textArea.selectionStart).toEqual(expectedSelectionStart);
@@ -393,7 +412,7 @@ describe('init markdown', () => {
textArea.value = '012\n\n89';
textArea.setSelectionRange(4, 4);
- textArea.dispatchEvent(outdentEvent);
+ updateTextForToolbarBtn($(outdentButton));
expect(textArea.value).toEqual('012\n\n89');
expect(textArea.selectionStart).toEqual(4);
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index 49a160c9f23..f2572ca0ad2 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -386,4 +386,16 @@ describe('text_utility', () => {
expect(textUtils.limitedCounterWithDelimiter(120)).toBe(120);
});
});
+
+ describe('base64EncodeUnicode', () => {
+ it('encodes unicode characters', () => {
+ expect(textUtils.base64EncodeUnicode('😀')).toBe('8J+YgA==');
+ });
+ });
+
+ describe('base64DecodeUnicode', () => {
+ it('decodes unicode characters', () => {
+ expect(textUtils.base64DecodeUnicode('8J+YgA==')).toBe('😀');
+ });
+ });
});
diff --git a/spec/frontend/listbox/index_spec.js b/spec/frontend/listbox/index_spec.js
index 07c6cca535a..fd41531796b 100644
--- a/spec/frontend/listbox/index_spec.js
+++ b/spec/frontend/listbox/index_spec.js
@@ -1,6 +1,6 @@
import { nextTick } from 'vue';
-import { getAllByRole, getByRole } from '@testing-library/dom';
-import { GlDropdown } from '@gitlab/ui';
+import { getAllByRole, getByTestId } from '@testing-library/dom';
+import { GlListbox } from '@gitlab/ui';
import { createWrapper } from '@vue/test-utils';
import { initListbox, parseAttributes } from '~/listbox';
import { getFixture, setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
@@ -28,20 +28,6 @@ describe('initListbox', () => {
instance = initListbox(...args);
};
- // TODO: Rewrite these finders to use better semantics once the
- // implementation is switched to GlListbox
- // https://gitlab.com/gitlab-org/gitlab/-/issues/348738
- const findToggleButton = () => document.body.querySelector('.gl-dropdown-toggle');
- const findItem = (text) => getByRole(document.body, 'menuitem', { name: text });
- const findItems = () => getAllByRole(document.body, 'menuitem');
- const findSelectedItems = () =>
- findItems().filter(
- (menuitem) =>
- !menuitem
- .querySelector('.gl-new-dropdown-item-check-icon')
- .classList.contains('gl-visibility-hidden'),
- );
-
it('returns null given no element', () => {
setup();
@@ -55,6 +41,10 @@ describe('initListbox', () => {
describe('given a valid element', () => {
let onChangeSpy;
+ const listbox = () => createWrapper(instance).findComponent(GlListbox);
+ const findToggleButton = () => getByTestId(document.body, 'base-dropdown-toggle');
+ const findSelectedItems = () => getAllByRole(document.body, 'option', { selected: true });
+
beforeEach(async () => {
setHTMLFixture(fixture);
onChangeSpy = jest.fn();
@@ -85,10 +75,9 @@ describe('initListbox', () => {
expect(instance.$el.classList).toContain('test-class-1', 'test-class-2');
});
- describe.each(parsedAttributes.items)('clicking on an item', (item) => {
+ describe.each(parsedAttributes.items)('selecting an item', (item) => {
beforeEach(async () => {
- findItem(item.text).click();
-
+ listbox().vm.$emit('select', item.value);
await nextTick();
});
@@ -108,8 +97,7 @@ describe('initListbox', () => {
});
it('passes the "right" prop through to the underlying component', () => {
- const wrapper = createWrapper(instance).findComponent(GlDropdown);
- expect(wrapper.props('right')).toBe(parsedAttributes.right);
+ expect(listbox().props('right')).toBe(parsedAttributes.right);
});
});
});
diff --git a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
index 5581fd52458..ef3c8bde3cf 100644
--- a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
+++ b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
@@ -45,7 +45,7 @@ describe('SortDropdown', () => {
const findSortingComponent = () => wrapper.findComponent(GlSorting);
const findSortDirectionToggle = () =>
- findSortingComponent().find('button[title="Sort direction"]');
+ findSortingComponent().find('button[title^="Sort direction"]');
const findDropdownToggle = () => wrapper.find('button[aria-haspopup="true"]');
const findDropdownItemByText = (text) =>
wrapper
diff --git a/spec/frontend/members/components/table/member_action_buttons_spec.js b/spec/frontend/members/components/table/member_action_buttons_spec.js
index f3f50bf620a..03cfc6ca0f6 100644
--- a/spec/frontend/members/components/table/member_action_buttons_spec.js
+++ b/spec/frontend/members/components/table/member_action_buttons_spec.js
@@ -27,7 +27,7 @@ describe('MemberActionButtons', () => {
wrapper.destroy();
});
- test.each`
+ it.each`
memberType | member | expectedComponent | expectedComponentName
${MEMBER_TYPES.user} | ${memberMock} | ${UserActionButtons} | ${'UserActionButtons'}
${MEMBER_TYPES.group} | ${group} | ${GroupActionButtons} | ${'GroupActionButtons'}
diff --git a/spec/frontend/members/components/table/member_avatar_spec.js b/spec/frontend/members/components/table/member_avatar_spec.js
index 35f82c28fc5..dc5c97f41df 100644
--- a/spec/frontend/members/components/table/member_avatar_spec.js
+++ b/spec/frontend/members/components/table/member_avatar_spec.js
@@ -22,7 +22,7 @@ describe('MemberList', () => {
wrapper.destroy();
});
- test.each`
+ it.each`
memberType | member | expectedComponent | expectedComponentName
${MEMBER_TYPES.user} | ${memberMock} | ${UserAvatar} | ${'UserAvatar'}
${MEMBER_TYPES.group} | ${group} | ${GroupAvatar} | ${'GroupAvatar'}
diff --git a/spec/frontend/members/components/table/members_table_cell_spec.js b/spec/frontend/members/components/table/members_table_cell_spec.js
index fd56699602e..0b0140b0cdb 100644
--- a/spec/frontend/members/components/table/members_table_cell_spec.js
+++ b/spec/frontend/members/components/table/members_table_cell_spec.js
@@ -95,7 +95,7 @@ describe('MembersTableCell', () => {
wrapper = null;
});
- test.each`
+ it.each`
member | expectedMemberType
${memberMock} | ${MEMBER_TYPES.user}
${group} | ${MEMBER_TYPES.group}
diff --git a/spec/frontend/members/utils_spec.js b/spec/frontend/members/utils_spec.js
index 0271483801c..8bef2096a2a 100644
--- a/spec/frontend/members/utils_spec.js
+++ b/spec/frontend/members/utils_spec.js
@@ -89,7 +89,7 @@ describe('Members Utils', () => {
});
describe('isGroup', () => {
- test.each`
+ it.each`
member | expected
${group} | ${true}
${memberMock} | ${false}
@@ -99,7 +99,7 @@ describe('Members Utils', () => {
});
describe('isDirectMember', () => {
- test.each`
+ it.each`
member | expected
${directMember} | ${true}
${inheritedMember} | ${false}
@@ -109,7 +109,7 @@ describe('Members Utils', () => {
});
describe('isCurrentUser', () => {
- test.each`
+ it.each`
currentUserId | expected
${IS_CURRENT_USER_ID} | ${true}
${IS_NOT_CURRENT_USER_ID} | ${false}
@@ -119,7 +119,7 @@ describe('Members Utils', () => {
});
describe('canRemove', () => {
- test.each`
+ it.each`
member | expected
${{ ...directMember, canRemove: true }} | ${true}
${{ ...inheritedMember, canRemove: true }} | ${false}
@@ -130,7 +130,7 @@ describe('Members Utils', () => {
});
describe('canResend', () => {
- test.each`
+ it.each`
member | expected
${invite} | ${true}
${{ ...invite, invite: { ...invite.invite, canResend: false } }} | ${false}
@@ -140,7 +140,7 @@ describe('Members Utils', () => {
});
describe('canUpdate', () => {
- test.each`
+ it.each`
member | currentUserId | expected
${{ ...directMember, canUpdate: true }} | ${IS_NOT_CURRENT_USER_ID} | ${true}
${{ ...directMember, canUpdate: true }} | ${IS_CURRENT_USER_ID} | ${false}
diff --git a/spec/frontend/merge_conflicts/store/actions_spec.js b/spec/frontend/merge_conflicts/store/actions_spec.js
index e73769cba51..50eac982e20 100644
--- a/spec/frontend/merge_conflicts/store/actions_spec.js
+++ b/spec/frontend/merge_conflicts/store/actions_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import Cookies from '~/lib/utils/cookies';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { INTERACTIVE_RESOLVE_MODE, EDIT_RESOLVE_MODE } from '~/merge_conflicts/constants';
import * as actions from '~/merge_conflicts/store/actions';
import * as types from '~/merge_conflicts/store/mutation_types';
@@ -125,7 +125,7 @@ describe('merge conflicts actions', () => {
],
[],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Failed to save merge conflicts resolutions. Please try again!',
});
});
diff --git a/spec/frontend/merge_request_spec.js b/spec/frontend/merge_request_spec.js
index bcf64204c7a..16e3e49a297 100644
--- a/spec/frontend/merge_request_spec.js
+++ b/spec/frontend/merge_request_spec.js
@@ -3,9 +3,12 @@ import $ from 'jquery';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'spec/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import MergeRequest from '~/merge_request';
+jest.mock('~/flash');
+
describe('MergeRequest', () => {
const test = {};
describe('task lists', () => {
@@ -95,8 +98,11 @@ describe('MergeRequest', () => {
await waitForPromises();
- expect(document.querySelector('.flash-container .flash-text').innerText.trim()).toBe(
- 'Someone edited this merge request at the same time you did. Please refresh the page to see changes.',
+ expect(createAlert).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message:
+ 'Someone edited this merge request at the same time you did. Please refresh the page to see changes.',
+ }),
);
});
});
diff --git a/spec/frontend/milestones/components/promote_milestone_modal_spec.js b/spec/frontend/milestones/components/promote_milestone_modal_spec.js
index 11eaa92f2b0..60657fbc9b8 100644
--- a/spec/frontend/milestones/components/promote_milestone_modal_spec.js
+++ b/spec/frontend/milestones/components/promote_milestone_modal_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { setHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import PromoteMilestoneModal from '~/milestones/components/promote_milestone_modal.vue';
@@ -103,7 +103,7 @@ describe('Promote milestone modal', () => {
wrapper.findComponent(GlModal).vm.$emit('primary');
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message: dummyError });
+ expect(createAlert).toHaveBeenCalledWith({ message: dummyError });
});
});
});
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 1de6b6e3e98..1d17a9116df 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -4,7 +4,7 @@ import { nextTick } from 'vue';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { ESC_KEY } from '~/lib/utils/keys';
import { objectToQuery } from '~/lib/utils/url_utility';
@@ -198,7 +198,7 @@ describe('Dashboard', () => {
);
await nextTick();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('does not display a warning if there are no validation warnings', async () => {
@@ -210,7 +210,7 @@ describe('Dashboard', () => {
);
await nextTick();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -275,7 +275,7 @@ describe('Dashboard', () => {
setupStoreWithData(store);
await nextTick();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
expect(store.dispatch).not.toHaveBeenCalledWith(
'monitoringDashboard/setExpandedPanel',
expect.anything(),
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index a327e234581..9873654bdda 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import {
queryToObject,
@@ -115,7 +115,7 @@ describe('dashboard invalid url parameters', () => {
createMountedWrapper();
await nextTick();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
expect(findDateTimePicker().props('value')).toEqual(defaultTimeRange);
diff --git a/spec/frontend/monitoring/requests/index_spec.js b/spec/frontend/monitoring/requests/index_spec.js
index 03bf5d70153..6f9af911a9f 100644
--- a/spec/frontend/monitoring/requests/index_spec.js
+++ b/spec/frontend/monitoring/requests/index_spec.js
@@ -129,7 +129,7 @@ describe('monitoring metrics_requests', () => {
});
});
- test.each`
+ it.each`
code | reason
${statusCodes.BAD_REQUEST} | ${'Parameters are missing or incorrect'}
${statusCodes.UNPROCESSABLE_ENTITY} | ${"Expression can't be executed"}
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index a872a7780eb..ca66768c3cc 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { backoffMockImplementation } from 'helpers/backoff_helper';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as commonUtils from '~/lib/utils/common_utils';
import statusCodes from '~/lib/utils/http_status';
@@ -82,7 +82,7 @@ describe('Monitoring store actions', () => {
mock.reset();
commonUtils.backOff.mockReset();
- createFlash.mockReset();
+ createAlert.mockReset();
});
// Setup
@@ -241,7 +241,7 @@ describe('Monitoring store actions', () => {
'receiveMetricsDashboardFailure',
new Error('Request failed with status code 500'),
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('dispatches a failure action when a message is returned', async () => {
@@ -250,7 +250,7 @@ describe('Monitoring store actions', () => {
'receiveMetricsDashboardFailure',
new Error('Request failed with status code 500'),
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: expect.stringContaining(mockDashboardsErrorResponse.message),
});
});
@@ -263,7 +263,7 @@ describe('Monitoring store actions', () => {
'receiveMetricsDashboardFailure',
new Error('Request failed with status code 500'),
);
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
});
});
@@ -328,7 +328,7 @@ describe('Monitoring store actions', () => {
},
});
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
it('dispatches fetchPrometheusMetric for each panel query', async () => {
@@ -385,7 +385,7 @@ describe('Monitoring store actions', () => {
defaultQueryParams,
});
- expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledTimes(1);
});
});
@@ -570,7 +570,7 @@ describe('Monitoring store actions', () => {
[],
[{ type: 'receiveDeploymentsDataFailure' }],
() => {
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
},
);
});
@@ -1084,8 +1084,8 @@ describe('Monitoring store actions', () => {
return testAction(fetchVariableMetricLabelValues, { defaultQueryParams }, state, [], []).then(
() => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: expect.stringContaining('error getting options for variable "label1"'),
});
},
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 31975052077..6c6c3d6b90f 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -290,7 +290,7 @@ describe('monitoring/utils', () => {
expect(() => expandedPanelPayloadFromUrl(metricsDashboardViewModel, search)).toThrow();
});
- test.each`
+ it.each`
group | title | yLabel | missingField
${'NOT_A_GROUP'} | ${title} | ${yLabel} | ${'group'}
${group} | ${'NOT_A_TITLE'} | ${yLabel} | ${'title'}
@@ -367,7 +367,7 @@ describe('monitoring/utils', () => {
],
};
- [
+ it.each([
{
input: { metrics: undefined },
output: {},
@@ -393,12 +393,10 @@ describe('monitoring/utils', () => {
output: multipleMetricExpected,
testCase: 'barChartsDataParser returns multiple series object with multiple metrics',
},
- ].forEach(({ input, output, testCase }) => {
- it(testCase, () => {
- expect(monitoringUtils.barChartsDataParser(input.metrics)).toEqual(
- expect.objectContaining(output),
- );
- });
+ ])('$testCase', ({ input, output }) => {
+ expect(monitoringUtils.barChartsDataParser(input.metrics)).toEqual(
+ expect.objectContaining(output),
+ );
});
});
diff --git a/spec/frontend/nav/components/top_nav_app_spec.js b/spec/frontend/nav/components/top_nav_app_spec.js
index 745707c1d28..b32ab5ebe09 100644
--- a/spec/frontend/nav/components/top_nav_app_spec.js
+++ b/spec/frontend/nav/components/top_nav_app_spec.js
@@ -1,5 +1,6 @@
import { GlNavItemDropdown } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
+import { mockTracking } from 'helpers/tracking_helper';
import TopNavApp from '~/nav/components/top_nav_app.vue';
import TopNavDropdownMenu from '~/nav/components/top_nav_dropdown_menu.vue';
import { TEST_NAV_DATA } from '../mock_data';
@@ -8,6 +9,14 @@ describe('~/nav/components/top_nav_app.vue', () => {
let wrapper;
const createComponent = () => {
+ wrapper = mount(TopNavApp, {
+ propsData: {
+ navData: TEST_NAV_DATA,
+ },
+ });
+ };
+
+ const createComponentShallow = () => {
wrapper = shallowMount(TopNavApp, {
propsData: {
navData: TEST_NAV_DATA,
@@ -16,6 +25,7 @@ describe('~/nav/components/top_nav_app.vue', () => {
};
const findNavItemDropdown = () => wrapper.findComponent(GlNavItemDropdown);
+ const findNavItemDropdowToggle = () => findNavItemDropdown().find('.js-top-nav-dropdown-toggle');
const findMenu = () => wrapper.findComponent(TopNavDropdownMenu);
afterEach(() => {
@@ -24,7 +34,7 @@ describe('~/nav/components/top_nav_app.vue', () => {
describe('default', () => {
beforeEach(() => {
- createComponent();
+ createComponentShallow();
});
it('renders nav item dropdown', () => {
@@ -45,4 +55,18 @@ describe('~/nav/components/top_nav_app.vue', () => {
});
});
});
+
+ describe('tracking', () => {
+ it('emits a tracking event when the toggle is clicked', () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ createComponent();
+
+ findNavItemDropdowToggle().trigger('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_nav', {
+ label: 'hamburger_menu',
+ property: 'top_navigation',
+ });
+ });
+ });
});
diff --git a/spec/frontend/notebook/cells/output/index_spec.js b/spec/frontend/notebook/cells/output/index_spec.js
index 97a7e22be60..8bf049235a9 100644
--- a/spec/frontend/notebook/cells/output/index_spec.js
+++ b/spec/frontend/notebook/cells/output/index_spec.js
@@ -53,6 +53,7 @@ describe('Output component', () => {
expect(iframe.exists()).toBe(true);
expect(iframe.element.getAttribute('sandbox')).toBe('');
expect(iframe.element.getAttribute('srcdoc')).toBe('<p>test</p>');
+ expect(iframe.element.getAttribute('scrolling')).toBe('auto');
});
it('renders multiple raw HTML outputs', () => {
diff --git a/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap b/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap
index 5f4b3e04a79..bc29903d4bf 100644
--- a/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap
+++ b/spec/frontend/notes/components/__snapshots__/notes_app_spec.js.snap
@@ -3,15 +3,15 @@
exports[`note_app when sort direction is asc shows skeleton notes after the loaded discussions 1`] = `
"<ul id=\\"notes-list\\" class=\\"notes main-notes-list timeline\\">
<noteable-discussion-stub discussion=\\"[object Object]\\" renderdifffile=\\"true\\" helppagepath=\\"\\" isoverviewtab=\\"true\\"></noteable-discussion-stub>
- <skeleton-loading-container-stub></skeleton-loading-container-stub>
- <discussion-filter-note-stub style=\\"display: none;\\"></discussion-filter-note-stub>
+ <skeleton-loading-container-stub class=\\"note-skeleton\\"></skeleton-loading-container-stub>
+ <!---->
</ul>"
`;
exports[`note_app when sort direction is desc shows skeleton notes before the loaded discussions 1`] = `
"<ul id=\\"notes-list\\" class=\\"notes main-notes-list timeline\\">
- <skeleton-loading-container-stub></skeleton-loading-container-stub>
+ <skeleton-loading-container-stub class=\\"note-skeleton\\"></skeleton-loading-container-stub>
<noteable-discussion-stub discussion=\\"[object Object]\\" renderdifffile=\\"true\\" helppagepath=\\"\\" isoverviewtab=\\"true\\"></noteable-discussion-stub>
- <discussion-filter-note-stub style=\\"display: none;\\"></discussion-filter-note-stub>
+ <!---->
</ul>"
`;
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 55e4ef42e37..701ff492702 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -7,7 +7,7 @@ import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import CommentForm from '~/notes/components/comment_form.vue';
import CommentTypeDropdown from '~/notes/components/comment_type_dropdown.vue';
@@ -71,11 +71,19 @@ describe('issue_comment_form component', () => {
};
const notableDataMockCanUpdateIssuable = createNotableDataMock({
- current_user: { can_update: true, can_create_note: true },
+ current_user: { can_update: true, can_create_note: true, can_create_confidential_note: true },
});
const notableDataMockCannotUpdateIssuable = createNotableDataMock({
- current_user: { can_update: false, can_create_note: true },
+ current_user: {
+ can_update: false,
+ can_create_note: false,
+ can_create_confidential_note: false,
+ },
+ });
+
+ const notableDataMockCannotCreateConfidentialNote = createNotableDataMock({
+ current_user: { can_update: false, can_create_note: true, can_create_confidential_note: false },
});
const mountComponent = ({
@@ -490,7 +498,7 @@ describe('issue_comment_form component', () => {
await nextTick();
await nextTick();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `Something went wrong while closing the ${type}. Please try again later.`,
});
});
@@ -526,7 +534,7 @@ describe('issue_comment_form component', () => {
await nextTick();
await nextTick();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `Something went wrong while reopening the ${type}. Please try again later.`,
});
});
@@ -562,6 +570,17 @@ describe('issue_comment_form component', () => {
expect(checkbox.element.checked).toBe(false);
});
+ it('should not render checkbox if user is not at least a reporter', () => {
+ mountComponent({
+ mountFunction: mount,
+ initialData: { note: 'confidential note' },
+ noteableData: { ...notableDataMockCannotCreateConfidentialNote },
+ });
+
+ const checkbox = findConfidentialNoteCheckbox();
+ expect(checkbox.exists()).toBe(false);
+ });
+
it.each`
noteableType | rendered | message
${'Issue'} | ${true} | ${'render'}
diff --git a/spec/frontend/notes/components/diff_discussion_header_spec.js b/spec/frontend/notes/components/diff_discussion_header_spec.js
index 5800f68b114..bb44563b87a 100644
--- a/spec/frontend/notes/components/diff_discussion_header_spec.js
+++ b/spec/frontend/notes/components/diff_discussion_header_spec.js
@@ -42,7 +42,7 @@ describe('diff_discussion_header component', () => {
expect(props).toMatchObject({
src: firstNoteAuthor.avatar_url,
alt: firstNoteAuthor.name,
- size: { default: 24, md: 32 },
+ size: 32,
});
});
});
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index d16c13d6fd3..e414ada1854 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -81,7 +81,7 @@ describe('DiscussionActions', () => {
});
});
- it(shouldRender ? 'renders resolve buttons' : 'does not render resolve buttons', () => {
+ it(`${shouldRender ? 'renders' : 'does not render'} resolve buttons`, () => {
expect(wrapper.findComponent(ResolveDiscussionButton).exists()).toBe(shouldRender);
expect(wrapper.findComponent(ResolveWithIssueButton).exists()).toBe(shouldRender);
});
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index 76177229cff..b870cda2a24 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -1,10 +1,7 @@
-import { GlSprintf } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NoteHeader from '~/notes/components/note_header.vue';
-import { AVAILABILITY_STATUS } from '~/set_status_modal/constants';
-import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
Vue.use(Vuex);
@@ -23,7 +20,6 @@ describe('NoteHeader component', () => {
const findTimestamp = () => wrapper.findComponent({ ref: 'noteTimestamp' });
const findInternalNoteIndicator = () => wrapper.findByTestId('internalNoteIndicator');
const findSpinner = () => wrapper.findComponent({ ref: 'spinner' });
- const findAuthorStatus = () => wrapper.findComponent({ ref: 'authorStatus' });
const statusHtml =
'"<span class="user-status-emoji has-tooltip" title="foo bar" data-html="true" data-placement="top"><gl-emoji title="basketball and hoop" data-name="basketball" data-unicode-version="6.0">🏀</gl-emoji></span>"';
@@ -37,22 +33,14 @@ describe('NoteHeader component', () => {
username: 'root',
show_status: true,
status_tooltip_html: statusHtml,
- availability: '',
};
- const createComponent = (props, userAttributes = false) => {
+ const createComponent = (props) => {
wrapper = shallowMountExtended(NoteHeader, {
store: new Vuex.Store({
actions,
}),
propsData: { ...props },
- stubs: { GlSprintf, UserNameWithStatus },
- provide: {
- glFeatures: {
- removeUserAttributesProjects: userAttributes,
- removeUserAttributesGroups: userAttributes,
- },
- },
});
};
@@ -61,26 +49,6 @@ describe('NoteHeader component', () => {
wrapper = null;
});
- describe('when removeUserAttributesProjects feature flag is enabled', () => {
- it('does not render busy status', () => {
- createComponent({ author: { ...author, availability: AVAILABILITY_STATUS.BUSY } }, true);
-
- expect(wrapper.find('.note-header-info').text()).not.toContain('(Busy)');
- });
-
- it('does not render author status', () => {
- createComponent({ author }, true);
-
- expect(findAuthorStatus().exists()).toBe(false);
- });
-
- it('does not render username', () => {
- createComponent({ author }, true);
-
- expect(wrapper.find('.note-header-info').text()).not.toContain('@');
- });
- });
-
it('does not render discussion actions when includeToggle is false', () => {
createComponent({
includeToggle: false,
@@ -145,39 +113,6 @@ describe('NoteHeader component', () => {
expect(wrapper.find('.js-user-link').exists()).toBe(true);
});
-
- it('renders busy status if author availability is set', () => {
- createComponent({ author: { ...author, availability: AVAILABILITY_STATUS.BUSY } });
-
- expect(wrapper.find('.js-user-link').text()).toContain('(Busy)');
- });
-
- it('renders author status', () => {
- createComponent({ author });
-
- expect(findAuthorStatus().exists()).toBe(true);
- });
-
- it('does not render author status if show_status=false', () => {
- createComponent({
- author: { ...author, status: { availability: AVAILABILITY_STATUS.BUSY }, show_status: false },
- });
-
- expect(findAuthorStatus().exists()).toBe(false);
- });
-
- it('does not render author status if status_tooltip_html=null', () => {
- createComponent({
- author: {
- ...author,
- status: { availability: AVAILABILITY_STATUS.BUSY },
- status_tooltip_html: null,
- },
- });
-
- expect(findAuthorStatus().exists()).toBe(false);
- });
-
it('renders deleted user text if author is not passed as a prop', () => {
createComponent();
@@ -270,24 +205,6 @@ describe('NoteHeader component', () => {
});
});
- describe('when author status tooltip is opened', () => {
- it('removes `title` attribute from emoji to prevent duplicate tooltips', () => {
- createComponent({
- author: {
- ...author,
- status_tooltip_html: statusHtml,
- },
- });
-
- return nextTick().then(() => {
- const authorStatus = findAuthorStatus();
- authorStatus.trigger('mouseenter');
-
- expect(authorStatus.find('gl-emoji').attributes('title')).toBeUndefined();
- });
- });
- });
-
describe('when author username link is hovered', () => {
it('toggles hover specific CSS classes on author name link', async () => {
createComponent({ author });
@@ -327,4 +244,18 @@ describe('NoteHeader component', () => {
);
});
});
+
+ it('does render username', () => {
+ createComponent({ author }, true);
+
+ expect(wrapper.find('.note-header-info').text()).toContain('@');
+ });
+
+ describe('with system note', () => {
+ it('does not render username', () => {
+ createComponent({ author, isSystemNote: true }, true);
+
+ expect(wrapper.find('.note-header-info').text()).not.toContain('@');
+ });
+ });
});
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index b044d40cbe4..3d7195752d3 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -214,7 +214,7 @@ describe('issue_note', () => {
expect(avatarProps.src).toBe(author.avatar_url);
expect(avatarProps.entityName).toBe(author.username);
expect(avatarProps.alt).toBe(author.name);
- expect(avatarProps.size).toEqual({ default: 24, md: 32 });
+ expect(avatarProps.size).toEqual(32);
});
it('should render note header content', () => {
diff --git a/spec/frontend/notes/components/notes_activity_header_spec.js b/spec/frontend/notes/components/notes_activity_header_spec.js
new file mode 100644
index 00000000000..5b3165bf401
--- /dev/null
+++ b/spec/frontend/notes/components/notes_activity_header_spec.js
@@ -0,0 +1,67 @@
+import { shallowMount } from '@vue/test-utils';
+import { __ } from '~/locale';
+import NotesActivityHeader from '~/notes/components/notes_activity_header.vue';
+import DiscussionFilter from '~/notes/components/discussion_filter.vue';
+import TimelineToggle from '~/notes/components/timeline_toggle.vue';
+import createStore from '~/notes/stores';
+import waitForPromises from 'helpers/wait_for_promises';
+import { notesFilters } from '../mock_data';
+
+describe('~/notes/components/notes_activity_header.vue', () => {
+ let wrapper;
+
+ const findTitle = () => wrapper.find('h2');
+
+ const createComponent = ({ props = {}, ...options } = {}) => {
+ wrapper = shallowMount(NotesActivityHeader, {
+ propsData: {
+ notesFilters,
+ ...props,
+ },
+ // why: Rendering async timeline toggle requires store
+ store: createStore(),
+ ...options,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders title', () => {
+ expect(findTitle().text()).toBe(__('Activity'));
+ });
+
+ it('renders discussion filter', () => {
+ expect(wrapper.findComponent(DiscussionFilter).props()).toEqual({
+ filters: notesFilters,
+ selectedValue: 0,
+ });
+ });
+
+ it('does not render timeline toggle', () => {
+ expect(wrapper.findComponent(TimelineToggle).exists()).toBe(false);
+ });
+ });
+
+ it('with notesFilterValue prop, passes to discussion filter', () => {
+ createComponent({ props: { notesFilterValue: 1 } });
+
+ expect(wrapper.findComponent(DiscussionFilter).props('selectedValue')).toBe(1);
+ });
+
+ it('with showTimelineViewToggle injected, renders timeline toggle asynchronously', async () => {
+ createComponent({ provide: { showTimelineViewToggle: () => true } });
+
+ expect(wrapper.findComponent(TimelineToggle).exists()).toBe(false);
+
+ await waitForPromises();
+
+ expect(wrapper.findComponent(TimelineToggle).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index d4cb07d97dc..9051fcab97f 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -11,6 +11,7 @@ import axios from '~/lib/utils/axios_utils';
import * as urlUtility from '~/lib/utils/url_utility';
import CommentForm from '~/notes/components/comment_form.vue';
import NotesApp from '~/notes/components/notes_app.vue';
+import NotesActivityHeader from '~/notes/components/notes_activity_header.vue';
import * as constants from '~/notes/constants';
import createStore from '~/notes/stores';
import '~/behaviors/markdown/render_gfm';
@@ -20,11 +21,14 @@ import * as mockData from '../mock_data';
const TYPE_COMMENT_FORM = 'comment-form';
const TYPE_NOTES_LIST = 'notes-list';
+const TEST_NOTES_FILTER_VALUE = 1;
const propsData = {
noteableData: mockData.noteableDataMock,
notesData: mockData.notesDataMock,
userData: mockData.userDataMock,
+ notesFilters: mockData.notesFilters,
+ notesFilterValue: TEST_NOTES_FILTER_VALUE,
};
describe('note_app', () => {
@@ -47,7 +51,7 @@ describe('note_app', () => {
axiosMock = new AxiosMockAdapter(axios);
store = createStore();
- mountComponent = () => {
+ mountComponent = ({ props = {} } = {}) => {
return mount(
{
components: {
@@ -58,7 +62,10 @@ describe('note_app', () => {
</div>`,
},
{
- propsData,
+ propsData: {
+ ...propsData,
+ ...props,
+ },
store,
},
);
@@ -144,6 +151,13 @@ describe('note_app', () => {
it('updates discussions badge', () => {
expect(document.querySelector('.js-discussions-count').textContent).toEqual('2');
});
+
+ it('should render notes activity header', () => {
+ expect(wrapper.findComponent(NotesActivityHeader).props()).toEqual({
+ notesFilterValue: TEST_NOTES_FILTER_VALUE,
+ notesFilters: mockData.notesFilters,
+ });
+ });
});
describe('render with comments disabled', () => {
@@ -151,8 +165,15 @@ describe('note_app', () => {
setHTMLFixture('<div class="js-discussions-count"></div>');
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
- store.state.commentsDisabled = true;
- wrapper = mountComponent();
+ wrapper = mountComponent({
+ // why: In this integration test, previously we manually set store.state.commentsDisabled
+ // This stopped working when we added `<discussion-filter>` into the component tree.
+ // Let's lean into the integration scope and use a prop that "disables comments".
+ props: {
+ notesFilterValue: constants.HISTORY_ONLY_FILTER_VALUE,
+ },
+ });
+
return waitForPromises();
});
@@ -358,7 +379,7 @@ describe('note_app', () => {
it('should listen hashchange event', () => {
const notesApp = wrapper.findComponent(NotesApp);
const hash = 'some dummy hash';
- jest.spyOn(urlUtility, 'getLocationHash').mockReturnValueOnce(hash);
+ jest.spyOn(urlUtility, 'getLocationHash').mockReturnValue(hash);
const setTargetNoteHash = jest.spyOn(notesApp.vm, 'setTargetNoteHash');
window.dispatchEvent(new Event('hashchange'), hash);
diff --git a/spec/frontend/notes/mixins/discussion_navigation_spec.js b/spec/frontend/notes/mixins/discussion_navigation_spec.js
index 1b4e8026d84..45625d0a23f 100644
--- a/spec/frontend/notes/mixins/discussion_navigation_spec.js
+++ b/spec/frontend/notes/mixins/discussion_navigation_spec.js
@@ -4,7 +4,6 @@ import Vuex from 'vuex';
import { setHTMLFixture } from 'helpers/fixtures';
import createEventHub from '~/helpers/event_hub_factory';
import * as utils from '~/lib/utils/common_utils';
-import eventHub from '~/notes/event_hub';
import discussionNavigation from '~/notes/mixins/discussion_navigation';
import notesModule from '~/notes/stores/modules';
@@ -35,13 +34,15 @@ describe('Discussion navigation mixin', () => {
beforeEach(() => {
setHTMLFixture(
- [...'abcde']
+ `<div class="notes">
+ ${[...'abcde']
.map(
(id) =>
`<ul class="notes" data-discussion-id="${id}"></ul>
<div class="discussion" data-discussion-id="${id}"></div>`,
)
- .join(''),
+ .join('')}
+ </div>`,
);
jest.spyOn(utils, 'scrollToElementWithContext');
@@ -58,7 +59,7 @@ describe('Discussion navigation mixin', () => {
},
diffs: {
namespaced: true,
- actions: { scrollToFile },
+ actions: { scrollToFile, disableVirtualScroller: () => {} },
state: { diffFiles: [] },
},
},
@@ -73,9 +74,6 @@ describe('Discussion navigation mixin', () => {
jest.clearAllMocks();
});
- const findDiscussion = (selector, id) =>
- document.querySelector(`${selector}[data-discussion-id="${id}"]`);
-
describe('jumpToFirstUnresolvedDiscussion method', () => {
let vm;
@@ -110,14 +108,14 @@ describe('Discussion navigation mixin', () => {
});
describe.each`
- fn | args | currentId | expected
- ${'jumpToNextDiscussion'} | ${[]} | ${null} | ${'a'}
- ${'jumpToNextDiscussion'} | ${[]} | ${'a'} | ${'c'}
- ${'jumpToNextDiscussion'} | ${[]} | ${'e'} | ${'a'}
- ${'jumpToPreviousDiscussion'} | ${[]} | ${null} | ${'e'}
- ${'jumpToPreviousDiscussion'} | ${[]} | ${'e'} | ${'c'}
- ${'jumpToPreviousDiscussion'} | ${[]} | ${'c'} | ${'a'}
- `('$fn (args = $args, currentId = $currentId)', ({ fn, args, currentId, expected }) => {
+ fn | args | currentId
+ ${'jumpToNextDiscussion'} | ${[]} | ${null}
+ ${'jumpToNextDiscussion'} | ${[]} | ${'a'}
+ ${'jumpToNextDiscussion'} | ${[]} | ${'e'}
+ ${'jumpToPreviousDiscussion'} | ${[]} | ${null}
+ ${'jumpToPreviousDiscussion'} | ${[]} | ${'e'}
+ ${'jumpToPreviousDiscussion'} | ${[]} | ${'c'}
+ `('$fn (args = $args, currentId = $currentId)', ({ fn, args, currentId }) => {
beforeEach(() => {
store.state.notes.currentDiscussionId = currentId;
});
@@ -130,125 +128,18 @@ describe('Discussion navigation mixin', () => {
await nextTick();
});
- it('expands discussion', () => {
- expect(expandDiscussion).toHaveBeenCalled();
- });
-
- it('scrolls to element', () => {
- expect(utils.scrollToElement).toHaveBeenCalled();
- });
- });
-
- describe('on `diffs` active tab', () => {
- beforeEach(async () => {
- window.mrTabs.currentAction = 'diffs';
- wrapper.vm[fn](...args);
-
+ it('expands discussion', async () => {
await nextTick();
- });
- it('sets current discussion', () => {
- expect(store.state.notes.currentDiscussionId).toEqual(expected);
- });
-
- it('expands discussion', () => {
expect(expandDiscussion).toHaveBeenCalled();
});
- it('scrolls when scrollToDiscussion is emitted', () => {
- expect(utils.scrollToElementWithContext).not.toHaveBeenCalled();
-
- eventHub.$emit('scrollToDiscussion');
-
- expect(utils.scrollToElementWithContext).toHaveBeenCalledWith(
- findDiscussion('ul.notes', expected),
- { behavior: 'auto', offset: 0 },
- );
- });
- });
-
- describe('on `other` active tab', () => {
- beforeEach(async () => {
- window.mrTabs.currentAction = 'other';
- wrapper.vm[fn](...args);
-
+ it('scrolls to element', async () => {
await nextTick();
- });
- it('sets current discussion', () => {
- expect(store.state.notes.currentDiscussionId).toEqual(expected);
- });
-
- it('does not expand discussion yet', () => {
- expect(expandDiscussion).not.toHaveBeenCalled();
- });
-
- it('shows mrTabs', () => {
- expect(window.mrTabs.tabShown).toHaveBeenCalledWith('show');
- });
-
- describe('when tab is changed', () => {
- beforeEach(() => {
- window.mrTabs.eventHub.$emit('MergeRequestTabChange');
-
- jest.runAllTimers();
- });
-
- it('expands discussion', () => {
- expect(expandDiscussion).toHaveBeenCalledWith(expect.anything(), {
- discussionId: expected,
- });
- });
-
- it('scrolls to discussion', () => {
- expect(utils.scrollToElement).toHaveBeenCalledWith(
- findDiscussion('div.discussion', expected),
- { behavior: 'auto', offset: 0 },
- );
- });
+ expect(utils.scrollToElement).toHaveBeenCalled();
});
});
});
-
- describe('virtual scrolling feature', () => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch');
-
- store.state.notes.currentDiscussionId = 'a';
- window.location.hash = 'test';
- });
-
- afterEach(() => {
- window.gon = {};
- window.location.hash = '';
- });
-
- it('resets location hash', async () => {
- wrapper.vm.jumpToNextDiscussion();
-
- await nextTick();
-
- expect(window.location.hash).toBe('');
- });
-
- it.each`
- tabValue
- ${'diffs'}
- ${'other'}
- `(
- 'calls scrollToFile with setHash as $hashValue when the tab is $tabValue',
- async ({ tabValue }) => {
- window.mrTabs.currentAction = tabValue;
-
- wrapper.vm.jumpToNextDiscussion();
-
- await nextTick();
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/scrollToFile', {
- path: 'test.js',
- });
- },
- );
- });
});
});
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
index 9fa7166474a..286f2adc1d8 100644
--- a/spec/frontend/notes/mock_data.js
+++ b/spec/frontend/notes/mock_data.js
@@ -1,4 +1,5 @@
// Copied to ee/spec/frontend/notes/mock_data.js
+import { __ } from '~/locale';
export const notesDataMock = {
discussionsPath: '/gitlab-org/gitlab-foss/issues/26/discussions.json',
@@ -35,6 +36,7 @@ export const noteableDataMock = {
can_create_note: true,
can_update: true,
can_award_emoji: true,
+ can_create_confidential_note: true,
},
description: '',
due_date: null,
@@ -1292,3 +1294,18 @@ export const draftDiffDiscussion = {
file_path: 'lib/foo.rb',
isDraft: true,
};
+
+export const notesFilters = [
+ {
+ title: __('Show all activity'),
+ value: 0,
+ },
+ {
+ title: __('Show comments only'),
+ value: 1,
+ },
+ {
+ title: __('Show history only'),
+ value: 2,
+ },
+];
diff --git a/spec/frontend/notes/utils/get_notes_filter_data_spec.js b/spec/frontend/notes/utils/get_notes_filter_data_spec.js
new file mode 100644
index 00000000000..c3a8d3bc619
--- /dev/null
+++ b/spec/frontend/notes/utils/get_notes_filter_data_spec.js
@@ -0,0 +1,44 @@
+import { getNotesFilterData } from '~/notes/utils/get_notes_filter_data';
+import { notesFilters } from '../mock_data';
+
+// what: This is the format we expect the element attribute to be in
+// why: For readability, we make this clear by hardcoding the indices instead of using `reduce`.
+const TEST_NOTES_FILTERS_ATTR = {
+ [notesFilters[0].title]: notesFilters[0].value,
+ [notesFilters[1].title]: notesFilters[1].value,
+ [notesFilters[2].title]: notesFilters[2].value,
+};
+
+describe('~/notes/utils/get_notes_filter_data', () => {
+ it.each([
+ {
+ desc: 'empty',
+ attributes: {},
+ expectation: {
+ notesFilters: [],
+ notesFilterValue: undefined,
+ },
+ },
+ {
+ desc: 'valid attributes',
+ attributes: {
+ 'data-notes-filters': JSON.stringify(TEST_NOTES_FILTERS_ATTR),
+ 'data-notes-filter-value': '1',
+ },
+ expectation: {
+ notesFilters,
+ notesFilterValue: 1,
+ },
+ },
+ ])('with $desc, parses data from element attributes', ({ attributes, expectation }) => {
+ const el = document.createElement('div');
+
+ Object.entries(attributes).forEach(([key, value]) => {
+ el.setAttribute(key, value);
+ });
+
+ const actual = getNotesFilterData(el);
+
+ expect(actual).toStrictEqual(expectation);
+ });
+});
diff --git a/spec/frontend/operation_settings/components/metrics_settings_spec.js b/spec/frontend/operation_settings/components/metrics_settings_spec.js
index 810049220ae..732dfdd42fb 100644
--- a/spec/frontend/operation_settings/components/metrics_settings_spec.js
+++ b/spec/frontend/operation_settings/components/metrics_settings_spec.js
@@ -2,7 +2,7 @@ import { GlButton, GlLink, GlFormGroup, GlFormInput, GlFormSelect } from '@gitla
import { mount, shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { refreshCurrentPage } from '~/lib/utils/url_utility';
import { timezones } from '~/monitoring/format_date';
@@ -52,7 +52,7 @@ describe('operation settings external dashboard component', () => {
}
axios.patch.mockReset();
refreshCurrentPage.mockReset();
- createFlash.mockReset();
+ createAlert.mockReset();
});
it('renders header text', () => {
@@ -208,7 +208,7 @@ describe('operation settings external dashboard component', () => {
await nextTick();
await jest.runAllTicks();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `There was an error saving your changes. ${message}`,
});
});
diff --git a/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_row_spec.js b/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_row_spec.js
index 6fe3dabc603..849215e286b 100644
--- a/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_row_spec.js
@@ -8,8 +8,8 @@ import { defaultConfig, harborTagsList } from '../../mock_data';
describe('Harbor tag list row', () => {
let wrapper;
- const findListItem = () => wrapper.find(ListItem);
- const findClipboardButton = () => wrapper.find(ClipboardButton);
+ const findListItem = () => wrapper.findComponent(ListItem);
+ const findClipboardButton = () => wrapper.findComponent(ClipboardButton);
const findByTestId = (testId) => wrapper.findByTestId(testId);
const $route = {
@@ -58,7 +58,7 @@ describe('Harbor tag list row', () => {
expect(findByTestId('name').text()).toBe(harborTagsList[0].name);
});
- describe(' clipboard button', () => {
+ describe('clipboard button', () => {
it('exists', () => {
expect(findClipboardButton().exists()).toBe(true);
});
diff --git a/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_spec.js b/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_spec.js
index 6bcf6611d07..4c6b2b6daaa 100644
--- a/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_spec.js
+++ b/spec/frontend/packages_and_registries/harbor_registry/components/tags/tags_list_spec.js
@@ -8,9 +8,9 @@ import { defaultConfig, harborTagsResponse } from '../../mock_data';
describe('Harbor Tags List', () => {
let wrapper;
- const findTagsLoader = () => wrapper.find(TagsLoader);
+ const findTagsLoader = () => wrapper.findComponent(TagsLoader);
const findTagsListRows = () => wrapper.findAllComponents(TagsListRow);
- const findRegistryList = () => wrapper.find(RegistryList);
+ const findRegistryList = () => wrapper.findComponent(RegistryList);
const mountComponent = ({ propsData, config = defaultConfig }) => {
wrapper = shallowMount(TagsList, {
diff --git a/spec/frontend/packages_and_registries/harbor_registry/pages/tags_spec.js b/spec/frontend/packages_and_registries/harbor_registry/pages/tags_spec.js
index 7e0f05e736b..10901c6ec1e 100644
--- a/spec/frontend/packages_and_registries/harbor_registry/pages/tags_spec.js
+++ b/spec/frontend/packages_and_registries/harbor_registry/pages/tags_spec.js
@@ -15,8 +15,8 @@ jest.mock('~/rest_api', () => ({
describe('Harbor Tags page', () => {
let wrapper;
- const findTagsHeader = () => wrapper.find(TagsHeader);
- const findTagsList = () => wrapper.find(TagsList);
+ const findTagsHeader = () => wrapper.findComponent(TagsHeader);
+ const findTagsList = () => wrapper.findComponent(TagsList);
const waitForHarborTagsRequest = async () => {
await waitForPromises();
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js
index 31ab108558c..bb970336b94 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/store/actions_spec.js
@@ -1,6 +1,6 @@
import testAction from 'helpers/vuex_action_helper';
import Api from '~/api';
-import createFlash from '~/flash';
+import { createAlert, VARIANT_SUCCESS, VARIANT_WARNING } from '~/flash';
import { FETCH_PACKAGE_VERSIONS_ERROR } from '~/packages_and_registries/infrastructure_registry/details/constants';
import {
fetchPackageVersions,
@@ -67,9 +67,9 @@ describe('Actions Package details store', () => {
[],
);
expect(Api.projectPackage).toHaveBeenCalledWith(packageEntity.project_id, packageEntity.id);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: FETCH_PACKAGE_VERSIONS_ERROR,
- type: 'warning',
+ variant: VARIANT_WARNING,
});
});
});
@@ -87,9 +87,9 @@ describe('Actions Package details store', () => {
Api.deleteProjectPackage = jest.fn().mockRejectedValue();
await testAction(deletePackage, undefined, { packageEntity }, [], []);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DELETE_PACKAGE_ERROR_MESSAGE,
- type: 'warning',
+ variant: VARIANT_WARNING,
});
});
});
@@ -112,18 +112,18 @@ describe('Actions Package details store', () => {
packageEntity.id,
fileId,
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
- type: 'success',
+ variant: VARIANT_SUCCESS,
});
});
it('should create flash on API error', async () => {
Api.deleteProjectPackageFile = jest.fn().mockRejectedValue();
await testAction(deletePackageFile, fileId, { packageEntity }, [], []);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
- type: 'warning',
+ variant: VARIANT_WARNING,
});
});
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js
index 93d013bb458..aca6b0942cc 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/infrastructure_title_spec.js
@@ -74,7 +74,7 @@ describe('Infrastructure Title', () => {
mountComponent({ ...exampleProps, count });
});
- it(exist ? 'exists' : 'does not exist', () => {
+ it(`${exist ? 'exists' : 'does not exist'}`, () => {
expect(findMetadataItem().exists()).toBe(exist);
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js
index db1d3f3f633..dff95364d7d 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/components/packages_list_app_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import setWindowLocation from 'helpers/set_window_location_helper';
-import createFlash from '~/flash';
+import { createAlert, VARIANT_INFO } from '~/flash';
import * as commonUtils from '~/lib/utils/common_utils';
import PackageListApp from '~/packages_and_registries/infrastructure_registry/list/components/packages_list_app.vue';
import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages_and_registries/infrastructure_registry/list/constants';
@@ -222,9 +222,9 @@ describe('packages_list_app', () => {
it(`creates a flash if the query string contains ${SHOW_DELETE_SUCCESS_ALERT}`, () => {
mountComponent();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DELETE_PACKAGE_SUCCESS_MESSAGE,
- type: 'notice',
+ variant: VARIANT_INFO,
});
});
@@ -238,7 +238,7 @@ describe('packages_list_app', () => {
setWindowLocation('?');
mountComponent();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(commonUtils.historyReplaceState).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js
index d596f2dae33..36417eaf793 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/list/stores/actions_spec.js
@@ -2,7 +2,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import Api from '~/api';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { MISSING_DELETE_PATH_ERROR } from '~/packages_and_registries/infrastructure_registry/list/constants';
import * as actions from '~/packages_and_registries/infrastructure_registry/list/stores/actions';
import * as types from '~/packages_and_registries/infrastructure_registry/list/stores/mutation_types';
@@ -107,7 +107,7 @@ describe('Actions Package list store', () => {
{ type: 'setLoading', payload: false },
],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('should force the terraform_module type when forceTerraform is true', async () => {
@@ -209,17 +209,17 @@ describe('Actions Package list store', () => {
{ type: 'setLoading', payload: false },
],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it.each`
property | actionPayload
${'_links'} | ${{}}
${'delete_api_path'} | ${{ _links: {} }}
- `('should reject and createFlash when $property is missing', ({ actionPayload }) => {
+ `('should reject and createAlert when $property is missing', ({ actionPayload }) => {
return testAction(actions.requestDeletePackage, actionPayload, null, [], []).catch((e) => {
expect(e).toEqual(new Error(MISSING_DELETE_PATH_ERROR));
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DELETE_PACKAGE_ERROR_MESSAGE,
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
index 61923233d2e..047fa04947c 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
@@ -79,6 +79,18 @@ exports[`PackageTitle renders with tags 1`] = `
texttooltip=""
/>
</div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-last-downloaded-at"
+ icon="download"
+ link=""
+ size="m"
+ text="Last downloaded Aug 17, 2021"
+ texttooltip=""
+ />
+ </div>
</div>
</div>
@@ -164,6 +176,18 @@ exports[`PackageTitle renders without tags 1`] = `
texttooltip=""
/>
</div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-last-downloaded-at"
+ icon="download"
+ link=""
+ size="m"
+ text="Last downloaded Aug 17, 2021"
+ texttooltip=""
+ />
+ </div>
</div>
</div>
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
index 37416dcd4e7..1fda77f2aaa 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
@@ -49,6 +49,7 @@ describe('PackageTitle', () => {
const findPackageSize = () => wrapper.findByTestId('package-size');
const findPipelineProject = () => wrapper.findByTestId('pipeline-project');
const findPackageRef = () => wrapper.findByTestId('package-ref');
+ const findPackageLastDownloadedAt = () => wrapper.findByTestId('package-last-downloaded-at');
const findPackageTags = () => wrapper.findComponent(PackageTags);
const findPackageBadges = () => wrapper.findAllByTestId('tag-badge');
const findSubHeaderText = () => wrapper.findByTestId('sub-header');
@@ -227,4 +228,25 @@ describe('PackageTitle', () => {
});
});
});
+
+ describe('package last downloaded at', () => {
+ it('does not display the data if missing', async () => {
+ await createComponent({
+ ...packageData(),
+ lastDownloadedAt: null,
+ });
+
+ expect(findPackageLastDownloadedAt().exists()).toBe(false);
+ });
+
+ it('correctly shows the data if present', async () => {
+ await createComponent();
+
+ expect(findPackageLastDownloadedAt().props()).toMatchObject({
+ text: 'Last downloaded Aug 17, 2021',
+ icon: 'download',
+ size: 'm',
+ });
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/functional/delete_package_spec.js b/spec/frontend/packages_and_registries/package_registry/components/functional/delete_package_spec.js
index 14a70def7d0..93c2196b210 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/functional/delete_package_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/functional/delete_package_spec.js
@@ -3,7 +3,7 @@ import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
-import createFlash from '~/flash';
+import { createAlert, VARIANT_SUCCESS, VARIANT_WARNING } from '~/flash';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
import destroyPackageMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package.mutation.graphql';
@@ -104,22 +104,22 @@ describe('DeletePackage', () => {
expect(wrapper.emitted('end')).toEqual([[]]);
});
- it('does not call createFlash', async () => {
+ it('does not call createAlert', async () => {
createComponent();
await clickOnButtonAndWait(eventPayload);
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
- it('calls createFlash with the success message when showSuccessAlert is true', async () => {
+ it('calls createAlert with the success message when showSuccessAlert is true', async () => {
createComponent({ showSuccessAlert: true });
await clickOnButtonAndWait(eventPayload);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DeletePackage.i18n.successMessage,
- type: 'success',
+ variant: VARIANT_SUCCESS,
});
});
});
@@ -141,14 +141,14 @@ describe('DeletePackage', () => {
expect(wrapper.emitted('end')).toEqual([[]]);
});
- it('calls createFlash with the error message', async () => {
+ it('calls createAlert with the error message', async () => {
createComponent({ showSuccessAlert: true });
await clickOnButtonAndWait(eventPayload);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: DeletePackage.i18n.errorMessage,
- type: 'warning',
+ variant: VARIANT_WARNING,
captureError: true,
error: expect.any(Error),
});
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index 22236424e6a..c2b6fb734d6 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -127,6 +127,7 @@ export const packageData = (extend) => ({
version: '1.0.0',
createdAt: '2020-08-17T14:23:32Z',
updatedAt: '2020-08-17T14:23:32Z',
+ lastDownloadedAt: '2021-08-17T14:23:32Z',
status: 'DEFAULT',
mavenUrl: 'http://gdk.test:3000/api/v4/projects/1/packages/maven',
npmUrl: 'http://gdk.test:3000/api/v4/projects/1/packages/npm',
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
index 83158d1cc5e..a32e76a132e 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
@@ -1,4 +1,4 @@
-import { GlEmptyState, GlBadge, GlTabs, GlTab } from '@gitlab/ui';
+import { GlEmptyState, GlBadge, GlTabs, GlTab, GlSprintf } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -6,7 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import AdditionalMetadata from '~/packages_and_registries/package_registry/components/details/additional_metadata.vue';
import PackagesApp from '~/packages_and_registries/package_registry/pages/details.vue';
@@ -86,11 +86,17 @@ describe('PackagesApp', () => {
PackageTitle,
DeletePackage,
GlModal: {
- template: '<div></div>',
+ template: `
+ <div>
+ <slot name="modal-title"></slot>
+ <p><slot></slot></p>
+ </div>
+ `,
methods: {
show: jest.fn(),
},
},
+ GlSprintf,
GlTabs,
GlTab,
},
@@ -149,7 +155,7 @@ describe('PackagesApp', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: FETCH_PACKAGE_DETAILS_ERROR_MESSAGE,
}),
@@ -245,7 +251,9 @@ describe('PackagesApp', () => {
await findDeleteButton().trigger('click');
- expect(findDeleteModal().exists()).toBe(true);
+ expect(findDeleteModal().find('p').text()).toBe(
+ 'You are about to delete version 1.0.0 of @gitlab-org/package-15. Are you sure?',
+ );
});
describe('successful request', () => {
@@ -359,6 +367,12 @@ describe('PackagesApp', () => {
expect(showDeletePackageSpy).toHaveBeenCalled();
expect(showDeleteFileSpy).not.toHaveBeenCalled();
+
+ await waitForPromises();
+
+ expect(findDeleteModal().find('p').text()).toBe(
+ 'Deleting the last package asset will remove version 1.0.0 of @gitlab-org/package-15. Are you sure?',
+ );
});
it('confirming on the modal sets the loading state', async () => {
@@ -383,7 +397,7 @@ describe('PackagesApp', () => {
await doDeleteFile();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
}),
@@ -399,7 +413,7 @@ describe('PackagesApp', () => {
await doDeleteFile();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
}),
@@ -416,7 +430,7 @@ describe('PackagesApp', () => {
await doDeleteFile();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_FILE_ERROR_MESSAGE,
}),
@@ -468,7 +482,7 @@ describe('PackagesApp', () => {
await doDeleteFiles();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_FILES_SUCCESS_MESSAGE,
}),
@@ -484,7 +498,7 @@ describe('PackagesApp', () => {
await doDeleteFiles();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
}),
@@ -501,7 +515,7 @@ describe('PackagesApp', () => {
await doDeleteFiles();
- expect(createFlash).toHaveBeenCalledWith(
+ expect(createAlert).toHaveBeenCalledWith(
expect.objectContaining({
message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
}),
@@ -533,6 +547,12 @@ describe('PackagesApp', () => {
findPackageFiles().vm.$emit('delete-files', packageFiles());
expect(showDeletePackageSpy).toHaveBeenCalled();
+
+ await waitForPromises();
+
+ expect(findDeleteModal().find('p').text()).toBe(
+ 'Deleting all package assets will remove version 1.0.0 of @gitlab-org/package-15. Are you sure?',
+ );
});
});
});
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js
index 8b60f31512b..2bb99fb8e8f 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/cleanup_image_tags_spec.js
@@ -19,6 +19,7 @@ import {
expirationPolicyPayload,
emptyExpirationPolicyPayload,
containerExpirationPolicyData,
+ nullExpirationPolicyPayload,
} from '../mock_data';
describe('Cleanup image tags project settings', () => {
@@ -98,15 +99,30 @@ describe('Cleanup image tags project settings', () => {
expect(findDescription().text()).toMatchInterpolatedText(CONTAINER_CLEANUP_POLICY_DESCRIPTION);
});
+ it('when loading does not render form or alert components', () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(),
+ });
+
+ expect(findFormComponent().exists()).toBe(false);
+ expect(findAlert().exists()).toBe(false);
+ });
+
describe('the form is disabled', () => {
- it('hides the form', () => {
- mountComponent();
+ it('hides the form', async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()),
+ });
+ await waitForPromises();
expect(findFormComponent().exists()).toBe(false);
});
- it('shows an alert', () => {
- mountComponent();
+ it('shows an alert', async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()),
+ });
+ await waitForPromises();
const text = findAlert().text();
expect(text).toContain(UNAVAILABLE_FEATURE_INTRO_TEXT);
@@ -114,8 +130,12 @@ describe('Cleanup image tags project settings', () => {
});
describe('an admin is visiting the page', () => {
- it('shows the admin part of the alert message', () => {
- mountComponent({ ...defaultProvidedValues, isAdmin: true });
+ it('shows the admin part of the alert message', async () => {
+ mountComponentWithApollo({
+ provide: { ...defaultProvidedValues, isAdmin: true },
+ resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()),
+ });
+ await waitForPromises();
const sprintf = findAlert().findComponent(GlSprintf);
expect(sprintf.text()).toBe('administration settings');
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
index 35baeaeac61..43484d26d76 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/container_expiration_policy_spec.js
@@ -16,7 +16,11 @@ import {
import expirationPolicyQuery from '~/packages_and_registries/settings/project/graphql/queries/get_expiration_policy.query.graphql';
import SettingsBlock from '~/vue_shared/components/settings/settings_block.vue';
-import { expirationPolicyPayload, emptyExpirationPolicyPayload } from '../mock_data';
+import {
+ expirationPolicyPayload,
+ emptyExpirationPolicyPayload,
+ nullExpirationPolicyPayload,
+} from '../mock_data';
describe('Container expiration policy project settings', () => {
let wrapper;
@@ -78,15 +82,30 @@ describe('Container expiration policy project settings', () => {
expect(findButton().attributes('href')).toBe(defaultProvidedValues.cleanupSettingsPath);
});
+ it('when loading does not render form or alert components', () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(),
+ });
+
+ expect(findFormComponent().exists()).toBe(false);
+ expect(findAlert().exists()).toBe(false);
+ });
+
describe('the form is disabled', () => {
- it('the form is hidden', () => {
- mountComponent();
+ it('hides the form', async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()),
+ });
+ await waitForPromises();
expect(findFormComponent().exists()).toBe(false);
});
- it('shows an alert', () => {
- mountComponent();
+ it('shows an alert', async () => {
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()),
+ });
+ await waitForPromises();
const text = findAlert().text();
expect(text).toContain(UNAVAILABLE_FEATURE_INTRO_TEXT);
@@ -94,8 +113,12 @@ describe('Container expiration policy project settings', () => {
});
describe('an admin is visiting the page', () => {
- it('shows the admin part of the alert message', () => {
- mountComponent({ ...defaultProvidedValues, isAdmin: true });
+ it('shows the admin part of the alert message', async () => {
+ mountComponentWithApollo({
+ provide: { ...defaultProvidedValues, isAdmin: true },
+ resolver: jest.fn().mockResolvedValue(nullExpirationPolicyPayload()),
+ });
+ await waitForPromises();
const sprintf = findAlert().findComponent(GlSprintf);
expect(sprintf.text()).toBe('administration settings');
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
index 0696144215c..3204ca01f99 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
@@ -29,6 +29,15 @@ export const emptyExpirationPolicyPayload = () => ({
},
});
+export const nullExpirationPolicyPayload = () => ({
+ data: {
+ project: {
+ id: '1',
+ containerExpirationPolicy: null,
+ },
+ },
+});
+
export const expirationPolicyMutationPayload = ({ override, errors = [] } = {}) => ({
data: {
updateContainerExpirationPolicy: {
diff --git a/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js b/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
index ebf21c01324..17669331370 100644
--- a/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
+++ b/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
@@ -1,9 +1,10 @@
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
-import mountComponent from 'helpers/vue_mount_component_helper';
import axios from '~/lib/utils/axios_utils';
import { redirectTo } from '~/lib/utils/url_utility';
-import stopJobsModal from '~/pages/admin/jobs/index/components/stop_jobs_modal.vue';
+import StopJobsModal from '~/pages/admin/jobs/index/components/stop_jobs_modal.vue';
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
@@ -14,20 +15,23 @@ describe('stop_jobs_modal.vue', () => {
const props = {
url: `${TEST_HOST}/stop_jobs_modal.vue/stopAll`,
};
- let vm;
+ let wrapper;
- afterEach(() => {
- vm.$destroy();
+ beforeEach(() => {
+ wrapper = mount(StopJobsModal, { propsData: props });
});
- beforeEach(() => {
- const Component = Vue.extend(stopJobsModal);
- vm = mountComponent(Component, props);
+ afterEach(() => {
+ wrapper.destroy();
});
- describe('onSubmit', () => {
+ describe('on submit', () => {
it('stops jobs and redirects to overview page', async () => {
const responseURL = `${TEST_HOST}/stop_jobs_modal.vue/jobs`;
+ // TODO: We can't use axios-mock-adapter because our current version
+ // does not support responseURL
+ //
+ // see https://gitlab.com/gitlab-org/gitlab/-/issues/375308 for details
jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(props.url);
return Promise.resolve({
@@ -37,18 +41,28 @@ describe('stop_jobs_modal.vue', () => {
});
});
- await vm.onSubmit();
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+ await nextTick();
+
expect(redirectTo).toHaveBeenCalledWith(responseURL);
});
it('displays error if stopping jobs failed', async () => {
+ Vue.config.errorHandler = () => {}; // silencing thrown error
+
const dummyError = new Error('stopping jobs failed');
+ // TODO: We can't use axios-mock-adapter because our current version
+ // does not support responseURL
+ //
+ // see https://gitlab.com/gitlab-org/gitlab/-/issues/375308 for details
jest.spyOn(axios, 'post').mockImplementation((url) => {
expect(url).toBe(props.url);
return Promise.reject(dummyError);
});
- await expect(vm.onSubmit()).rejects.toEqual(dummyError);
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+ await nextTick();
+
expect(redirectTo).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/pages/import/fogbugz/new_user_map/components/user_select_spec.js b/spec/frontend/pages/import/fogbugz/new_user_map/components/user_select_spec.js
new file mode 100644
index 00000000000..c1e1545944b
--- /dev/null
+++ b/spec/frontend/pages/import/fogbugz/new_user_map/components/user_select_spec.js
@@ -0,0 +1,81 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlListbox } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import searchUsersQuery from '~/graphql_shared/queries/users_search_all.query.graphql';
+
+import createMockApollo from 'helpers/mock_apollo_helper';
+import UserSelect from '~/pages/import/fogbugz/new_user_map/components/user_select.vue';
+
+Vue.use(VueApollo);
+
+const USERS_RESPONSE = {
+ data: {
+ users: {
+ nodes: [
+ {
+ id: 'gid://gitlab/User/44',
+ avatarUrl: '/avatar1',
+ webUrl: '/reported_user_22',
+ name: 'Birgit Steuber',
+ username: 'reported_user_22',
+ __typename: 'UserCore',
+ },
+ {
+ id: 'gid://gitlab/User/43',
+ avatarUrl: '/avatar2',
+ webUrl: '/reported_user_21',
+ name: 'Luke Spinka',
+ username: 'reported_user_21',
+ __typename: 'UserCore',
+ },
+ ],
+ __typename: 'UserCoreConnection',
+ },
+ },
+};
+
+describe('fogbugz user select component', () => {
+ let wrapper;
+ const searchQueryHandlerSuccess = jest.fn().mockResolvedValue(USERS_RESPONSE);
+
+ const createComponent = (propsData = { name: 'demo' }) => {
+ const fakeApollo = createMockApollo([[searchUsersQuery, searchQueryHandlerSuccess]]);
+
+ wrapper = shallowMount(UserSelect, {
+ apolloProvider: fakeApollo,
+ propsData,
+ });
+ };
+
+ it('renders hidden input with name from props', () => {
+ const name = 'test';
+ createComponent({ name });
+ expect(wrapper.find('input').attributes('name')).toBe(name);
+ });
+
+ it('syncs input value with value emitted from listbox', async () => {
+ createComponent();
+
+ const id = 8;
+
+ wrapper.findComponent(GlListbox).vm.$emit('select', `gid://gitlab/User/${id}`);
+ await nextTick();
+
+ expect(wrapper.get('input').attributes('value')).toBe(id.toString());
+ });
+
+ it('filters users when search is performed in listbox', async () => {
+ createComponent();
+ jest.runOnlyPendingTimers();
+
+ wrapper.findComponent(GlListbox).vm.$emit('search', 'test');
+ await nextTick();
+ jest.runOnlyPendingTimers();
+
+ expect(searchQueryHandlerSuccess).toHaveBeenCalledWith({
+ first: expect.anything(),
+ search: 'test',
+ });
+ });
+});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index f221a90da61..727c5164cdc 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -6,7 +6,7 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import { kebabCase } from 'lodash';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import * as urlUtility from '~/lib/utils/url_utility';
import ForkForm from '~/pages/projects/forks/new/components/fork_form.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -449,7 +449,7 @@ describe('ForkForm component', () => {
await submitForm();
expect(urlUtility.redirectTo).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while forking the project. Please try again.',
});
});
diff --git a/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js b/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
index 1a88aebae32..f6d3957115f 100644
--- a/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
@@ -10,7 +10,7 @@ import { mount, shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import searchQuery from '~/pages/projects/forks/new/queries/search_forkable_namespaces.query.graphql';
import ProjectNamespace from '~/pages/projects/forks/new/components/project_namespace.vue';
@@ -167,7 +167,7 @@ describe('ProjectNamespace component', () => {
});
it('creates a flash message and captures the error', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while loading data. Please refresh the page to try again.',
captureError: true,
error: expect.any(Error),
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
index 5b9c48f0d9b..f54d56c3af4 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
@@ -1,8 +1,7 @@
import $ from 'jquery';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { formatUtcOffset, formatTimezone } from '~/lib/utils/datetime_utility';
import TimezoneDropdown, {
- formatUtcOffset,
- formatTimezone,
findTimezoneByIdentifier,
} from '~/pages/projects/pipeline_schedules/shared/components/timezone_dropdown';
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index b37d2f06191..0f947e84e0f 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -1,15 +1,12 @@
import { nextTick } from 'vue';
-import { GlAlert, GlButton, GlFormInput, GlFormGroup, GlSegmentedControl } from '@gitlab/ui';
+import { GlAlert, GlButton, GlFormInput, GlFormGroup } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { mockTracking } from 'helpers/tracking_helper';
-import { stubComponent } from 'helpers/stub_component';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import ContentEditor from '~/content_editor/components/content_editor.vue';
-import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import WikiForm from '~/pages/shared/wikis/components/wiki_form.vue';
+import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
import {
CONTENT_EDITOR_LOADED_ACTION,
SAVED_USING_CONTENT_EDITOR_ACTION,
@@ -18,8 +15,6 @@ import {
WIKI_FORMAT_UPDATED_ACTION,
} from '~/pages/shared/wikis/constants';
-import MarkdownField from '~/vue_shared/components/markdown/field.vue';
-
jest.mock('~/emoji');
describe('WikiForm', () => {
@@ -30,16 +25,12 @@ describe('WikiForm', () => {
const findForm = () => wrapper.find('form');
const findTitle = () => wrapper.find('#wiki_title');
const findFormat = () => wrapper.find('#wiki_format');
- const findContent = () => wrapper.find('#wiki_content');
const findMessage = () => wrapper.find('#wiki_message');
+ const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
const findSubmitButton = () => wrapper.findByTestId('wiki-submit-button');
const findCancelButton = () => wrapper.findByTestId('wiki-cancel-button');
- const findToggleEditingModeButton = () => wrapper.findByTestId('toggle-editing-mode-button');
const findTitleHelpLink = () => wrapper.findByText('Learn more.');
const findMarkdownHelpLink = () => wrapper.findByTestId('wiki-markdown-help-link');
- const findContentEditor = () => wrapper.findComponent(ContentEditor);
- const findClassicEditor = () => wrapper.findComponent(MarkdownField);
- const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
const setFormat = (value) => {
const format = findFormat();
@@ -53,13 +44,6 @@ describe('WikiForm', () => {
await nextTick();
};
- const dispatchBeforeUnload = () => {
- const e = new Event('beforeunload');
- jest.spyOn(e, 'preventDefault');
- window.dispatchEvent(e);
- return e;
- };
-
const pageInfoNew = {
persisted: false,
uploadsPath: '/project/path/-/wikis/attachments',
@@ -103,11 +87,8 @@ describe('WikiForm', () => {
},
},
stubs: {
- MarkdownField,
GlAlert,
GlButton,
- GlSegmentedControl,
- LocalStorageSync: stubComponent(LocalStorageSync),
GlFormInput,
GlFormGroup,
},
@@ -126,6 +107,22 @@ describe('WikiForm', () => {
wrapper = null;
});
+ it('displays markdown editor', () => {
+ createWrapper({ persisted: true });
+
+ expect(findMarkdownEditor().props()).toEqual(
+ expect.objectContaining({
+ value: pageInfoPersisted.content,
+ renderMarkdownPath: pageInfoPersisted.markdownPreviewPath,
+ markdownDocsPath: pageInfoPersisted.markdownHelpPath,
+ uploadsPath: pageInfoPersisted.uploadsPath,
+ initOnAutofocus: pageInfoPersisted.persisted,
+ formFieldId: 'wiki_content',
+ formFieldName: 'wiki[content]',
+ }),
+ );
+ });
+
it.each`
title | persisted | message
${'my page'} | ${false} | ${'Create my page'}
@@ -154,7 +151,7 @@ describe('WikiForm', () => {
it('does not trim page content by default', () => {
createWrapper({ persisted: true });
- expect(findContent().element.value).toBe(' My page content ');
+ expect(findMarkdownEditor().props().value).toBe(' My page content ');
});
it.each`
@@ -168,7 +165,9 @@ describe('WikiForm', () => {
await setFormat(format);
- expect(findClassicEditor().props('enablePreview')).toBe(enabled);
+ await nextTick();
+
+ expect(findMarkdownEditor().props('enablePreview')).toBe(enabled);
});
it.each`
@@ -185,14 +184,6 @@ describe('WikiForm', () => {
expect(wrapper.text()).toContain(text);
});
- it('starts with no unload warning', () => {
- createWrapper();
-
- const e = dispatchBeforeUnload();
- expect(typeof e.returnValue).not.toBe('string');
- expect(e.preventDefault).not.toHaveBeenCalled();
- });
-
it.each`
persisted | titleHelpText | titleHelpLink
${true} | ${'You can move this page by adding the path to the beginning of the title.'} | ${'/help/user/project/wiki/index#move-a-wiki-page'}
@@ -219,15 +210,7 @@ describe('WikiForm', () => {
beforeEach(async () => {
createWrapper({ mountFn: mount, persisted: true });
- const input = findContent();
-
- await input.setValue(' Lorem ipsum dolar sit! ');
- });
-
- it('sets before unload warning', () => {
- const e = dispatchBeforeUnload();
-
- expect(e.preventDefault).toHaveBeenCalledTimes(1);
+ await findMarkdownEditor().vm.$emit('input', ' Lorem ipsum dolar sit! ');
});
describe('form submit', () => {
@@ -235,17 +218,12 @@ describe('WikiForm', () => {
await triggerFormSubmit();
});
- it('when form submitted, unsets before unload warning', () => {
- const e = dispatchBeforeUnload();
- expect(e.preventDefault).not.toHaveBeenCalled();
- });
-
it('triggers wiki format tracking event', () => {
expect(trackingSpy).toHaveBeenCalledTimes(1);
});
it('does not trim page content', () => {
- expect(findContent().element.value).toBe(' Lorem ipsum dolar sit! ');
+ expect(findMarkdownEditor().props().value).toBe(' Lorem ipsum dolar sit! ');
});
});
});
@@ -264,7 +242,7 @@ describe('WikiForm', () => {
createWrapper({ mountFn: mount });
await findTitle().setValue(title);
- await findContent().setValue(content);
+ await findMarkdownEditor().vm.$emit('input', content);
expect(findSubmitButton().props().disabled).toBe(disabledAttr);
},
@@ -296,208 +274,59 @@ describe('WikiForm', () => {
);
});
- describe('toggle editing mode control', () => {
- beforeEach(() => {
- createWrapper({ mountFn: mount });
- });
+ it.each`
+ format | enabled | action
+ ${'markdown'} | ${true} | ${'enables'}
+ ${'rdoc'} | ${false} | ${'disables'}
+ ${'asciidoc'} | ${false} | ${'disables'}
+ ${'org'} | ${false} | ${'disables'}
+ `('$action content editor when format is $format', async ({ format, enabled }) => {
+ createWrapper({ mountFn: mount });
- it.each`
- format | exists | action
- ${'markdown'} | ${true} | ${'displays'}
- ${'rdoc'} | ${false} | ${'hides'}
- ${'asciidoc'} | ${false} | ${'hides'}
- ${'org'} | ${false} | ${'hides'}
- `('$action toggle editing mode button when format is $format', async ({ format, exists }) => {
- await setFormat(format);
-
- expect(findToggleEditingModeButton().exists()).toBe(exists);
- });
+ setFormat(format);
- describe('when content editor is not active', () => {
- it('displays "Source" label in the toggle editing mode button', () => {
- expect(findToggleEditingModeButton().props().checked).toBe('source');
- });
+ await nextTick();
- describe('when clicking the toggle editing mode button', () => {
- beforeEach(async () => {
- await findToggleEditingModeButton().vm.$emit('input', 'richText');
- });
+ expect(findMarkdownEditor().props().enableContentEditor).toBe(enabled);
+ });
- it('hides the classic editor', () => {
- expect(findClassicEditor().exists()).toBe(false);
- });
+ describe('when markdown editor activates the content editor', () => {
+ beforeEach(async () => {
+ createWrapper({ mountFn: mount, persisted: true });
- it('shows the content editor', () => {
- expect(findContentEditor().exists()).toBe(true);
- });
- });
+ await findMarkdownEditor().vm.$emit('contentEditor');
});
- describe('markdown editor type persistance', () => {
- it('loads content editor by default if it is persisted in local storage', async () => {
- expect(findClassicEditor().exists()).toBe(true);
- expect(findContentEditor().exists()).toBe(false);
-
- // enable content editor
- await findLocalStorageSync().vm.$emit('input', 'richText');
-
- expect(findContentEditor().exists()).toBe(true);
- expect(findClassicEditor().exists()).toBe(false);
- });
+ it('disables the format dropdown', () => {
+ expect(findFormat().element.getAttribute('disabled')).toBeDefined();
});
- describe('when content editor is active', () => {
- beforeEach(() => {
- createWrapper();
- findToggleEditingModeButton().vm.$emit('input', 'richText');
- });
-
- it('displays "Edit Rich" label in the toggle editing mode button', () => {
- expect(findToggleEditingModeButton().props().checked).toBe('richText');
- });
-
- describe('when clicking the toggle editing mode button', () => {
- beforeEach(async () => {
- await findToggleEditingModeButton().vm.$emit('input', 'source');
- await nextTick();
- });
-
- it('hides the content editor', () => {
- expect(findContentEditor().exists()).toBe(false);
- });
-
- it('displays the classic editor', () => {
- expect(findClassicEditor().exists()).toBe(true);
- });
- });
-
- describe('when content editor is loading', () => {
- beforeEach(async () => {
- findContentEditor().vm.$emit('loading');
-
- await nextTick();
- });
-
- it('disables toggle editing mode button', () => {
- expect(findToggleEditingModeButton().attributes().disabled).toBe('true');
- });
-
- describe('when content editor loads successfully', () => {
- it('enables toggle editing mode button', async () => {
- findContentEditor().vm.$emit('loadingSuccess');
-
- await nextTick();
-
- expect(findToggleEditingModeButton().attributes().disabled).not.toBeDefined();
- });
- });
-
- describe('when content editor fails to load', () => {
- it('enables toggle editing mode button', async () => {
- findContentEditor().vm.$emit('loadingError');
-
- await nextTick();
-
- expect(findToggleEditingModeButton().attributes().disabled).not.toBeDefined();
- });
- });
+ it('sends tracking event when editor loads', async () => {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, CONTENT_EDITOR_LOADED_ACTION, {
+ label: WIKI_CONTENT_EDITOR_TRACKING_LABEL,
});
});
- });
-
- describe('wiki content editor', () => {
- describe('clicking "Edit rich text": editor fails to load', () => {
- beforeEach(async () => {
- createWrapper({ mountFn: mount });
- mock.onPost(/preview-markdown/).reply(400);
-
- await findToggleEditingModeButton().vm.$emit('input', 'richText');
-
- // try waiting for content editor to load (but it will never actually load)
- await waitForPromises();
- });
-
- it('disables the submit button', () => {
- expect(findSubmitButton().props('disabled')).toBe(true);
- });
-
- describe('toggling editing modes to the classic editor', () => {
- beforeEach(() => {
- return findToggleEditingModeButton().vm.$emit('input', 'source');
- });
- it('switches to classic editor', () => {
- expect(findContentEditor().exists()).toBe(false);
- expect(findClassicEditor().exists()).toBe(true);
- });
- });
- });
+ describe('when triggering form submit', () => {
+ const updatedMarkdown = 'hello **world**';
- describe('clicking "Edit rich text": editor loads successfully', () => {
beforeEach(async () => {
- createWrapper({ persisted: true, mountFn: mount });
-
- mock.onPost(/preview-markdown/).reply(200, { body: '<p>hello <strong>world</strong></p>' });
-
- await findToggleEditingModeButton().vm.$emit('input', 'richText');
- await waitForPromises();
- });
-
- it('shows the rich text editor when loading finishes', async () => {
- expect(findContentEditor().exists()).toBe(true);
+ findMarkdownEditor().vm.$emit('input', updatedMarkdown);
+ await triggerFormSubmit();
});
- it('sends tracking event when editor loads', async () => {
- expect(trackingSpy).toHaveBeenCalledWith(undefined, CONTENT_EDITOR_LOADED_ACTION, {
+ it('triggers tracking events on form submit', async () => {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, SAVED_USING_CONTENT_EDITOR_ACTION, {
label: WIKI_CONTENT_EDITOR_TRACKING_LABEL,
});
- });
-
- it('disables the format dropdown', () => {
- expect(findFormat().element.getAttribute('disabled')).toBeDefined();
- });
- describe('when wiki content is updated', () => {
- const updatedMarkdown = 'hello **world**';
-
- beforeEach(() => {
- findContentEditor().vm.$emit('change', {
- empty: false,
- changed: true,
- markdown: updatedMarkdown,
- });
- });
-
- it('sets before unload warning', () => {
- const e = dispatchBeforeUnload();
- expect(e.preventDefault).toHaveBeenCalledTimes(1);
- });
-
- it('unsets before unload warning on form submit', async () => {
- await triggerFormSubmit();
-
- const e = dispatchBeforeUnload();
- expect(e.preventDefault).not.toHaveBeenCalled();
- });
-
- it('triggers tracking events on form submit', async () => {
- await triggerFormSubmit();
- expect(trackingSpy).toHaveBeenCalledWith(undefined, SAVED_USING_CONTENT_EDITOR_ACTION, {
- label: WIKI_CONTENT_EDITOR_TRACKING_LABEL,
- });
-
- expect(trackingSpy).toHaveBeenCalledWith(undefined, WIKI_FORMAT_UPDATED_ACTION, {
- label: WIKI_FORMAT_LABEL,
- extra: {
- value: findFormat().element.value,
- old_format: pageInfoPersisted.format,
- project_path: pageInfoPersisted.path,
- },
- });
- });
-
- it('sets content field to the content editor updated markdown', async () => {
- expect(findContent().element.value).toBe(updatedMarkdown);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, WIKI_FORMAT_UPDATED_ACTION, {
+ label: WIKI_FORMAT_LABEL,
+ extra: {
+ value: findFormat().element.value,
+ old_format: pageInfoPersisted.format,
+ project_path: pageInfoPersisted.path,
+ },
});
});
});
diff --git a/spec/frontend/pdf/page_spec.js b/spec/frontend/pdf/page_spec.js
index 07a7f1bb2ff..4cf83a3252d 100644
--- a/spec/frontend/pdf/page_spec.js
+++ b/spec/frontend/pdf/page_spec.js
@@ -1,17 +1,16 @@
-import Vue, { nextTick } from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import PageComponent from '~/pdf/page/index.vue';
jest.mock('pdfjs-dist/webpack', () => {
- return { default: jest.requireActual('pdfjs-dist/build/pdf') };
+ return { default: jest.requireActual('pdfjs-dist/legacy/build/pdf') };
});
describe('Page component', () => {
- const Component = Vue.extend(PageComponent);
- let vm;
+ let wrapper;
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders the page when mounting', async () => {
@@ -20,16 +19,18 @@ describe('Page component', () => {
getViewport: jest.fn().mockReturnValue({}),
};
- vm = mountComponent(Component, {
- page: testPage,
- number: 1,
+ wrapper = mount(PageComponent, {
+ propsData: {
+ page: testPage,
+ number: 1,
+ },
});
- expect(vm.rendering).toBe(true);
-
await nextTick();
- expect(testPage.render).toHaveBeenCalledWith(vm.renderContext);
- expect(vm.rendering).toBe(false);
+ expect(testPage.render).toHaveBeenCalledWith({
+ canvasContext: wrapper.find('canvas').element.getContext('2d'),
+ viewport: testPage.getViewport(),
+ });
});
});
diff --git a/spec/frontend/performance_bar/components/request_warning_spec.js b/spec/frontend/performance_bar/components/request_warning_spec.js
index d558c7b018a..9dd8ea9f933 100644
--- a/spec/frontend/performance_bar/components/request_warning_spec.js
+++ b/spec/frontend/performance_bar/components/request_warning_spec.js
@@ -2,14 +2,21 @@ import { shallowMount } from '@vue/test-utils';
import RequestWarning from '~/performance_bar/components/request_warning.vue';
describe('request warning', () => {
+ let wrapper;
const htmlId = 'request-123';
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
describe('when the request has warnings', () => {
- const wrapper = shallowMount(RequestWarning, {
- propsData: {
- htmlId,
- warnings: ['gitaly calls: 30 over 10', 'gitaly duration: 1500 over 1000'],
- },
+ beforeEach(() => {
+ wrapper = shallowMount(RequestWarning, {
+ propsData: {
+ htmlId,
+ warnings: ['gitaly calls: 30 over 10', 'gitaly duration: 1500 over 1000'],
+ },
+ });
});
it('adds a warning emoji with the correct ID', () => {
@@ -19,11 +26,13 @@ describe('request warning', () => {
});
describe('when the request does not have warnings', () => {
- const wrapper = shallowMount(RequestWarning, {
- propsData: {
- htmlId,
- warnings: [],
- },
+ beforeEach(() => {
+ wrapper = shallowMount(RequestWarning, {
+ propsData: {
+ htmlId,
+ warnings: [],
+ },
+ });
});
it('does nothing', () => {
diff --git a/spec/frontend/persistent_user_callout_spec.js b/spec/frontend/persistent_user_callout_spec.js
index 9cd5bb9e9a1..c9574208900 100644
--- a/spec/frontend/persistent_user_callout_spec.js
+++ b/spec/frontend/persistent_user_callout_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import PersistentUserCallout from '~/persistent_user_callout';
@@ -108,7 +108,7 @@ describe('PersistentUserCallout', () => {
await waitForPromises();
expect(persistentUserCallout.container.remove).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while dismissing the alert. Refresh the page and try again.',
});
});
@@ -214,7 +214,7 @@ describe('PersistentUserCallout', () => {
await waitForPromises();
expect(window.location.assign).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message:
'An error occurred while acknowledging the notification. Refresh the page and try again.',
});
diff --git a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
index 8f6f4d8cff9..f0347ad19ac 100644
--- a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
+++ b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
@@ -360,7 +360,7 @@ describe('Pipeline editor branch switcher', () => {
});
describe('loading icon', () => {
- test.each`
+ it.each`
isQueryLoading | isRendered
${true} | ${true}
${false} | ${false}
diff --git a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
index 8e0a73b6e7c..c76c3460e99 100644
--- a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
+++ b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
@@ -7,6 +7,7 @@ describe('Pipeline editor empty state', () => {
let wrapper;
const defaultProvide = {
emptyStateIllustrationPath: 'my/svg/path',
+ usesExternalConfig: false,
};
const createComponent = ({ provide } = {}) => {
@@ -18,6 +19,7 @@ describe('Pipeline editor empty state', () => {
const findFileNav = () => wrapper.findComponent(PipelineEditorFileNav);
const findSvgImage = () => wrapper.find('img');
const findTitle = () => wrapper.find('h1');
+ const findExternalCiInstructions = () => wrapper.find('p');
const findConfirmButton = () => wrapper.findComponent(GlButton);
const findDescription = () => wrapper.findComponent(GlSprintf);
@@ -25,7 +27,33 @@ describe('Pipeline editor empty state', () => {
wrapper.destroy();
});
- describe('template', () => {
+ describe('when project uses an external CI config', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: { usesExternalConfig: true },
+ });
+ });
+
+ it('renders an svg image', () => {
+ expect(findSvgImage().exists()).toBe(true);
+ });
+
+ it('renders the correct title and instructions', () => {
+ expect(findTitle().exists()).toBe(true);
+ expect(findExternalCiInstructions().exists()).toBe(true);
+
+ expect(findExternalCiInstructions().html()).toContain(
+ wrapper.vm.$options.i18n.externalCiInstructions,
+ );
+ expect(findTitle().text()).toBe(wrapper.vm.$options.i18n.externalCiNote);
+ });
+
+ it('does not render the CTA button', () => {
+ expect(findConfirmButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when project uses an accessible CI config', () => {
beforeEach(() => {
createComponent();
});
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index 1989f23a415..9fe1536d3f5 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -55,11 +55,12 @@ jest.mock('~/lib/utils/url_utility', () => ({
const localVue = createLocalVue();
localVue.use(VueApollo);
-const mockProvide = {
+const defaultProvide = {
ciConfigPath: mockCiConfigPath,
defaultBranch: mockDefaultBranch,
newMergeRequestPath: mockNewMergeRequestPath,
projectFullPath: mockProjectFullPath,
+ usesExternalConfig: false,
};
describe('Pipeline editor app component', () => {
@@ -79,7 +80,7 @@ describe('Pipeline editor app component', () => {
stubs = {},
} = {}) => {
wrapper = shallowMount(PipelineEditorApp, {
- provide: { ...mockProvide, ...provide },
+ provide: { ...defaultProvide, ...provide },
stubs,
mocks: {
$apollo: {
@@ -229,6 +230,22 @@ describe('Pipeline editor app component', () => {
mockLatestCommitShaQuery.mockResolvedValue(mockCommitShaResults);
});
+ describe('when project uses an external CI config file', () => {
+ beforeEach(async () => {
+ await createComponentWithApollo({
+ provide: {
+ usesExternalConfig: true,
+ },
+ });
+ });
+
+ it('shows an empty state and does not show editor home component', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findAlert().exists()).toBe(false);
+ expect(findEditorHome().exists()).toBe(false);
+ });
+ });
+
describe('when file exists', () => {
beforeEach(async () => {
await createComponentWithApollo();
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
index e317d1ddcc2..2b06660c4b3 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -149,20 +149,20 @@ describe('Pipeline editor home wrapper', () => {
await nextTick();
- expect(findCommitSection().exists()).toBe(shouldShow);
+ expect(findCommitSection().isVisible()).toBe(shouldShow);
},
);
it('shows the commit form again when coming back to the create tab', async () => {
- expect(findCommitSection().exists()).toBe(true);
+ expect(findCommitSection().isVisible()).toBe(true);
findPipelineEditorTabs().vm.$emit('set-current-tab', MERGED_TAB);
await nextTick();
- expect(findCommitSection().exists()).toBe(false);
+ expect(findCommitSection().isVisible()).toBe(false);
findPipelineEditorTabs().vm.$emit('set-current-tab', CREATE_TAB);
await nextTick();
- expect(findCommitSection().exists()).toBe(true);
+ expect(findCommitSection().isVisible()).toBe(true);
});
describe('rendering with tab params', () => {
@@ -178,7 +178,7 @@ describe('Pipeline editor home wrapper', () => {
setWindowLocation(`https://gitlab.test/ci/editor/?tab=${TABS_INDEX[tab]}`);
await createComponent({ stubs: { PipelineEditorTabs } });
- expect(findCommitSection().exists()).toBe(shouldShow);
+ expect(findCommitSection().isVisible()).toBe(shouldShow);
},
);
});
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
index 5ce29bd6c5d..3e699b93fd3 100644
--- a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
@@ -1,72 +1,101 @@
-import { GlForm, GlSprintf, GlLoadingIcon } from '@gitlab/ui';
-import { mount, shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlForm, GlDropdownItem, GlSprintf, GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
import CreditCardValidationRequiredAlert from 'ee_component/billings/components/cc_validation_required_alert.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility';
import PipelineNewForm from '~/pipeline_new/components/pipeline_new_form.vue';
+import ciConfigVariablesQuery from '~/pipeline_new/graphql/queries/ci_config_variables.graphql';
+import { resolvers } from '~/pipeline_new/graphql/resolvers';
import RefsDropdown from '~/pipeline_new/components/refs_dropdown.vue';
import {
+ mockCreditCardValidationRequiredError,
+ mockCiConfigVariablesResponse,
+ mockCiConfigVariablesResponseWithoutDesc,
+ mockEmptyCiConfigVariablesResponse,
+ mockError,
mockQueryParams,
mockPostParams,
mockProjectId,
- mockError,
mockRefs,
- mockCreditCardValidationRequiredError,
+ mockYamlVariables,
} from '../mock_data';
+Vue.use(VueApollo);
+
jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
}));
const projectRefsEndpoint = '/root/project/refs';
const pipelinesPath = '/root/project/-/pipelines';
-const configVariablesPath = '/root/project/-/pipelines/config_variables';
+const projectPath = '/root/project/-/pipelines/config_variables';
const newPipelinePostResponse = { id: 1 };
const defaultBranch = 'main';
describe('Pipeline New Form', () => {
let wrapper;
let mock;
+ let mockApollo;
+ let mockCiConfigVariables;
let dummySubmitEvent;
const findForm = () => wrapper.findComponent(GlForm);
const findRefsDropdown = () => wrapper.findComponent(RefsDropdown);
- const findSubmitButton = () => wrapper.find('[data-testid="run_pipeline_button"]');
- const findVariableRows = () => wrapper.findAll('[data-testid="ci-variable-row"]');
- const findRemoveIcons = () => wrapper.findAll('[data-testid="remove-ci-variable-row"]');
- const findDropdowns = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-type"]');
- const findKeyInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-key"]');
- const findValueInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-value"]');
- const findErrorAlert = () => wrapper.find('[data-testid="run-pipeline-error-alert"]');
- const findWarningAlert = () => wrapper.find('[data-testid="run-pipeline-warning-alert"]');
+ const findSubmitButton = () => wrapper.findByTestId('run_pipeline_button');
+ const findVariableRows = () => wrapper.findAllByTestId('ci-variable-row');
+ const findRemoveIcons = () => wrapper.findAllByTestId('remove-ci-variable-row');
+ const findVariableTypes = () => wrapper.findAllByTestId('pipeline-form-ci-variable-type');
+ const findKeyInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-key');
+ const findValueInputs = () => wrapper.findAllByTestId('pipeline-form-ci-variable-value');
+ const findValueDropdowns = () =>
+ wrapper.findAllByTestId('pipeline-form-ci-variable-value-dropdown');
+ const findValueDropdownItems = (dropdown) => dropdown.findAllComponents(GlDropdownItem);
+ const findErrorAlert = () => wrapper.findByTestId('run-pipeline-error-alert');
+ const findWarningAlert = () => wrapper.findByTestId('run-pipeline-warning-alert');
const findWarningAlertSummary = () => findWarningAlert().findComponent(GlSprintf);
- const findWarnings = () => wrapper.findAll('[data-testid="run-pipeline-warning"]');
+ const findWarnings = () => wrapper.findAllByTestId('run-pipeline-warning');
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findCCAlert = () => wrapper.findComponent(CreditCardValidationRequiredAlert);
const getFormPostParams = () => JSON.parse(mock.history.post[0].data);
- const selectBranch = (branch) => {
+ const selectBranch = async (branch) => {
// Select a branch in the dropdown
findRefsDropdown().vm.$emit('input', {
shortName: branch,
fullName: `refs/heads/${branch}`,
});
+
+ await waitForPromises();
+ };
+
+ const changeKeyInputValue = async (keyInputIndex, value) => {
+ const input = findKeyInputs().at(keyInputIndex);
+ input.element.value = value;
+ input.trigger('change');
+
+ await nextTick();
};
- const createComponent = (props = {}, method = shallowMount) => {
+ const createComponentWithApollo = ({ method = shallowMountExtended, props = {} } = {}) => {
+ const handlers = [[ciConfigVariablesQuery, mockCiConfigVariables]];
+ mockApollo = createMockApollo(handlers, resolvers);
+
wrapper = method(PipelineNewForm, {
+ apolloProvider: mockApollo,
provide: {
projectRefsEndpoint,
},
propsData: {
projectId: mockProjectId,
pipelinesPath,
- configVariablesPath,
+ projectPath,
defaultBranch,
refParam: defaultBranch,
settingsLink: '',
@@ -78,7 +107,7 @@ describe('Pipeline New Form', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {});
+ mockCiConfigVariables = jest.fn();
mock.onGet(projectRefsEndpoint).reply(httpStatusCodes.OK, mockRefs);
dummySubmitEvent = {
@@ -87,24 +116,20 @@ describe('Pipeline New Form', () => {
});
afterEach(() => {
- wrapper.destroy();
- wrapper = null;
-
mock.restore();
+ wrapper.destroy();
});
describe('Form', () => {
beforeEach(async () => {
- createComponent(mockQueryParams, mount);
-
- mock.onPost(pipelinesPath).reply(httpStatusCodes.OK, newPipelinePostResponse);
-
+ mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
+ createComponentWithApollo({ props: mockQueryParams, method: mountExtended });
await waitForPromises();
});
it('displays the correct values for the provided query params', async () => {
- expect(findDropdowns().at(0).props('text')).toBe('Variable');
- expect(findDropdowns().at(1).props('text')).toBe('File');
+ expect(findVariableTypes().at(0).props('text')).toBe('Variable');
+ expect(findVariableTypes().at(1).props('text')).toBe('File');
expect(findRefsDropdown().props('value')).toEqual({ shortName: 'tag-1' });
expect(findVariableRows()).toHaveLength(3);
});
@@ -117,7 +142,7 @@ describe('Pipeline New Form', () => {
it('displays an empty variable for the user to fill out', async () => {
expect(findKeyInputs().at(2).element.value).toBe('');
expect(findValueInputs().at(2).element.value).toBe('');
- expect(findDropdowns().at(2).props('text')).toBe('Variable');
+ expect(findVariableTypes().at(2).props('text')).toBe('Variable');
});
it('does not display remove icon for last row', () => {
@@ -147,13 +172,12 @@ describe('Pipeline New Form', () => {
describe('Pipeline creation', () => {
beforeEach(async () => {
+ mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
mock.onPost(pipelinesPath).reply(httpStatusCodes.OK, newPipelinePostResponse);
-
- await waitForPromises();
});
it('does not submit the native HTML form', async () => {
- createComponent();
+ createComponentWithApollo();
findForm().vm.$emit('submit', dummySubmitEvent);
@@ -161,7 +185,7 @@ describe('Pipeline New Form', () => {
});
it('disables the submit button immediately after submitting', async () => {
- createComponent();
+ createComponentWithApollo();
expect(findSubmitButton().props('disabled')).toBe(false);
@@ -172,7 +196,7 @@ describe('Pipeline New Form', () => {
});
it('creates pipeline with full ref and variables', async () => {
- createComponent();
+ createComponentWithApollo();
findForm().vm.$emit('submit', dummySubmitEvent);
await waitForPromises();
@@ -182,7 +206,7 @@ describe('Pipeline New Form', () => {
});
it('creates a pipeline with short ref and variables from the query params', async () => {
- createComponent(mockQueryParams);
+ createComponentWithApollo({ props: mockQueryParams });
await waitForPromises();
@@ -197,64 +221,51 @@ describe('Pipeline New Form', () => {
describe('When the ref has been changed', () => {
beforeEach(async () => {
- createComponent({}, mount);
+ mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
+ createComponentWithApollo({ method: mountExtended });
await waitForPromises();
});
- it('variables persist between ref changes', async () => {
- selectBranch('main');
-
- await waitForPromises();
- const mainInput = findKeyInputs().at(0);
- mainInput.element.value = 'build_var';
- mainInput.trigger('change');
+ it('variables persist between ref changes', async () => {
+ await selectBranch('main');
+ await changeKeyInputValue(0, 'build_var');
- await nextTick();
+ await selectBranch('branch-1');
+ await changeKeyInputValue(0, 'deploy_var');
- selectBranch('branch-1');
+ await selectBranch('main');
- await waitForPromises();
+ expect(findKeyInputs().at(0).element.value).toBe('build_var');
+ expect(findVariableRows().length).toBe(2);
- const branchOneInput = findKeyInputs().at(0);
- branchOneInput.element.value = 'deploy_var';
- branchOneInput.trigger('change');
+ await selectBranch('branch-1');
- await nextTick();
+ expect(findKeyInputs().at(0).element.value).toBe('deploy_var');
+ expect(findVariableRows().length).toBe(2);
+ });
- selectBranch('main');
+ it('skips query call when form variables are already cached', async () => {
+ await selectBranch('main');
+ await changeKeyInputValue(0, 'build_var');
- await waitForPromises();
+ expect(mockCiConfigVariables).toHaveBeenCalledTimes(1);
- expect(findKeyInputs().at(0).element.value).toBe('build_var');
- expect(findVariableRows().length).toBe(2);
+ await selectBranch('branch-1');
- selectBranch('branch-1');
+ expect(mockCiConfigVariables).toHaveBeenCalledTimes(2);
- await waitForPromises();
+ // no additional call since `main` form values have been cached
+ await selectBranch('main');
- expect(findKeyInputs().at(0).element.value).toBe('deploy_var');
- expect(findVariableRows().length).toBe(2);
+ expect(mockCiConfigVariables).toHaveBeenCalledTimes(2);
});
});
describe('when yml defines a variable', () => {
- const mockYmlKey = 'yml_var';
- const mockYmlValue = 'yml_var_val';
- const mockYmlMultiLineValue = `A value
- with multiple
- lines`;
- const mockYmlDesc = 'A var from yml.';
-
it('loading icon is shown when content is requested and hidden when received', async () => {
- createComponent(mockQueryParams, mount);
-
- mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
- [mockYmlKey]: {
- value: mockYmlValue,
- description: mockYmlDesc,
- },
- });
+ mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
+ createComponentWithApollo({ props: mockQueryParams, method: mountExtended });
expect(findLoadingIcon().exists()).toBe(true);
@@ -263,51 +274,62 @@ describe('Pipeline New Form', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
- it('multi-line strings are added to the value field without removing line breaks', async () => {
- createComponent(mockQueryParams, mount);
+ describe('with different predefined values', () => {
+ beforeEach(async () => {
+ mockCiConfigVariables.mockResolvedValue(mockCiConfigVariablesResponse);
+ createComponentWithApollo({ method: mountExtended });
+ await waitForPromises();
+ });
+
+ it('multi-line strings are added to the value field without removing line breaks', () => {
+ expect(findValueInputs().at(1).element.value).toBe(mockYamlVariables[1].value);
+ });
- mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
- [mockYmlKey]: {
- value: mockYmlMultiLineValue,
- description: mockYmlDesc,
- },
+ it('multiple predefined values are rendered as a dropdown', () => {
+ const dropdown = findValueDropdowns().at(0);
+ const dropdownItems = findValueDropdownItems(dropdown);
+ const { valueOptions } = mockYamlVariables[2];
+
+ expect(dropdownItems.at(0).text()).toBe(valueOptions[0]);
+ expect(dropdownItems.at(1).text()).toBe(valueOptions[1]);
+ expect(dropdownItems.at(2).text()).toBe(valueOptions[2]);
});
- await waitForPromises();
+ it('variables with multiple predefined values sets the first option as the default', () => {
+ const dropdown = findValueDropdowns().at(0);
+ const { valueOptions } = mockYamlVariables[2];
- expect(findValueInputs().at(0).element.value).toBe(mockYmlMultiLineValue);
+ expect(dropdown.props('text')).toBe(valueOptions[0]);
+ });
});
describe('with description', () => {
beforeEach(async () => {
- createComponent(mockQueryParams, mount);
-
- mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
- [mockYmlKey]: {
- value: mockYmlValue,
- description: mockYmlDesc,
- },
- });
-
+ mockCiConfigVariables.mockResolvedValue(mockCiConfigVariablesResponse);
+ createComponentWithApollo({ props: mockQueryParams, method: mountExtended });
await waitForPromises();
});
it('displays all the variables', async () => {
- expect(findVariableRows()).toHaveLength(4);
+ expect(findVariableRows()).toHaveLength(6);
});
it('displays a variable from yml', () => {
- expect(findKeyInputs().at(0).element.value).toBe(mockYmlKey);
- expect(findValueInputs().at(0).element.value).toBe(mockYmlValue);
+ expect(findKeyInputs().at(0).element.value).toBe(mockYamlVariables[0].key);
+ expect(findValueInputs().at(0).element.value).toBe(mockYamlVariables[0].value);
});
it('displays a variable from provided query params', () => {
- expect(findKeyInputs().at(1).element.value).toBe('test_var');
- expect(findValueInputs().at(1).element.value).toBe('test_var_val');
+ expect(findKeyInputs().at(3).element.value).toBe(
+ Object.keys(mockQueryParams.variableParams)[0],
+ );
+ expect(findValueInputs().at(3).element.value).toBe(
+ Object.values(mockQueryParams.fileParams)[0],
+ );
});
it('adds a description to the first variable from yml', () => {
- expect(findVariableRows().at(0).text()).toContain(mockYmlDesc);
+ expect(findVariableRows().at(0).text()).toContain(mockYamlVariables[0].description);
});
it('removes the description when a variable key changes', async () => {
@@ -316,39 +338,27 @@ describe('Pipeline New Form', () => {
await nextTick();
- expect(findVariableRows().at(0).text()).not.toContain(mockYmlDesc);
+ expect(findVariableRows().at(0).text()).not.toContain(mockYamlVariables[0].description);
});
});
describe('without description', () => {
beforeEach(async () => {
- createComponent(mockQueryParams, mount);
-
- mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
- [mockYmlKey]: {
- value: mockYmlValue,
- description: null,
- },
- yml_var2: {
- value: 'yml_var2_val',
- },
- yml_var3: {
- description: '',
- },
- });
-
+ mockCiConfigVariables.mockResolvedValue(mockCiConfigVariablesResponseWithoutDesc);
+ createComponentWithApollo({ method: mountExtended });
await waitForPromises();
});
- it('displays all the variables', async () => {
- expect(findVariableRows()).toHaveLength(3);
+ it('displays variables with description only', async () => {
+ expect(findVariableRows()).toHaveLength(2); // extra empty variable is added at the end
});
});
});
describe('Form errors and warnings', () => {
beforeEach(() => {
- createComponent();
+ mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
+ createComponentWithApollo();
});
describe('when the refs cannot be loaded', () => {
diff --git a/spec/frontend/pipeline_new/mock_data.js b/spec/frontend/pipeline_new/mock_data.js
index e99684ff417..e95a65171fc 100644
--- a/spec/frontend/pipeline_new/mock_data.js
+++ b/spec/frontend/pipeline_new/mock_data.js
@@ -65,3 +65,62 @@ export const mockVariables = [
},
{ uniqueId: 'var-refs/heads/main4', variable_type: 'env_var', key: '', value: '' },
];
+
+export const mockYamlVariables = [
+ {
+ description: 'This is a variable with a value.',
+ key: 'VAR_WITH_VALUE',
+ value: 'test_value',
+ valueOptions: null,
+ },
+ {
+ description: 'This is a variable with a multi-line value.',
+ key: 'VAR_WITH_MULTILINE',
+ value: `this is
+ a multiline value`,
+ valueOptions: null,
+ },
+ {
+ description: 'This is a variable with predefined values.',
+ key: 'VAR_WITH_OPTIONS',
+ value: 'development',
+ valueOptions: ['development', 'staging', 'production'],
+ },
+];
+
+export const mockYamlVariablesWithoutDesc = [
+ {
+ description: 'This is a variable with a value.',
+ key: 'VAR_WITH_VALUE',
+ value: 'test_value',
+ valueOptions: null,
+ },
+ {
+ description: null,
+ key: 'VAR_WITH_MULTILINE',
+ value: `this is
+ a multiline value`,
+ valueOptions: null,
+ },
+ {
+ description: null,
+ key: 'VAR_WITH_OPTIONS',
+ value: 'development',
+ valueOptions: ['development', 'staging', 'production'],
+ },
+];
+
+export const mockCiConfigVariablesQueryResponse = (ciConfigVariables) => ({
+ data: {
+ project: {
+ id: 1,
+ ciConfigVariables,
+ },
+ },
+});
+
+export const mockCiConfigVariablesResponse = mockCiConfigVariablesQueryResponse(mockYamlVariables);
+export const mockEmptyCiConfigVariablesResponse = mockCiConfigVariablesQueryResponse([]);
+export const mockCiConfigVariablesResponseWithoutDesc = mockCiConfigVariablesQueryResponse(
+ mockYamlVariablesWithoutDesc,
+);
diff --git a/spec/frontend/pipeline_schedules/components/pipeline_schedules_form_spec.js b/spec/frontend/pipeline_schedules/components/pipeline_schedules_form_spec.js
new file mode 100644
index 00000000000..4b5a9611251
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/pipeline_schedules_form_spec.js
@@ -0,0 +1,25 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlForm } from '@gitlab/ui';
+import PipelineSchedulesForm from '~/pipeline_schedules/components/pipeline_schedules_form.vue';
+
+describe('Pipeline schedules form', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineSchedulesForm);
+ };
+
+ const findForm = () => wrapper.findComponent(GlForm);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays form', () => {
+ expect(findForm().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/components/pipeline_schedules_spec.js b/spec/frontend/pipeline_schedules/components/pipeline_schedules_spec.js
new file mode 100644
index 00000000000..cce8f480928
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/pipeline_schedules_spec.js
@@ -0,0 +1,161 @@
+import { GlAlert, GlLoadingIcon, GlModal } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import PipelineSchedules from '~/pipeline_schedules/components/pipeline_schedules.vue';
+import PipelineSchedulesTable from '~/pipeline_schedules/components/table/pipeline_schedules_table.vue';
+import deletePipelineScheduleMutation from '~/pipeline_schedules/graphql/mutations/delete_pipeline_schedule.mutation.graphql';
+import getPipelineSchedulesQuery from '~/pipeline_schedules/graphql/queries/get_pipeline_schedules.query.graphql';
+import {
+ mockGetPipelineSchedulesGraphQLResponse,
+ mockPipelineScheduleNodes,
+ deleteMutationResponse,
+} from '../mock_data';
+
+Vue.use(VueApollo);
+
+describe('Pipeline schedules app', () => {
+ let wrapper;
+
+ const successHandler = jest.fn().mockResolvedValue(mockGetPipelineSchedulesGraphQLResponse);
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+
+ const deleteMutationHandlerSuccess = jest.fn().mockResolvedValue(deleteMutationResponse);
+ const deleteMutationHandlerFailed = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+
+ const createMockApolloProvider = (
+ requestHandlers = [[getPipelineSchedulesQuery, successHandler]],
+ ) => {
+ return createMockApollo(requestHandlers);
+ };
+
+ const createComponent = (requestHandlers) => {
+ wrapper = shallowMount(PipelineSchedules, {
+ provide: {
+ fullPath: 'gitlab-org/gitlab',
+ },
+ apolloProvider: createMockApolloProvider(requestHandlers),
+ });
+ };
+
+ const findTable = () => wrapper.findComponent(PipelineSchedulesTable);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findModal = () => wrapper.findComponent(GlModal);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays table', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findTable().exists()).toBe(true);
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('fetches query and passes an array of pipeline schedules', async () => {
+ createComponent();
+
+ expect(successHandler).toHaveBeenCalled();
+
+ await waitForPromises();
+
+ expect(findTable().props('schedules')).toEqual(mockPipelineScheduleNodes);
+ });
+
+ it('handles loading state', async () => {
+ createComponent();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+
+ await waitForPromises();
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('shows query error alert', async () => {
+ createComponent([[getPipelineSchedulesQuery, failedHandler]]);
+
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe('There was a problem fetching pipeline schedules.');
+ });
+
+ it('shows delete mutation error alert', async () => {
+ createComponent([
+ [getPipelineSchedulesQuery, successHandler],
+ [deletePipelineScheduleMutation, deleteMutationHandlerFailed],
+ ]);
+
+ await waitForPromises();
+
+ findModal().vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe('There was a problem deleting the pipeline schedule.');
+ });
+
+ it('deletes pipeline schedule and refetches query', async () => {
+ createComponent([
+ [getPipelineSchedulesQuery, successHandler],
+ [deletePipelineScheduleMutation, deleteMutationHandlerSuccess],
+ ]);
+
+ jest.spyOn(wrapper.vm.$apollo.queries.schedules, 'refetch');
+
+ await waitForPromises();
+
+ const scheduleId = mockPipelineScheduleNodes[0].id;
+
+ findTable().vm.$emit('showDeleteModal', scheduleId);
+
+ expect(wrapper.vm.$apollo.queries.schedules.refetch).not.toHaveBeenCalled();
+
+ findModal().vm.$emit('primary');
+
+ await waitForPromises();
+
+ expect(deleteMutationHandlerSuccess).toHaveBeenCalledWith({
+ id: scheduleId,
+ });
+ expect(wrapper.vm.$apollo.queries.schedules.refetch).toHaveBeenCalled();
+ });
+
+ it('modal should be visible after event', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findModal().props('visible')).toBe(false);
+
+ findTable().vm.$emit('showDeleteModal', mockPipelineScheduleNodes[0].id);
+
+ await nextTick();
+
+ expect(findModal().props('visible')).toBe(true);
+ });
+
+ it('modal should be hidden', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ findTable().vm.$emit('showDeleteModal', mockPipelineScheduleNodes[0].id);
+
+ await nextTick();
+
+ expect(findModal().props('visible')).toBe(true);
+
+ findModal().vm.$emit('hide');
+
+ await nextTick();
+
+ expect(findModal().props('visible')).toBe(false);
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js
new file mode 100644
index 00000000000..ecc1bdeb679
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js
@@ -0,0 +1,49 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PipelineScheduleActions from '~/pipeline_schedules/components/table/cells/pipeline_schedule_actions.vue';
+import { mockPipelineScheduleNodes, mockPipelineScheduleAsGuestNodes } from '../../../mock_data';
+
+describe('Pipeline schedule actions', () => {
+ let wrapper;
+
+ const defaultProps = {
+ schedule: mockPipelineScheduleNodes[0],
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = shallowMountExtended(PipelineScheduleActions, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findAllButtons = () => wrapper.findAllComponents(GlButton);
+ const findDeleteBtn = () => wrapper.findByTestId('delete-pipeline-schedule-btn');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays action buttons', () => {
+ createComponent();
+
+ expect(findAllButtons()).toHaveLength(3);
+ });
+
+ it('does not display action buttons', () => {
+ createComponent({ schedule: mockPipelineScheduleAsGuestNodes[0] });
+
+ expect(findAllButtons()).toHaveLength(0);
+ });
+
+ it('delete button emits showDeleteModal event and schedule id', () => {
+ createComponent();
+
+ findDeleteBtn().vm.$emit('click');
+
+ expect(wrapper.emitted()).toEqual({
+ showDeleteModal: [[mockPipelineScheduleNodes[0].id]],
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
new file mode 100644
index 00000000000..5a47b24232f
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
@@ -0,0 +1,42 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
+import PipelineScheduleLastPipeline from '~/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline.vue';
+import { mockPipelineScheduleNodes } from '../../../mock_data';
+
+describe('Pipeline schedule last pipeline', () => {
+ let wrapper;
+
+ const defaultProps = {
+ schedule: mockPipelineScheduleNodes[2],
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = shallowMountExtended(PipelineScheduleLastPipeline, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findCIBadge = () => wrapper.findComponent(CiBadge);
+ const findStatusText = () => wrapper.findByTestId('pipeline-schedule-status-text');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays pipeline status', () => {
+ createComponent();
+
+ expect(findCIBadge().exists()).toBe(true);
+ expect(findCIBadge().props('status')).toBe(defaultProps.schedule.lastPipeline.detailedStatus);
+ expect(findStatusText().exists()).toBe(false);
+ });
+
+ it('displays "none" status text', () => {
+ createComponent({ schedule: mockPipelineScheduleNodes[0] });
+
+ expect(findStatusText().text()).toBe('None');
+ expect(findCIBadge().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_next_run_spec.js b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_next_run_spec.js
new file mode 100644
index 00000000000..b1bdc1e91a0
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_next_run_spec.js
@@ -0,0 +1,43 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PipelineScheduleNextRun from '~/pipeline_schedules/components/table/cells/pipeline_schedule_next_run.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import { mockPipelineScheduleNodes } from '../../../mock_data';
+
+describe('Pipeline schedule next run', () => {
+ let wrapper;
+
+ const defaultProps = {
+ schedule: mockPipelineScheduleNodes[0],
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = shallowMountExtended(PipelineScheduleNextRun, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findTimeAgo = () => wrapper.findComponent(TimeAgoTooltip);
+ const findInactive = () => wrapper.findByTestId('pipeline-schedule-inactive');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays time ago', () => {
+ createComponent();
+
+ expect(findTimeAgo().exists()).toBe(true);
+ expect(findInactive().exists()).toBe(false);
+ expect(findTimeAgo().props('time')).toBe(defaultProps.schedule.realNextRun);
+ });
+
+ it('displays inactive state', () => {
+ const inactiveSchedule = mockPipelineScheduleNodes[1];
+ createComponent({ schedule: inactiveSchedule });
+
+ expect(findInactive().text()).toBe('Inactive');
+ expect(findTimeAgo().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_owner_spec.js b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_owner_spec.js
new file mode 100644
index 00000000000..3ab04958f5e
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_owner_spec.js
@@ -0,0 +1,40 @@
+import { GlAvatar, GlAvatarLink } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import PipelineScheduleOwner from '~/pipeline_schedules/components/table/cells/pipeline_schedule_owner.vue';
+import { mockPipelineScheduleNodes } from '../../../mock_data';
+
+describe('Pipeline schedule owner', () => {
+ let wrapper;
+
+ const defaultProps = {
+ schedule: mockPipelineScheduleNodes[0],
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = shallowMount(PipelineScheduleOwner, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
+ const findAvatarLink = () => wrapper.findComponent(GlAvatarLink);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays avatar', () => {
+ expect(findAvatar().exists()).toBe(true);
+ expect(findAvatar().props('src')).toBe(defaultProps.schedule.owner.avatarUrl);
+ });
+
+ it('avatar links to user', () => {
+ expect(findAvatarLink().attributes('href')).toBe(defaultProps.schedule.owner.webPath);
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js
new file mode 100644
index 00000000000..6817e58790b
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/table/cells/pipeline_schedule_target_spec.js
@@ -0,0 +1,41 @@
+import { GlIcon, GlLink } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import PipelineScheduleTarget from '~/pipeline_schedules/components/table/cells/pipeline_schedule_target.vue';
+import { mockPipelineScheduleNodes } from '../../../mock_data';
+
+describe('Pipeline schedule target', () => {
+ let wrapper;
+
+ const defaultProps = {
+ schedule: mockPipelineScheduleNodes[0],
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = shallowMount(PipelineScheduleTarget, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findLink = () => wrapper.findComponent(GlLink);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays icon', () => {
+ expect(findIcon().exists()).toBe(true);
+ expect(findIcon().props('name')).toBe('fork');
+ });
+
+ it('displays ref link', () => {
+ expect(findLink().attributes('href')).toBe(defaultProps.schedule.refPath);
+ expect(findLink().text()).toBe(defaultProps.schedule.refForDisplay);
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/components/table/pipeline_schedules_table_spec.js b/spec/frontend/pipeline_schedules/components/table/pipeline_schedules_table_spec.js
new file mode 100644
index 00000000000..914897946ee
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/table/pipeline_schedules_table_spec.js
@@ -0,0 +1,39 @@
+import { GlTableLite } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import PipelineSchedulesTable from '~/pipeline_schedules/components/table/pipeline_schedules_table.vue';
+import { mockPipelineScheduleNodes } from '../../mock_data';
+
+describe('Pipeline schedules table', () => {
+ let wrapper;
+
+ const defaultProps = {
+ schedules: mockPipelineScheduleNodes,
+ };
+
+ const createComponent = (props = defaultProps) => {
+ wrapper = mountExtended(PipelineSchedulesTable, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findTable = () => wrapper.findComponent(GlTableLite);
+ const findScheduleDescription = () => wrapper.findByTestId('pipeline-schedule-description');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays table', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it('displays schedule description', () => {
+ expect(findScheduleDescription().text()).toBe('pipeline schedule');
+ });
+});
diff --git a/spec/frontend/pipeline_schedules/mock_data.js b/spec/frontend/pipeline_schedules/mock_data.js
new file mode 100644
index 00000000000..0a60998d8fb
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/mock_data.js
@@ -0,0 +1,35 @@
+// Fixture located at spec/frontend/fixtures/pipeline_schedules.rb
+import mockGetPipelineSchedulesGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.json';
+import mockGetPipelineSchedulesAsGuestGraphQLResponse from 'test_fixtures/graphql/pipeline_schedules/get_pipeline_schedules.query.graphql.as_guest.json';
+
+const {
+ data: {
+ project: {
+ pipelineSchedules: { nodes },
+ },
+ },
+} = mockGetPipelineSchedulesGraphQLResponse;
+
+const {
+ data: {
+ project: {
+ pipelineSchedules: { nodes: guestNodes },
+ },
+ },
+} = mockGetPipelineSchedulesAsGuestGraphQLResponse;
+
+export const mockPipelineScheduleNodes = nodes;
+
+export const mockPipelineScheduleAsGuestNodes = guestNodes;
+
+export const deleteMutationResponse = {
+ data: {
+ pipelineScheduleDelete: {
+ clientMutationId: null,
+ errors: [],
+ __typename: 'PipelineScheduleDeletePayload',
+ },
+ },
+};
+
+export { mockGetPipelineSchedulesGraphQLResponse };
diff --git a/spec/frontend/pipeline_wizard/components/commit_spec.js b/spec/frontend/pipeline_wizard/components/commit_spec.js
index d7e019c642e..fa30b9c2b97 100644
--- a/spec/frontend/pipeline_wizard/components/commit_spec.js
+++ b/spec/frontend/pipeline_wizard/components/commit_spec.js
@@ -211,7 +211,7 @@ describe('Pipeline Wizard - Commit Page', () => {
}) => {
let consoleSpy;
- beforeAll(async () => {
+ beforeEach(async () => {
createComponent(
{
filename,
@@ -246,7 +246,7 @@ describe('Pipeline Wizard - Commit Page', () => {
await waitForPromises();
});
- afterAll(() => {
+ afterEach(() => {
wrapper.destroy();
});
diff --git a/spec/frontend/pipeline_wizard/components/editor_spec.js b/spec/frontend/pipeline_wizard/components/editor_spec.js
index 26e4b8eb0ea..dd0a609043a 100644
--- a/spec/frontend/pipeline_wizard/components/editor_spec.js
+++ b/spec/frontend/pipeline_wizard/components/editor_spec.js
@@ -3,12 +3,20 @@ import { Document } from 'yaml';
import YamlEditor from '~/pipeline_wizard/components/editor.vue';
describe('Pages Yaml Editor wrapper', () => {
+ let wrapper;
+
const defaultOptions = {
propsData: { doc: new Document({ foo: 'bar' }), filename: 'foo.yml' },
};
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
describe('mount hook', () => {
- const wrapper = mount(YamlEditor, defaultOptions);
+ beforeEach(() => {
+ wrapper = mount(YamlEditor, defaultOptions);
+ });
it('editor is mounted', () => {
expect(wrapper.vm.editor).not.toBeUndefined();
@@ -19,16 +27,11 @@ describe('Pages Yaml Editor wrapper', () => {
describe('watchers', () => {
describe('doc', () => {
const doc = new Document({ baz: ['bar'] });
- let wrapper;
beforeEach(() => {
wrapper = mount(YamlEditor, defaultOptions);
});
- afterEach(() => {
- wrapper.destroy();
- });
-
it("causes the editor's value to be set to the stringified document", async () => {
await wrapper.setProps({ doc });
expect(wrapper.vm.editor.getValue()).toEqual(doc.toString());
@@ -48,7 +51,10 @@ describe('Pages Yaml Editor wrapper', () => {
describe('highlight', () => {
const highlight = 'foo';
- const wrapper = mount(YamlEditor, defaultOptions);
+
+ beforeEach(() => {
+ wrapper = mount(YamlEditor, defaultOptions);
+ });
it('calls editor.highlight(path, keep=true)', async () => {
const highlightSpy = jest.spyOn(wrapper.vm.yamlEditorExtension.obj, 'highlight');
diff --git a/spec/frontend/pipeline_wizard/components/widgets/list_spec.js b/spec/frontend/pipeline_wizard/components/widgets/list_spec.js
index 796356634bc..c9e9f5caebe 100644
--- a/spec/frontend/pipeline_wizard/components/widgets/list_spec.js
+++ b/spec/frontend/pipeline_wizard/components/widgets/list_spec.js
@@ -22,6 +22,9 @@ describe('Pipeline Wizard - List Widget', () => {
const setValueOnInputField = (value, atIndex = 0) => {
return findGlFormInputGroupByIndex(atIndex).vm.$emit('input', value);
};
+ const getValueOfInputField = (atIndex = 0) => {
+ return findGlFormInputGroupByIndex(atIndex).get('input').element.value;
+ };
const findAddStepButton = () => wrapper.findByTestId('add-step-button');
const addStep = () => findAddStepButton().vm.$emit('click');
@@ -103,6 +106,24 @@ describe('Pipeline Wizard - List Widget', () => {
expect(addStepBtn.text()).toBe('add another step');
});
+ it('deletes the correct input item', async () => {
+ createComponent({}, mountExtended);
+
+ await addStep();
+ await addStep();
+ setValueOnInputField('foo', 0);
+ setValueOnInputField('bar', 1);
+ setValueOnInputField('baz', 2);
+
+ const button = findAllGlFormInputGroups().at(1).find('[data-testid="remove-step-button"]');
+
+ button.vm.$emit('click');
+ await nextTick();
+
+ expect(getValueOfInputField(0)).toBe('foo');
+ expect(getValueOfInputField(1)).toBe('baz');
+ });
+
it('the "add step" button increases the number of input fields', async () => {
createComponent();
diff --git a/spec/frontend/pipeline_wizard/components/wrapper_spec.js b/spec/frontend/pipeline_wizard/components/wrapper_spec.js
index f064bf01c86..d5b78cebcb3 100644
--- a/spec/frontend/pipeline_wizard/components/wrapper_spec.js
+++ b/spec/frontend/pipeline_wizard/components/wrapper_spec.js
@@ -132,7 +132,7 @@ describe('Pipeline Wizard - wrapper.vue', () => {
expectStepDef,
expectProgressBarValue,
}) => {
- beforeAll(async () => {
+ beforeEach(async () => {
createComponent();
for (const emittedValue of navigationEventChain) {
@@ -145,7 +145,7 @@ describe('Pipeline Wizard - wrapper.vue', () => {
}
});
- afterAll(() => {
+ afterEach(() => {
wrapper.destroy();
});
@@ -184,11 +184,11 @@ describe('Pipeline Wizard - wrapper.vue', () => {
});
describe('editor overlay', () => {
- beforeAll(() => {
+ beforeEach(() => {
createComponent();
});
- afterAll(() => {
+ afterEach(() => {
wrapper.destroy();
});
@@ -236,11 +236,11 @@ describe('Pipeline Wizard - wrapper.vue', () => {
});
describe('line highlights', () => {
- beforeAll(() => {
+ beforeEach(() => {
createComponent();
});
- afterAll(() => {
+ afterEach(() => {
wrapper.destroy();
});
@@ -266,7 +266,7 @@ describe('Pipeline Wizard - wrapper.vue', () => {
});
describe('integration test', () => {
- beforeAll(async () => {
+ beforeEach(async () => {
createComponent({}, mountExtended);
});
@@ -290,14 +290,25 @@ describe('Pipeline Wizard - wrapper.vue', () => {
describe('navigating back', () => {
let inputField;
- beforeAll(async () => {
+ beforeEach(async () => {
+ createComponent({}, mountExtended);
+
+ findFirstInputFieldForTarget('$FOO').setValue('fooVal');
+ await nextTick();
+
+ findFirstVisibleStep().vm.$emit('next');
+ await nextTick();
+
+ findFirstInputFieldForTarget('$BAR').setValue('barVal');
+ await nextTick();
+
findFirstVisibleStep().vm.$emit('back');
await nextTick();
inputField = findFirstInputFieldForTarget('$FOO');
});
- afterAll(() => {
+ afterEach(() => {
wrapper.destroy();
inputField = undefined;
});
diff --git a/spec/frontend/pipeline_wizard/mock/yaml.js b/spec/frontend/pipeline_wizard/mock/yaml.js
index 12b6f1052b2..014a32c5700 100644
--- a/spec/frontend/pipeline_wizard/mock/yaml.js
+++ b/spec/frontend/pipeline_wizard/mock/yaml.js
@@ -62,8 +62,7 @@ export const steps = `
export const compiledScenario1 = `foo: fooVal
`;
-export const compiledScenario2 = `foo: fooVal
-bar: barVal
+export const compiledScenario2 = `bar: barVal
`;
export const compiledScenario3 = `foo: newFooVal
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js b/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
index bfbb5f934b9..d1da7cb3acf 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
+++ b/spec/frontend/pipelines/components/jobs/failed_jobs_app_spec.js
@@ -4,7 +4,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import FailedJobsApp from '~/pipelines/components/jobs/failed_jobs_app.vue';
import FailedJobsTable from '~/pipelines/components/jobs/failed_jobs_table.vue';
import GetFailedJobsQuery from '~/pipelines/graphql/queries/get_failed_jobs.query.graphql';
@@ -70,7 +70,7 @@ describe('Failed Jobs App', () => {
await waitForPromises();
expect(findJobsTable().exists()).toBe(true);
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
it('handles query fetch error correctly', async () => {
@@ -80,7 +80,7 @@ describe('Failed Jobs App', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching the failed jobs.',
});
});
diff --git a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js b/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
index b597a3bf4b0..0df15afd70d 100644
--- a/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
+++ b/spec/frontend/pipelines/components/jobs/failed_jobs_table_spec.js
@@ -4,7 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { redirectTo } from '~/lib/utils/url_utility';
import FailedJobsTable from '~/pipelines/components/jobs/failed_jobs_table.vue';
import RetryFailedJobMutation from '~/pipelines/graphql/mutations/retry_failed_job.mutation.graphql';
@@ -88,7 +88,7 @@ describe('Failed Jobs Table', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem retrying the failed job.',
});
});
diff --git a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
index 89b6f764b2f..9bc14266593 100644
--- a/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
+++ b/spec/frontend/pipelines/components/jobs/jobs_app_spec.js
@@ -4,7 +4,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import JobsApp from '~/pipelines/components/jobs/jobs_app.vue';
import JobsTable from '~/jobs/components/table/jobs_table.vue';
import getPipelineJobsQuery from '~/pipelines/graphql/queries/get_pipeline_jobs.query.graphql';
@@ -88,7 +88,7 @@ describe('Jobs app', () => {
expect(findJobsTable().exists()).toBe(true);
expect(findSkeletonLoader().exists()).toBe(false);
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
it('handles job fetch error correctly', async () => {
@@ -98,7 +98,7 @@ describe('Jobs app', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while fetching the pipelines jobs.',
});
});
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
index 149b40330e2..f0dae8ebcbe 100644
--- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js
+++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlDropdown, GlSprintf, GlLoadingIcon } from '@gitlab/ui';
+import { GlAlert, GlDropdown, GlSprintf, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
@@ -46,6 +46,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
},
stubs: {
GlSprintf,
+ GlDropdown,
},
}),
);
@@ -56,6 +57,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAllArtifactItems = () => wrapper.findAllByTestId(artifactItemTestId);
const findFirstArtifactItem = () => wrapper.findByTestId(artifactItemTestId);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findEmptyMessage = () => wrapper.findByTestId('artifacts-empty-message');
beforeEach(() => {
@@ -75,7 +77,7 @@ describe('Pipeline Multi Actions Dropdown', () => {
});
describe('Artifacts', () => {
- it('should fetch artifacts on dropdown click', async () => {
+ it('should fetch artifacts and show search box on dropdown click', async () => {
const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
mockAxios.onGet(endpoint).replyOnce(200, { artifacts });
createComponent();
@@ -84,6 +86,16 @@ describe('Pipeline Multi Actions Dropdown', () => {
expect(mockAxios.history.get).toHaveLength(1);
expect(wrapper.vm.artifacts).toEqual(artifacts);
+ expect(findSearchBox().exists()).toBe(true);
+ });
+
+ it('should focus the search box when opened with artifacts', () => {
+ createComponent({ mockData: { artifacts } });
+ wrapper.vm.$refs.searchInput.focusInput = jest.fn();
+
+ findDropdown().vm.$emit('shown');
+
+ expect(wrapper.vm.$refs.searchInput.focusInput).toHaveBeenCalled();
});
it('should render all the provided artifacts when search query is empty', () => {
@@ -109,10 +121,11 @@ describe('Pipeline Multi Actions Dropdown', () => {
expect(findFirstArtifactItem().text()).toBe(artifacts[0].name);
});
- it('should render empty message when no artifacts are found', () => {
+ it('should render empty message and no search box when no artifacts are found', () => {
createComponent({ mockData: { artifacts: [] } });
expect(findEmptyMessage().exists()).toBe(true);
+ expect(findSearchBox().exists()).toBe(false);
});
describe('while loading artifacts', () => {
diff --git a/spec/frontend/pipelines/pipelines_actions_spec.js b/spec/frontend/pipelines/pipelines_actions_spec.js
index fdfced38dca..26e61efc4f6 100644
--- a/spec/frontend/pipelines/pipelines_actions_spec.js
+++ b/spec/frontend/pipelines/pipelines_actions_spec.js
@@ -5,7 +5,7 @@ import { nextTick } from 'vue';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import PipelinesManualActions from '~/pipelines/components/pipelines_list/pipelines_manual_actions.vue';
@@ -95,7 +95,7 @@ describe('Pipelines Actions dropdown', () => {
await waitForPromises();
expect(findDropdown().props('loading')).toBe(false);
- expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index cc2ff90de57..a3f15e25f36 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -11,7 +11,7 @@ import { mockTracking } from 'helpers/tracking_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
-import createFlash from '~/flash';
+import { createAlert, VARIANT_WARNING } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
@@ -261,9 +261,14 @@ describe('Pipelines', () => {
);
});
- it('tracks tab change click', () => {
+ it.each(['all', 'finished', 'branches', 'tags'])('tracks %p tab click', async (scope) => {
+ goToTab(scope);
+
+ await waitForPromises();
+
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_filter_tabs', {
label: TRACKING_CATEGORIES.tabs,
+ property: scope,
});
});
});
@@ -356,8 +361,11 @@ describe('Pipelines', () => {
});
it('displays a warning message if raw text search is used', () => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({ message: RAW_TEXT_WARNING, type: 'warning' });
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
+ message: RAW_TEXT_WARNING,
+ variant: VARIANT_WARNING,
+ });
});
it('should update browser bar', () => {
diff --git a/spec/frontend/pipelines/test_reports/stores/actions_spec.js b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
index 74a9d8c354f..6e61ef97257 100644
--- a/spec/frontend/pipelines/test_reports/stores/actions_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import testReports from 'test_fixtures/pipelines/test_report.json';
import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as actions from '~/pipelines/stores/test_reports/actions';
import * as types from '~/pipelines/stores/test_reports/mutation_types';
@@ -56,7 +56,7 @@ describe('Actions TestReports Store', () => {
[],
[{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
index f9b9da01a2b..ed0cc71eb97 100644
--- a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
@@ -1,7 +1,7 @@
import testReports from 'test_fixtures/pipelines/test_report.json';
import * as types from '~/pipelines/stores/test_reports/mutation_types';
import mutations from '~/pipelines/stores/test_reports/mutations';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
jest.mock('~/flash.js');
@@ -61,7 +61,7 @@ describe('Mutations TestReports Store', () => {
it('should show a flash message otherwise', () => {
mutations[types.SET_SUITE_ERROR](mockState, {});
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/profile/account/components/update_username_spec.js b/spec/frontend/profile/account/components/update_username_spec.js
index e331eed1863..575df9fb3c0 100644
--- a/spec/frontend/profile/account/components/update_username_spec.js
+++ b/spec/frontend/profile/account/components/update_username_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import UpdateUsername from '~/profile/account/components/update_username.vue';
@@ -149,7 +149,7 @@ describe('UpdateUsername component', () => {
await expect(wrapper.vm.onConfirm()).rejects.toThrow();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Invalid username',
});
});
@@ -161,7 +161,7 @@ describe('UpdateUsername component', () => {
await expect(wrapper.vm.onConfirm()).rejects.toThrow();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while updating your username, please try again.',
});
});
diff --git a/spec/frontend/profile/preferences/components/profile_preferences_spec.js b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
index 89ce838a383..91cd868daac 100644
--- a/spec/frontend/profile/preferences/components/profile_preferences_spec.js
+++ b/spec/frontend/profile/preferences/components/profile_preferences_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import createFlash from '~/flash';
+import { createAlert, VARIANT_DANGER, VARIANT_INFO } from '~/flash';
import IntegrationView from '~/profile/preferences/components/integration_view.vue';
import ProfilePreferences from '~/profile/preferences/components/profile_preferences.vue';
import { i18n } from '~/profile/preferences/constants';
@@ -149,7 +149,10 @@ describe('ProfilePreferences component', () => {
const successEvent = new CustomEvent('ajax:success');
form.dispatchEvent(successEvent);
- expect(createFlash).toHaveBeenCalledWith({ message: i18n.defaultSuccess, type: 'notice' });
+ expect(createAlert).toHaveBeenCalledWith({
+ message: i18n.defaultSuccess,
+ variant: VARIANT_INFO,
+ });
});
it('displays the custom success message', () => {
@@ -157,14 +160,17 @@ describe('ProfilePreferences component', () => {
const successEvent = new CustomEvent('ajax:success', { detail: [{ message }] });
form.dispatchEvent(successEvent);
- expect(createFlash).toHaveBeenCalledWith({ message, type: 'notice' });
+ expect(createAlert).toHaveBeenCalledWith({ message, variant: VARIANT_INFO });
});
it('displays the default error message', () => {
const errorEvent = new CustomEvent('ajax:error');
form.dispatchEvent(errorEvent);
- expect(createFlash).toHaveBeenCalledWith({ message: i18n.defaultError, type: 'alert' });
+ expect(createAlert).toHaveBeenCalledWith({
+ message: i18n.defaultError,
+ variant: VARIANT_DANGER,
+ });
});
it('displays the custom error message', () => {
@@ -172,7 +178,7 @@ describe('ProfilePreferences component', () => {
const errorEvent = new CustomEvent('ajax:error', { detail: [{ message }] });
form.dispatchEvent(errorEvent);
- expect(createFlash).toHaveBeenCalledWith({ message, type: 'alert' });
+ expect(createAlert).toHaveBeenCalledWith({ message, variant: VARIANT_DANGER });
});
});
diff --git a/spec/frontend/projects/commit/store/actions_spec.js b/spec/frontend/projects/commit/store/actions_spec.js
index 56dffcbd48e..008710984b9 100644
--- a/spec/frontend/projects/commit/store/actions_spec.js
+++ b/spec/frontend/projects/commit/store/actions_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { PROJECT_BRANCHES_ERROR } from '~/projects/commit/constants';
import * as actions from '~/projects/commit/store/actions';
@@ -68,7 +68,7 @@ describe('Commit form modal store actions', () => {
await testAction(actions.fetchBranches, {}, state, [], [{ type: 'requestBranches' }]);
- expect(createFlash).toHaveBeenCalledWith({ message: PROJECT_BRANCHES_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: PROJECT_BRANCHES_ERROR });
});
});
diff --git a/spec/frontend/projects/commits/store/actions_spec.js b/spec/frontend/projects/commits/store/actions_spec.js
index fdb12640b26..930b801af71 100644
--- a/spec/frontend/projects/commits/store/actions_spec.js
+++ b/spec/frontend/projects/commits/store/actions_spec.js
@@ -1,7 +1,7 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import actions from '~/projects/commits/store/actions';
import * as types from '~/projects/commits/store/mutation_types';
import createState from '~/projects/commits/store/state';
@@ -38,8 +38,8 @@ describe('Project commits actions', () => {
const mockDispatchContext = { dispatch: () => {}, commit: () => {}, state };
actions.receiveAuthorsError(mockDispatchContext);
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred fetching the project authors.',
});
});
diff --git a/spec/frontend/projects/compare/components/app_spec.js b/spec/frontend/projects/compare/components/app_spec.js
index 2dbecf7cc61..9b052a17caa 100644
--- a/spec/frontend/projects/compare/components/app_spec.js
+++ b/spec/frontend/projects/compare/components/app_spec.js
@@ -134,6 +134,40 @@ describe('CompareApp component', () => {
});
});
+ describe('mode dropdown', () => {
+ const findModeDropdownButton = () => wrapper.find('[data-testid="modeDropdown"]');
+ const findEnableStraightModeButton = () =>
+ wrapper.find('[data-testid="enableStraightModeButton"]');
+ const findDisableStraightModeButton = () =>
+ wrapper.find('[data-testid="disableStraightModeButton"]');
+
+ it('renders the mode dropdown button', () => {
+ expect(findModeDropdownButton().exists()).toBe(true);
+ });
+
+ it('has the correct text', () => {
+ expect(findEnableStraightModeButton().text()).toBe('...');
+ expect(findDisableStraightModeButton().text()).toBe('..');
+ });
+
+ it('straight mode button when clicked', async () => {
+ expect(wrapper.props('straight')).toBe(false);
+ expect(wrapper.find('input[name="straight"]').attributes('value')).toBe('false');
+
+ findEnableStraightModeButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(wrapper.find('input[name="straight"]').attributes('value')).toBe('true');
+
+ findDisableStraightModeButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(wrapper.find('input[name="straight"]').attributes('value')).toBe('false');
+ });
+ });
+
describe('merge request buttons', () => {
const findProjectMrButton = () => wrapper.find('[data-testid="projectMrButton"]');
const findCreateMrButton = () => wrapper.find('[data-testid="createMrButton"]');
diff --git a/spec/frontend/projects/compare/components/mock_data.js b/spec/frontend/projects/compare/components/mock_data.js
index 81d64469a2a..28d9a394038 100644
--- a/spec/frontend/projects/compare/components/mock_data.js
+++ b/spec/frontend/projects/compare/components/mock_data.js
@@ -17,6 +17,7 @@ export const appDefaultProps = {
projects: [sourceProject],
paramsFrom: 'main',
paramsTo: 'target/branch',
+ straight: false,
createMrPath: '',
sourceProjectRefsPath,
targetProjectRefsPath,
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
index f64af1aa994..c21c0f4f9d1 100644
--- a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
+++ b/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
@@ -2,7 +2,7 @@ import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import RevisionDropdown from '~/projects/compare/components/revision_dropdown_legacy.vue';
@@ -79,7 +79,7 @@ describe('RevisionDropdown component', () => {
axiosMock.onGet('some/invalid/path').replyOnce(404);
await wrapper.vm.fetchBranchesAndTags();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
describe('GlDropdown component', () => {
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
index 35e32fd3da0..d598bafea92 100644
--- a/spec/frontend/projects/compare/components/revision_dropdown_spec.js
+++ b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
@@ -2,7 +2,7 @@ import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import RevisionDropdown from '~/projects/compare/components/revision_dropdown.vue';
import { revisionDropdownDefaultProps as defaultProps } from './mock_data';
@@ -67,7 +67,7 @@ describe('RevisionDropdown component', () => {
createComponent();
await wrapper.vm.fetchBranchesAndTags();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('makes a new request when refsProjectPath is changed', async () => {
@@ -93,7 +93,7 @@ describe('RevisionDropdown component', () => {
createComponent();
await wrapper.vm.searchBranchesAndTags();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('makes request with search param', async () => {
diff --git a/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js b/spec/frontend/projects/settings/branch_rules/components/edit/branch_dropdown_spec.js
index 79bce5a4b3f..11f219c1f90 100644
--- a/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/components/edit/branch_dropdown_spec.js
@@ -4,7 +4,7 @@ import { GlDropdown, GlSearchBoxByType, GlDropdownItem, GlSprintf } from '@gitla
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BranchDropdown, {
i18n,
-} from '~/projects/settings/branch_rules/components/branch_dropdown.vue';
+} from '~/projects/settings/branch_rules/components/edit/branch_dropdown.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import branchesQuery from '~/projects/settings/branch_rules/queries/branches.query.graphql';
import waitForPromises from 'helpers/wait_for_promises';
diff --git a/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js b/spec/frontend/projects/settings/branch_rules/components/edit/index_spec.js
index b0b2b9191d4..21e63fdb24d 100644
--- a/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/components/edit/index_spec.js
@@ -1,9 +1,9 @@
import { nextTick } from 'vue';
import { getParameterByName } from '~/lib/utils/url_utility';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import RuleEdit from '~/projects/settings/branch_rules/components/rule_edit.vue';
-import BranchDropdown from '~/projects/settings/branch_rules/components/branch_dropdown.vue';
-import Protections from '~/projects/settings/branch_rules/components/protections/index.vue';
+import RuleEdit from '~/projects/settings/branch_rules/components/edit/index.vue';
+import BranchDropdown from '~/projects/settings/branch_rules/components/edit/branch_dropdown.vue';
+import Protections from '~/projects/settings/branch_rules/components/edit/protections/index.vue';
jest.mock('~/lib/utils/url_utility', () => ({
getParameterByName: jest.fn().mockImplementation(() => 'main'),
diff --git a/spec/frontend/projects/settings/branch_rules/components/protections/index_spec.js b/spec/frontend/projects/settings/branch_rules/components/edit/protections/index_spec.js
index 3592fa50622..ee90ff8318f 100644
--- a/spec/frontend/projects/settings/branch_rules/components/protections/index_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/components/edit/protections/index_spec.js
@@ -3,10 +3,10 @@ import { GlLink } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import Protections, {
i18n,
-} from '~/projects/settings/branch_rules/components/protections/index.vue';
-import PushProtections from '~/projects/settings/branch_rules/components/protections/push_protections.vue';
-import MergeProtections from '~/projects/settings/branch_rules/components/protections/merge_protections.vue';
-import { protections } from '../../mock_data';
+} from '~/projects/settings/branch_rules/components/edit/protections/index.vue';
+import PushProtections from '~/projects/settings/branch_rules/components/edit/protections/push_protections.vue';
+import MergeProtections from '~/projects/settings/branch_rules/components/edit/protections/merge_protections.vue';
+import { protections } from '../../../mock_data';
describe('Branch Protections', () => {
let wrapper;
diff --git a/spec/frontend/projects/settings/branch_rules/components/protections/merge_protections_spec.js b/spec/frontend/projects/settings/branch_rules/components/edit/protections/merge_protections_spec.js
index 0e168a2ad78..b5fdc46d600 100644
--- a/spec/frontend/projects/settings/branch_rules/components/protections/merge_protections_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/components/edit/protections/merge_protections_spec.js
@@ -2,8 +2,8 @@ import { GlFormGroup, GlFormCheckbox } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import MergeProtections, {
i18n,
-} from '~/projects/settings/branch_rules/components/protections/merge_protections.vue';
-import { membersAllowedToMerge, requireCodeOwnersApproval } from '../../mock_data';
+} from '~/projects/settings/branch_rules/components/edit/protections/merge_protections.vue';
+import { membersAllowedToMerge, requireCodeOwnersApproval } from '../../../mock_data';
describe('Merge Protections', () => {
let wrapper;
diff --git a/spec/frontend/projects/settings/branch_rules/components/protections/push_protections_spec.js b/spec/frontend/projects/settings/branch_rules/components/edit/protections/push_protections_spec.js
index d54dad08338..60bb7a51dcb 100644
--- a/spec/frontend/projects/settings/branch_rules/components/protections/push_protections_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/components/edit/protections/push_protections_spec.js
@@ -2,8 +2,8 @@ import { GlFormGroup, GlSprintf, GlFormCheckbox } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import PushProtections, {
i18n,
-} from '~/projects/settings/branch_rules/components/protections/push_protections.vue';
-import { membersAllowedToPush, allowForcePush } from '../../mock_data';
+} from '~/projects/settings/branch_rules/components/edit/protections/push_protections.vue';
+import { membersAllowedToPush, allowForcePush } from '../../../mock_data';
describe('Push Protections', () => {
let wrapper;
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js b/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js
new file mode 100644
index 00000000000..bf4026b65db
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js
@@ -0,0 +1,113 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import * as util from '~/lib/utils/url_utility';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import RuleView from '~/projects/settings/branch_rules/components/view/index.vue';
+import {
+ I18N,
+ ALL_BRANCHES_WILDCARD,
+} from '~/projects/settings/branch_rules/components/view/constants';
+import Protection from '~/projects/settings/branch_rules/components/view/protection.vue';
+import branchRulesQuery from '~/projects/settings/branch_rules/queries/branch_rules_details.query.graphql';
+import { sprintf } from '~/locale';
+import { branchProtectionsMockResponse } from './mock_data';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ getParameterByName: jest.fn().mockReturnValue('main'),
+ joinPaths: jest.fn(),
+}));
+
+Vue.use(VueApollo);
+
+const protectionMockProps = {
+ headerLinkHref: 'protected/branches',
+ headerLinkTitle: 'Manage in Protected Branches',
+ roles: [{ accessLevelDescription: 'Maintainers' }],
+ users: [{ avatarUrl: 'test.com/user.png', name: 'peter', webUrl: 'test.com' }],
+};
+
+describe('View branch rules', () => {
+ let wrapper;
+ let fakeApollo;
+ const projectPath = 'test/testing';
+ const protectedBranchesPath = 'protected/branches';
+ const approvalRulesPath = 'approval/rules';
+ const branchProtectionsMockRequestHandler = jest
+ .fn()
+ .mockResolvedValue(branchProtectionsMockResponse);
+
+ const createComponent = async () => {
+ fakeApollo = createMockApollo([[branchRulesQuery, branchProtectionsMockRequestHandler]]);
+
+ wrapper = shallowMountExtended(RuleView, {
+ apolloProvider: fakeApollo,
+ provide: { projectPath, protectedBranchesPath, approvalRulesPath },
+ });
+
+ await waitForPromises();
+ };
+
+ beforeEach(() => createComponent());
+
+ afterEach(() => wrapper.destroy());
+
+ const findBranchName = () => wrapper.findByTestId('branch');
+ const findBranchTitle = () => wrapper.findByTestId('branch-title');
+ const findBranchProtectionTitle = () => wrapper.findByText(I18N.protectBranchTitle);
+ const findBranchProtections = () => wrapper.findAllComponents(Protection);
+ const findForcePushTitle = () => wrapper.findByText(I18N.allowForcePushDescription);
+ const findApprovalsTitle = () => wrapper.findByText(I18N.approvalsTitle);
+
+ it('gets the branch param from url and renders it in the view', () => {
+ expect(util.getParameterByName).toHaveBeenCalledWith('branch');
+ expect(findBranchName().text()).toBe('main');
+ expect(findBranchTitle().text()).toBe(I18N.branchNameOrPattern);
+ });
+
+ it('renders the correct label if all branches are targeted', async () => {
+ jest.spyOn(util, 'getParameterByName').mockReturnValueOnce(ALL_BRANCHES_WILDCARD);
+ await createComponent();
+
+ expect(findBranchName().text()).toBe(I18N.allBranches);
+ expect(findBranchTitle().text()).toBe(I18N.targetBranch);
+ jest.restoreAllMocks();
+ });
+
+ it('renders the correct branch title', () => {
+ expect(findBranchTitle().exists()).toBe(true);
+ });
+
+ it('renders a branch protection title', () => {
+ expect(findBranchProtectionTitle().exists()).toBe(true);
+ });
+
+ it('renders a branch protection component for push rules', () => {
+ expect(findBranchProtections().at(0).props()).toMatchObject({
+ header: sprintf(I18N.allowedToPushHeader, { total: 2 }),
+ ...protectionMockProps,
+ });
+ });
+
+ it('renders force push protection', () => {
+ expect(findForcePushTitle().exists()).toBe(true);
+ });
+
+ it('renders a branch protection component for merge rules', () => {
+ expect(findBranchProtections().at(1).props()).toMatchObject({
+ header: sprintf(I18N.allowedToMergeHeader, { total: 2 }),
+ ...protectionMockProps,
+ });
+ });
+
+ it('renders a branch protection component for approvals', () => {
+ expect(findApprovalsTitle().exists()).toBe(true);
+
+ expect(findBranchProtections().at(2).props()).toMatchObject({
+ header: sprintf(I18N.approvalsHeader, { total: 0 }),
+ headerLinkHref: approvalRulesPath,
+ headerLinkTitle: I18N.manageApprovalsLinkTitle,
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/mock_data.js b/spec/frontend/projects/settings/branch_rules/components/view/mock_data.js
new file mode 100644
index 00000000000..c3f573061da
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/view/mock_data.js
@@ -0,0 +1,141 @@
+const usersMock = [
+ {
+ username: 'usr1',
+ webUrl: 'http://test.test/usr1',
+ name: 'User 1',
+ avatarUrl: 'http://test.test/avt1.png',
+ },
+ {
+ username: 'usr2',
+ webUrl: 'http://test.test/usr2',
+ name: 'User 2',
+ avatarUrl: 'http://test.test/avt2.png',
+ },
+ {
+ username: 'usr3',
+ webUrl: 'http://test.test/usr3',
+ name: 'User 3',
+ avatarUrl: 'http://test.test/avt3.png',
+ },
+ {
+ username: 'usr4',
+ webUrl: 'http://test.test/usr4',
+ name: 'User 4',
+ avatarUrl: 'http://test.test/avt4.png',
+ },
+ {
+ username: 'usr5',
+ webUrl: 'http://test.test/usr5',
+ name: 'User 5',
+ avatarUrl: 'http://test.test/avt5.png',
+ },
+];
+
+const accessLevelsMock = [
+ { accessLevelDescription: 'Administrator' },
+ { accessLevelDescription: 'Maintainer' },
+];
+
+const approvalsRequired = 3;
+
+const groupsMock = [{ name: 'test_group_1' }, { name: 'test_group_2' }];
+
+export const protectionPropsMock = {
+ header: 'Test protection',
+ headerLinkTitle: 'Test link title',
+ headerLinkHref: 'Test link href',
+ roles: accessLevelsMock,
+ users: usersMock,
+ groups: groupsMock,
+ approvals: [
+ {
+ name: 'test',
+ eligibleApprovers: { nodes: usersMock },
+ approvalsRequired,
+ },
+ ],
+};
+
+export const protectionRowPropsMock = {
+ title: 'Test title',
+ users: usersMock,
+ accessLevels: accessLevelsMock,
+ approvalsRequired,
+};
+
+export const accessLevelsMockResponse = [
+ {
+ __typename: 'PushAccessLevelEdge',
+ node: {
+ __typename: 'PushAccessLevel',
+ accessLevel: 40,
+ accessLevelDescription: 'Jona Langworth',
+ group: null,
+ user: {
+ __typename: 'UserCore',
+ id: '123',
+ webUrl: 'test.com',
+ name: 'peter',
+ avatarUrl: 'test.com/user.png',
+ },
+ },
+ },
+ {
+ __typename: 'PushAccessLevelEdge',
+ node: {
+ __typename: 'PushAccessLevel',
+ accessLevel: 40,
+ accessLevelDescription: 'Maintainers',
+ group: null,
+ user: null,
+ },
+ },
+];
+
+export const branchProtectionsMockResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/6',
+ __typename: 'Project',
+ branchRules: {
+ __typename: 'BranchRuleConnection',
+ nodes: [
+ {
+ __typename: 'BranchRule',
+ name: 'main',
+ branchProtection: {
+ __typename: 'BranchProtection',
+ allowForcePush: true,
+ codeOwnerApprovalRequired: true,
+ mergeAccessLevels: {
+ __typename: 'MergeAccessLevelConnection',
+ edges: accessLevelsMockResponse,
+ },
+ pushAccessLevels: {
+ __typename: 'PushAccessLevelConnection',
+ edges: accessLevelsMockResponse,
+ },
+ },
+ },
+ {
+ __typename: 'BranchRule',
+ name: '*',
+ branchProtection: {
+ __typename: 'BranchProtection',
+ allowForcePush: true,
+ codeOwnerApprovalRequired: true,
+ mergeAccessLevels: {
+ __typename: 'MergeAccessLevelConnection',
+ edges: [],
+ },
+ pushAccessLevels: {
+ __typename: 'PushAccessLevelConnection',
+ edges: [],
+ },
+ },
+ },
+ ],
+ },
+ },
+ },
+};
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/protection_row_spec.js b/spec/frontend/projects/settings/branch_rules/components/view/protection_row_spec.js
new file mode 100644
index 00000000000..b0a69bedd3e
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/view/protection_row_spec.js
@@ -0,0 +1,71 @@
+import { GlAvatarsInline, GlAvatar, GlAvatarLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ProtectionRow, {
+ MAX_VISIBLE_AVATARS,
+ AVATAR_SIZE,
+} from '~/projects/settings/branch_rules/components/view/protection_row.vue';
+import { protectionRowPropsMock } from './mock_data';
+
+describe('Branch rule protection row', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(ProtectionRow, {
+ propsData: protectionRowPropsMock,
+ stubs: { GlAvatarsInline },
+ });
+ };
+
+ beforeEach(() => createComponent());
+
+ afterEach(() => wrapper.destroy());
+
+ const findTitle = () => wrapper.findByText(protectionRowPropsMock.title);
+ const findAvatarsInline = () => wrapper.findComponent(GlAvatarsInline);
+ const findAvatarLinks = () => wrapper.findAllComponents(GlAvatarLink);
+ const findAvatars = () => wrapper.findAllComponents(GlAvatar);
+ const findAccessLevels = () => wrapper.findAllByTestId('access-level');
+ const findApprovalsRequired = () =>
+ wrapper.findByText(`${protectionRowPropsMock.approvalsRequired} approvals required`);
+
+ it('renders a title', () => {
+ expect(findTitle().exists()).toBe(true);
+ });
+
+ it('renders an avatars-inline component', () => {
+ expect(findAvatarsInline().props('avatars')).toMatchObject(protectionRowPropsMock.users);
+ expect(findAvatarsInline().props('badgeSrOnlyText')).toBe('1 additional user');
+ });
+
+ it('renders avatar-link components', () => {
+ expect(findAvatarLinks().length).toBe(MAX_VISIBLE_AVATARS);
+
+ expect(findAvatarLinks().at(1).attributes('href')).toBe(protectionRowPropsMock.users[1].webUrl);
+ expect(findAvatarLinks().at(1).attributes('title')).toBe(protectionRowPropsMock.users[1].name);
+ });
+
+ it('renders avatar components', () => {
+ expect(findAvatars().length).toBe(MAX_VISIBLE_AVATARS);
+
+ expect(findAvatars().at(1).attributes('src')).toBe(protectionRowPropsMock.users[1].avatarUrl);
+ expect(findAvatars().at(1).attributes('label')).toBe(protectionRowPropsMock.users[1].name);
+ expect(findAvatars().at(1).props('size')).toBe(AVATAR_SIZE);
+ });
+
+ it('renders access level descriptions', () => {
+ expect(findAccessLevels().length).toBe(protectionRowPropsMock.accessLevels.length);
+
+ expect(findAccessLevels().at(0).text()).toBe(
+ protectionRowPropsMock.accessLevels[0].accessLevelDescription,
+ );
+ expect(findAccessLevels().at(1).text()).toContain(',');
+
+ expect(findAccessLevels().at(1).text()).toContain(
+ protectionRowPropsMock.accessLevels[1].accessLevelDescription,
+ );
+ });
+
+ it('renders the number of approvals required', () => {
+ expect(findApprovalsRequired().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js b/spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js
new file mode 100644
index 00000000000..e2fbb4f5bbb
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/view/protection_spec.js
@@ -0,0 +1,68 @@
+import { GlCard, GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import Protection, { i18n } from '~/projects/settings/branch_rules/components/view/protection.vue';
+import ProtectionRow from '~/projects/settings/branch_rules/components/view/protection_row.vue';
+import { protectionPropsMock } from './mock_data';
+
+describe('Branch rule protection', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(Protection, {
+ propsData: protectionPropsMock,
+ stubs: { GlCard },
+ });
+ };
+
+ beforeEach(() => createComponent());
+
+ afterEach(() => wrapper.destroy());
+
+ const findCard = () => wrapper.findComponent(GlCard);
+ const findHeader = () => wrapper.findByText(protectionPropsMock.header);
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findProtectionRows = () => wrapper.findAllComponents(ProtectionRow);
+
+ it('renders a card component', () => {
+ expect(findCard().exists()).toBe(true);
+ });
+
+ it('renders a header with a link', () => {
+ expect(findHeader().exists()).toBe(true);
+ expect(findLink().text()).toBe(protectionPropsMock.headerLinkTitle);
+ expect(findLink().attributes('href')).toBe(protectionPropsMock.headerLinkHref);
+ });
+
+ it('renders a protection row for roles', () => {
+ expect(findProtectionRows().at(0).props()).toMatchObject({
+ accessLevels: protectionPropsMock.roles,
+ showDivider: false,
+ title: i18n.rolesTitle,
+ });
+ });
+
+ it('renders a protection row for users', () => {
+ expect(findProtectionRows().at(1).props()).toMatchObject({
+ users: protectionPropsMock.users,
+ showDivider: true,
+ title: i18n.usersTitle,
+ });
+ });
+
+ it('renders a protection row for groups', () => {
+ expect(findProtectionRows().at(2).props()).toMatchObject({
+ accessLevels: protectionPropsMock.groups,
+ showDivider: true,
+ title: i18n.groupsTitle,
+ });
+ });
+
+ it('renders a protection row for approvals', () => {
+ const approval = protectionPropsMock.approvals[0];
+ expect(findProtectionRows().at(3).props()).toMatchObject({
+ title: approval.name,
+ users: approval.eligibleApprovers.nodes,
+ approvalsRequired: approval.approvalsRequired,
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings/components/default_branch_selector_spec.js b/spec/frontend/projects/settings/components/default_branch_selector_spec.js
new file mode 100644
index 00000000000..94648d87524
--- /dev/null
+++ b/spec/frontend/projects/settings/components/default_branch_selector_spec.js
@@ -0,0 +1,46 @@
+import { shallowMount } from '@vue/test-utils';
+import DefaultBranchSelector from '~/projects/settings/components/default_branch_selector.vue';
+import RefSelector from '~/ref/components/ref_selector.vue';
+import { REF_TYPE_BRANCHES } from '~/ref/constants';
+
+describe('projects/settings/components/default_branch_selector', () => {
+ const persistedDefaultBranch = 'main';
+ const projectId = '123';
+ let wrapper;
+
+ const findRefSelector = () => wrapper.findComponent(RefSelector);
+
+ const buildWrapper = () => {
+ wrapper = shallowMount(DefaultBranchSelector, {
+ propsData: {
+ persistedDefaultBranch,
+ projectId,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ beforeEach(() => {
+ buildWrapper();
+ });
+
+ it('displays a RefSelector component', () => {
+ expect(findRefSelector().props()).toEqual({
+ value: persistedDefaultBranch,
+ enabledRefTypes: [REF_TYPE_BRANCHES],
+ projectId,
+ state: true,
+ translations: {
+ dropdownHeader: expect.any(String),
+ searchPlaceholder: expect.any(String),
+ },
+ useSymbolicRefNames: false,
+ name: 'project[default_branch]',
+ });
+
+ expect(findRefSelector().classes()).toContain('gl-w-full');
+ });
+});
diff --git a/spec/frontend/projects/settings/components/transfer_project_form_spec.js b/spec/frontend/projects/settings/components/transfer_project_form_spec.js
index bde7148078d..6e639f895a8 100644
--- a/spec/frontend/projects/settings/components/transfer_project_form_spec.js
+++ b/spec/frontend/projects/settings/components/transfer_project_form_spec.js
@@ -1,41 +1,65 @@
import Vue, { nextTick } from 'vue';
+import { GlAlert } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
-import searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1 from 'test_fixtures/graphql/projects/settings/search_namespaces_where_user_can_transfer_projects_page_1.query.graphql.json';
-import searchNamespacesWhereUserCanTransferProjectsQueryResponsePage2 from 'test_fixtures/graphql/projects/settings/search_namespaces_where_user_can_transfer_projects_page_2.query.graphql.json';
-import {
- groupNamespaces,
- userNamespaces,
-} from 'jest/vue_shared/components/namespace_select/mock_data';
+import currentUserNamespaceQueryResponse from 'test_fixtures/graphql/projects/settings/current_user_namespace.query.graphql.json';
+import transferLocationsResponsePage1 from 'test_fixtures/api/projects/transfer_locations_page_1.json';
+import transferLocationsResponsePage2 from 'test_fixtures/api/projects/transfer_locations_page_2.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import TransferProjectForm from '~/projects/settings/components/transfer_project_form.vue';
-import NamespaceSelect from '~/vue_shared/components/namespace_select/namespace_select.vue';
+import NamespaceSelect from '~/vue_shared/components/namespace_select/namespace_select_deprecated.vue';
import ConfirmDanger from '~/vue_shared/components/confirm_danger/confirm_danger.vue';
-import searchNamespacesWhereUserCanTransferProjectsQuery from '~/projects/settings/graphql/queries/search_namespaces_where_user_can_transfer_projects.query.graphql';
+import currentUserNamespaceQuery from '~/projects/settings/graphql/queries/current_user_namespace.query.graphql';
+import { getTransferLocations } from '~/api/projects_api';
import waitForPromises from 'helpers/wait_for_promises';
+jest.mock('~/api/projects_api', () => ({
+ getTransferLocations: jest.fn(),
+}));
+
describe('Transfer project form', () => {
let wrapper;
+ const projectId = '1';
const confirmButtonText = 'Confirm';
const confirmationPhrase = 'You must construct additional pylons!';
- const runDebounce = () => jest.runAllTimers();
-
Vue.use(VueApollo);
- const defaultQueryHandler = jest
- .fn()
- .mockResolvedValue(searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1);
+ const defaultQueryHandler = jest.fn().mockResolvedValue(currentUserNamespaceQueryResponse);
+ const mockResolvedGetTransferLocations = ({
+ data = transferLocationsResponsePage1,
+ page = '1',
+ nextPage = '2',
+ prevPage = null,
+ } = {}) => {
+ getTransferLocations.mockResolvedValueOnce({
+ data,
+ headers: {
+ 'x-per-page': '2',
+ 'x-page': page,
+ 'x-total': '4',
+ 'x-total-pages': '2',
+ 'x-next-page': nextPage,
+ 'x-prev-page': prevPage,
+ },
+ });
+ };
+ const mockRejectedGetTransferLocations = () => {
+ const error = new Error();
+
+ getTransferLocations.mockRejectedValueOnce(error);
+ };
const createComponent = ({
- requestHandlers = [[searchNamespacesWhereUserCanTransferProjectsQuery, defaultQueryHandler]],
+ requestHandlers = [[currentUserNamespaceQuery, defaultQueryHandler]],
} = {}) => {
wrapper = shallowMountExtended(TransferProjectForm, {
+ provide: {
+ projectId,
+ },
propsData: {
- userNamespaces,
- groupNamespaces,
confirmButtonText,
confirmationPhrase,
},
@@ -44,7 +68,12 @@ describe('Transfer project form', () => {
};
const findNamespaceSelect = () => wrapper.findComponent(NamespaceSelect);
+ const showNamespaceSelect = async () => {
+ findNamespaceSelect().vm.$emit('show');
+ await waitForPromises();
+ };
const findConfirmDanger = () => wrapper.findComponent(ConfirmDanger);
+ const findAlert = () => wrapper.findComponent(GlAlert);
afterEach(() => {
wrapper.destroy();
@@ -69,66 +98,113 @@ describe('Transfer project form', () => {
});
describe('with a selected namespace', () => {
- const [selectedItem] = groupNamespaces;
+ const [selectedItem] = transferLocationsResponsePage1;
- beforeEach(() => {
+ const arrange = async () => {
+ mockResolvedGetTransferLocations();
createComponent();
-
+ await showNamespaceSelect();
findNamespaceSelect().vm.$emit('select', selectedItem);
- });
+ };
+
+ it('emits the `selectNamespace` event when a namespace is selected', async () => {
+ await arrange();
- it('emits the `selectNamespace` event when a namespace is selected', () => {
const args = [selectedItem.id];
expect(wrapper.emitted('selectNamespace')).toEqual([args]);
});
- it('enables the confirm button', () => {
+ it('enables the confirm button', async () => {
+ await arrange();
+
expect(findConfirmDanger().attributes('disabled')).toBeUndefined();
});
- it('clicking the confirm button emits the `confirm` event', () => {
+ it('clicking the confirm button emits the `confirm` event', async () => {
+ await arrange();
+
findConfirmDanger().vm.$emit('confirm');
expect(wrapper.emitted('confirm')).toBeDefined();
});
});
- it('passes correct props to `NamespaceSelect` component', async () => {
- createComponent();
+ describe('when `NamespaceSelect` is opened', () => {
+ it('fetches user and group namespaces and passes correct props to `NamespaceSelect` component', async () => {
+ mockResolvedGetTransferLocations();
+ createComponent();
+ await showNamespaceSelect();
+
+ const { namespace } = currentUserNamespaceQueryResponse.data.currentUser;
+
+ expect(findNamespaceSelect().props()).toMatchObject({
+ userNamespaces: [
+ {
+ id: getIdFromGraphQLId(namespace.id),
+ humanName: namespace.fullName,
+ },
+ ],
+ groupNamespaces: transferLocationsResponsePage1.map(({ id, full_name: humanName }) => ({
+ id,
+ humanName,
+ })),
+ hasNextPageOfGroups: true,
+ isLoading: false,
+ isSearchLoading: false,
+ shouldFilterNamespaces: false,
+ });
+ });
- runDebounce();
- await waitForPromises();
+ describe('when namespaces have already been fetched', () => {
+ beforeEach(async () => {
+ mockResolvedGetTransferLocations();
+ createComponent();
+ await showNamespaceSelect();
+ });
+
+ it('does not fetch namespaces', async () => {
+ getTransferLocations.mockClear();
+ defaultQueryHandler.mockClear();
+
+ await showNamespaceSelect();
- const {
- namespace,
- groups,
- } = searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1.data.currentUser;
-
- expect(findNamespaceSelect().props()).toMatchObject({
- userNamespaces: [
- {
- id: getIdFromGraphQLId(namespace.id),
- humanName: namespace.fullName,
- },
- ],
- groupNamespaces: groups.nodes.map((node) => ({
- id: getIdFromGraphQLId(node.id),
- humanName: node.fullName,
- })),
- hasNextPageOfGroups: true,
- isLoadingMoreGroups: false,
- isSearchLoading: false,
- shouldFilterNamespaces: false,
+ expect(getTransferLocations).not.toHaveBeenCalled();
+ expect(defaultQueryHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when `getTransferLocations` API call fails', () => {
+ it('displays error alert', async () => {
+ mockRejectedGetTransferLocations();
+ createComponent();
+ await showNamespaceSelect();
+
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('when `currentUser` GraphQL query fails', () => {
+ it('displays error alert', async () => {
+ mockResolvedGetTransferLocations();
+ const error = new Error();
+ createComponent({
+ requestHandlers: [[currentUserNamespaceQuery, jest.fn().mockRejectedValueOnce(error)]],
+ });
+ await showNamespaceSelect();
+
+ expect(findAlert().exists()).toBe(true);
+ });
});
});
describe('when `search` event is fired', () => {
const arrange = async () => {
+ mockResolvedGetTransferLocations();
createComponent();
-
+ await showNamespaceSelect();
+ mockResolvedGetTransferLocations();
findNamespaceSelect().vm.$emit('search', 'foo');
-
await nextTick();
};
@@ -138,87 +214,106 @@ describe('Transfer project form', () => {
expect(findNamespaceSelect().props('isSearchLoading')).toBe(true);
});
- it('passes `search` variable to query', async () => {
+ it('passes `search` param to API call', async () => {
await arrange();
- runDebounce();
await waitForPromises();
- expect(defaultQueryHandler).toHaveBeenCalledWith(expect.objectContaining({ search: 'foo' }));
+ expect(getTransferLocations).toHaveBeenCalledWith(
+ projectId,
+ expect.objectContaining({ search: 'foo' }),
+ );
+ });
+
+ describe('when `getTransferLocations` API call fails', () => {
+ it('displays dismissible error alert', async () => {
+ mockResolvedGetTransferLocations();
+ createComponent();
+ await showNamespaceSelect();
+ mockRejectedGetTransferLocations();
+ findNamespaceSelect().vm.$emit('search', 'foo');
+ await waitForPromises();
+
+ const alert = findAlert();
+
+ expect(alert.exists()).toBe(true);
+
+ alert.vm.$emit('dismiss');
+ await nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
});
});
describe('when `load-more-groups` event is fired', () => {
- let queryHandler;
-
const arrange = async () => {
- queryHandler = jest.fn();
- queryHandler.mockResolvedValueOnce(
- searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1,
- );
- queryHandler.mockResolvedValueOnce(
- searchNamespacesWhereUserCanTransferProjectsQueryResponsePage2,
- );
+ mockResolvedGetTransferLocations();
+ createComponent();
+ await showNamespaceSelect();
- createComponent({
- requestHandlers: [[searchNamespacesWhereUserCanTransferProjectsQuery, queryHandler]],
+ mockResolvedGetTransferLocations({
+ data: transferLocationsResponsePage2,
+ page: '2',
+ nextPage: null,
+ prevPage: '1',
});
- runDebounce();
- await waitForPromises();
-
findNamespaceSelect().vm.$emit('load-more-groups');
await nextTick();
};
- it('sets `isLoadingMoreGroups` prop to `true`', async () => {
+ it('sets `isLoading` prop to `true`', async () => {
await arrange();
- expect(findNamespaceSelect().props('isLoadingMoreGroups')).toBe(true);
+ expect(findNamespaceSelect().props('isLoading')).toBe(true);
});
- it('passes `after` and `first` variables to query', async () => {
+ it('passes `page` param to API call', async () => {
await arrange();
- runDebounce();
await waitForPromises();
- expect(queryHandler).toHaveBeenCalledWith(
- expect.objectContaining({
- first: 25,
- after:
- searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1.data.currentUser.groups
- .pageInfo.endCursor,
- }),
+ expect(getTransferLocations).toHaveBeenCalledWith(
+ projectId,
+ expect.objectContaining({ page: 2 }),
);
});
it('updates `groupNamespaces` prop with new groups', async () => {
await arrange();
- runDebounce();
await waitForPromises();
- expect(findNamespaceSelect().props('groupNamespaces')).toEqual(
- [
- ...searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1.data.currentUser.groups
- .nodes,
- ...searchNamespacesWhereUserCanTransferProjectsQueryResponsePage2.data.currentUser.groups
- .nodes,
- ].map((node) => ({
- id: getIdFromGraphQLId(node.id),
- humanName: node.fullName,
- })),
+ expect(findNamespaceSelect().props('groupNamespaces')).toMatchObject(
+ [...transferLocationsResponsePage1, ...transferLocationsResponsePage2].map(
+ ({ id, full_name: humanName }) => ({
+ id,
+ humanName,
+ }),
+ ),
);
});
it('updates `hasNextPageOfGroups` prop', async () => {
await arrange();
- runDebounce();
await waitForPromises();
expect(findNamespaceSelect().props('hasNextPageOfGroups')).toBe(false);
});
+
+ describe('when `getTransferLocations` API call fails', () => {
+ it('displays error alert', async () => {
+ mockResolvedGetTransferLocations();
+ createComponent();
+ await showNamespaceSelect();
+ mockRejectedGetTransferLocations();
+ findNamespaceSelect().vm.$emit('load-more-groups');
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
});
});
diff --git a/spec/frontend/projects/settings/repository/branch_rules/app_spec.js b/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
index e920cd48163..4603436c40a 100644
--- a/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
+++ b/spec/frontend/projects/settings/repository/branch_rules/app_spec.js
@@ -6,8 +6,8 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import BranchRules, { i18n } from '~/projects/settings/repository/branch_rules/app.vue';
import BranchRule from '~/projects/settings/repository/branch_rules/components/branch_rule.vue';
import branchRulesQuery from '~/projects/settings/repository/branch_rules/graphql/queries/branch_rules.query.graphql';
-import createFlash from '~/flash';
-import { branchRulesMockResponse, propsDataMock } from './mock_data';
+import { createAlert } from '~/flash';
+import { branchRulesMockResponse, appProvideMock } from './mock_data';
jest.mock('~/flash');
@@ -24,9 +24,7 @@ describe('Branch rules app', () => {
wrapper = mountExtended(BranchRules, {
apolloProvider: fakeApollo,
- propsData: {
- ...propsDataMock,
- },
+ provide: appProvideMock,
});
await waitForPromises();
@@ -39,7 +37,7 @@ describe('Branch rules app', () => {
it('displays an error if branch rules query fails', async () => {
await createComponent({ queryHandler: jest.fn().mockRejectedValue() });
- expect(createFlash).toHaveBeenCalledWith({ message: i18n.queryError });
+ expect(createAlert).toHaveBeenCalledWith({ message: i18n.queryError });
});
it('displays an empty state if no branch rules are present', async () => {
@@ -49,7 +47,11 @@ describe('Branch rules app', () => {
it('renders branch rules', () => {
const { nodes } = branchRulesMockResponse.data.project.branchRules;
- expect(findAllBranchRules().at(0).text()).toBe(nodes[0].name);
- expect(findAllBranchRules().at(1).text()).toBe(nodes[1].name);
+
+ expect(findAllBranchRules().length).toBe(nodes.length);
+
+ expect(findAllBranchRules().at(0).props('name')).toBe(nodes[0].name);
+
+ expect(findAllBranchRules().at(1).props('name')).toBe(nodes[1].name);
});
});
diff --git a/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js b/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js
index 924dab60704..2bc705f538b 100644
--- a/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js
+++ b/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js
@@ -2,26 +2,24 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BranchRule, {
i18n,
} from '~/projects/settings/repository/branch_rules/components/branch_rule.vue';
-
-const defaultProps = {
- name: 'main',
- isDefault: true,
- isProtected: true,
- approvalDetails: ['requires approval from TEST', '2 status checks'],
-};
+import { branchRuleProvideMock, branchRulePropsMock } from '../mock_data';
describe('Branch rule', () => {
let wrapper;
const createComponent = (props = {}) => {
- wrapper = shallowMountExtended(BranchRule, { propsData: { ...defaultProps, ...props } });
+ wrapper = shallowMountExtended(BranchRule, {
+ provide: branchRuleProvideMock,
+ propsData: { ...branchRulePropsMock, ...props },
+ });
};
const findDefaultBadge = () => wrapper.findByText(i18n.defaultLabel);
const findProtectedBadge = () => wrapper.findByText(i18n.protectedLabel);
- const findBranchName = () => wrapper.findByText(defaultProps.name);
+ const findBranchName = () => wrapper.findByText(branchRulePropsMock.name);
const findProtectionDetailsList = () => wrapper.findByRole('list');
const findProtectionDetailsListItems = () => wrapper.findAllByRole('listitem');
+ const findDetailsButton = () => wrapper.findByText(i18n.detailsButtonLabel);
beforeEach(() => createComponent());
@@ -52,7 +50,17 @@ describe('Branch rule', () => {
});
it('renders the protection details list items', () => {
- expect(findProtectionDetailsListItems().at(0).text()).toBe(defaultProps.approvalDetails[0]);
- expect(findProtectionDetailsListItems().at(1).text()).toBe(defaultProps.approvalDetails[1]);
+ expect(findProtectionDetailsListItems().at(0).text()).toBe(
+ branchRulePropsMock.approvalDetails[0],
+ );
+ expect(findProtectionDetailsListItems().at(1).text()).toBe(
+ branchRulePropsMock.approvalDetails[1],
+ );
+ });
+
+ it('renders a detail button with the correct href', () => {
+ expect(findDetailsButton().attributes('href')).toBe(
+ `${branchRuleProvideMock.branchRulesPath}?branch=${branchRulePropsMock.name}`,
+ );
});
});
diff --git a/spec/frontend/projects/settings/repository/branch_rules/mock_data.js b/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
index 14ed35f047d..bac82992c4d 100644
--- a/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
+++ b/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
@@ -20,6 +20,17 @@ export const branchRulesMockResponse = {
},
};
-export const propsDataMock = {
+export const appProvideMock = {
projectPath: 'some/project/path',
};
+
+export const branchRuleProvideMock = {
+ branchRulesPath: 'settings/repository/branch_rules',
+};
+
+export const branchRulePropsMock = {
+ name: 'main',
+ isDefault: true,
+ isProtected: true,
+ approvalDetails: ['requires approval from TEST', '2 status checks'],
+};
diff --git a/spec/frontend/protected_branches/protected_branch_edit_spec.js b/spec/frontend/protected_branches/protected_branch_edit_spec.js
index 6ef1b58a956..0aec4fbc037 100644
--- a/spec/frontend/protected_branches/protected_branch_edit_spec.js
+++ b/spec/frontend/protected_branches/protected_branch_edit_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import ProtectedBranchEdit from '~/protected_branches/protected_branch_edit';
@@ -136,7 +136,7 @@ describe('ProtectedBranchEdit', () => {
expect(toggle).not.toHaveClass(IS_DISABLED_CLASS);
expect(toggle.querySelector(IS_LOADING_SELECTOR)).toBe(null);
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -149,7 +149,7 @@ describe('ProtectedBranchEdit', () => {
it('flashes error', async () => {
await axios.waitForAll();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 6c5af5a2625..96601a729b2 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -109,6 +109,8 @@ describe('Ref selector component', () => {
const findCommitDropdownItems = () => findCommitsSection().findAllComponents(GlDropdownItem);
const findFirstCommitDropdownItem = () => findCommitDropdownItems().at(0);
+ const findHiddenInputField = () => wrapper.find('[data-testid="selected-ref-form-field"]');
+
//
// Expecters
//
@@ -181,6 +183,24 @@ describe('Ref selector component', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
});
+
+ describe('when name property is provided', () => {
+ it('renders a form input hidden field', () => {
+ const name = 'default_tag';
+
+ createComponent({ propsData: { name } });
+
+ expect(findHiddenInputField().attributes().name).toBe(name);
+ });
+ });
+
+ describe('when name property is not provided', () => {
+ it('does not render a form input hidden field', () => {
+ createComponent();
+
+ expect(findHiddenInputField().exists()).toBe(false);
+ });
+ });
});
describe('post-initialization behavior', () => {
@@ -194,7 +214,7 @@ describe('Ref selector component', () => {
});
it('adds the provided ID to the GlDropdown instance', () => {
- expect(wrapper.attributes().id).toBe(id);
+ expect(wrapper.findComponent(GlDropdown).attributes().id).toBe(id);
});
});
@@ -202,7 +222,7 @@ describe('Ref selector component', () => {
const preselectedRef = fixtures.branches[0].name;
beforeEach(() => {
- createComponent({ propsData: { value: preselectedRef } });
+ createComponent({ propsData: { value: preselectedRef, name: 'selectedRef' } });
return waitForRequests();
});
@@ -210,6 +230,10 @@ describe('Ref selector component', () => {
it('renders the pre-selected ref name', () => {
expect(findButtonContent().text()).toBe(preselectedRef);
});
+
+ it('binds hidden input field to the pre-selected ref', () => {
+ expect(findHiddenInputField().attributes().value).toBe(preselectedRef);
+ });
});
describe('when the selected ref is updated by the parent component', () => {
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index 55e3dda60a0..d88d79d2cde 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -155,8 +155,8 @@ Object {
Object {
"__typename": "ReleaseEvidence",
"collectedAt": "2018-12-03T00:00:00Z",
- "filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json",
- "id": "gid://gitlab/Releases::Evidence/1",
+ "filepath": Any<String>,
+ "id": Any<String>,
"sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
},
],
@@ -198,10 +198,10 @@ Object {
],
"paginationInfo": Object {
"__typename": "PageInfo",
- "endCursor": "eyJyZWxlYXNlZF9hdCI6IjIwMTgtMTItMTAgMDA6MDA6MDAuMDAwMDAwMDAwICswMDAwIiwiaWQiOiIxIn0",
+ "endCursor": Any<String>,
"hasNextPage": false,
"hasPreviousPage": false,
- "startCursor": "eyJyZWxlYXNlZF9hdCI6IjIwMTktMDEtMTAgMDA6MDA6MDAuMDAwMDAwMDAwICswMDAwIiwiaWQiOiIyIn0",
+ "startCursor": Any<String>,
},
}
`;
@@ -377,8 +377,8 @@ Object {
Object {
"__typename": "ReleaseEvidence",
"collectedAt": "2018-12-03T00:00:00Z",
- "filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json",
- "id": "gid://gitlab/Releases::Evidence/1",
+ "filepath": Any<String>,
+ "id": Any<String>,
"sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
},
],
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index f64f07de90e..48589a54ec4 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -6,7 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import allReleasesQuery from '~/releases/graphql/queries/all_releases.query.graphql';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { historyPushState } from '~/lib/utils/common_utils';
import { sprintf, __ } from '~/locale';
import ReleasesIndexApp from '~/releases/components/app_index.vue';
@@ -161,13 +161,13 @@ describe('app_index.vue', () => {
it(`${toDescription(flashMessage)} show a flash message`, async () => {
await waitForPromises();
if (flashMessage) {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: ReleasesIndexApp.i18n.errorMessage,
captureError: true,
error: expect.any(Error),
});
} else {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
}
});
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 9ca25b3b69a..c5cb8589ee8 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -4,7 +4,7 @@ import VueApollo from 'vue-apollo';
import oneReleaseQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release.query.graphql.json';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import ReleaseShowApp from '~/releases/components/app_show.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
@@ -53,13 +53,13 @@ describe('Release show component', () => {
const expectNoFlash = () => {
it('does not show a flash message', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
};
const expectFlashWithMessage = (message) => {
it(`shows a flash message that reads "${message}"`, () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message,
captureError: true,
error: expect.any(Error),
diff --git a/spec/frontend/releases/components/evidence_block_spec.js b/spec/frontend/releases/components/evidence_block_spec.js
index 2db1e9e38a2..6f935215dd7 100644
--- a/spec/frontend/releases/components/evidence_block_spec.js
+++ b/spec/frontend/releases/components/evidence_block_spec.js
@@ -36,7 +36,7 @@ describe('Evidence Block', () => {
});
it('renders the title for the dowload link', () => {
- expect(wrapper.findComponent(GlLink).text()).toBe(`v1.1-evidences-1.json`);
+ expect(wrapper.findComponent(GlLink).text()).toMatch(/v1\.1-evidences-[0-9]+\.json/);
});
it('renders the correct hover text for the download', () => {
@@ -44,7 +44,9 @@ describe('Evidence Block', () => {
});
it('renders the correct file link for download', () => {
- expect(wrapper.findComponent(GlLink).attributes().download).toBe(`v1.1-evidences-1.json`);
+ expect(wrapper.findComponent(GlLink).attributes().download).toMatch(
+ /v1\.1-evidences-[0-9]+\.json/,
+ );
});
describe('sha text', () => {
diff --git a/spec/frontend/releases/components/tag_field_new_spec.js b/spec/frontend/releases/components/tag_field_new_spec.js
index b8047cae8c2..fcba0da3462 100644
--- a/spec/frontend/releases/components/tag_field_new_spec.js
+++ b/spec/frontend/releases/components/tag_field_new_spec.js
@@ -1,14 +1,17 @@
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDropdownItem, GlFormGroup, GlSprintf } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
+import { trimText } from 'helpers/text_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { __ } from '~/locale';
import TagFieldNew from '~/releases/components/tag_field_new.vue';
import createStore from '~/releases/stores';
import createEditNewModule from '~/releases/stores/modules/edit_new';
const TEST_TAG_NAME = 'test-tag-name';
+const TEST_TAG_MESSAGE = 'Test tag message';
const TEST_PROJECT_ID = '1234';
const TEST_CREATE_FROM = 'test-create-from';
const NONEXISTENT_TAG_NAME = 'nonexistent-tag';
@@ -47,6 +50,8 @@ describe('releases/components/tag_field_new', () => {
store,
stubs: {
RefSelector: RefSelectorStub,
+ GlFormGroup,
+ GlSprintf,
},
});
};
@@ -61,9 +66,11 @@ describe('releases/components/tag_field_new', () => {
});
store.state.editNew.createFrom = TEST_CREATE_FROM;
+ store.state.editNew.showCreateFrom = true;
store.state.editNew.release = {
tagName: TEST_TAG_NAME,
+ tagMessage: '',
assets: {
links: [],
},
@@ -86,6 +93,9 @@ describe('releases/components/tag_field_new', () => {
const findCreateNewTagOption = () => wrapper.findComponent(GlDropdownItem);
+ const findAnnotatedTagMessageFormGroup = () =>
+ wrapper.find('[data-testid="annotated-tag-message-field"]');
+
describe('"Tag name" field', () => {
describe('rendering and behavior', () => {
beforeEach(() => createComponent());
@@ -124,6 +134,10 @@ describe('releases/components/tag_field_new', () => {
expect(findCreateFromFormGroup().exists()).toBe(false);
});
+ it('hides the "Tag message" field', () => {
+ expect(findAnnotatedTagMessageFormGroup().exists()).toBe(false);
+ });
+
it('fetches the release notes for the tag', () => {
const expectedUrl = `/api/v4/projects/1234/repository/tags/${updatedTagName}`;
expect(mock.history.get).toContainEqual(expect.objectContaining({ url: expectedUrl }));
@@ -230,4 +244,34 @@ describe('releases/components/tag_field_new', () => {
});
});
});
+
+ describe('"Annotated Tag" field', () => {
+ beforeEach(() => {
+ createComponent(mountExtended);
+ });
+
+ it('renders a label', () => {
+ expect(wrapper.findByRole('textbox', { name: 'Set tag message' }).exists()).toBe(true);
+ });
+
+ it('renders a description', () => {
+ expect(trimText(findAnnotatedTagMessageFormGroup().text())).toContain(
+ 'Add a message to the tag. Leaving this blank creates a lightweight tag.',
+ );
+ });
+
+ it('updates the store', async () => {
+ await findAnnotatedTagMessageFormGroup().find('textarea').setValue(TEST_TAG_MESSAGE);
+
+ expect(store.state.editNew.release.tagMessage).toBe(TEST_TAG_MESSAGE);
+ });
+
+ it('shows a link', () => {
+ const link = wrapper.findByRole('link', {
+ name: 'lightweight tag',
+ });
+
+ expect(link.attributes('href')).toBe('https://git-scm.com/book/en/v2/Git-Basics-Tagging/');
+ });
+ });
});
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 48fba3adb24..eeee6747349 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -2,7 +2,7 @@ import { cloneDeep } from 'lodash';
import originalOneReleaseForEditingQueryResponse from 'test_fixtures/graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json';
import testAction from 'helpers/vuex_action_helper';
import { getTag } from '~/api/tags_api';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { redirectTo } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
import { ASSET_LINK_TYPE } from '~/releases/constants';
@@ -59,7 +59,7 @@ describe('Release edit/new actions', () => {
releaseResponse = cloneDeep(originalOneReleaseForEditingQueryResponse);
gon.api_version = 'v4';
error = new Error('Yikes!');
- createFlash.mockClear();
+ createAlert.mockClear();
});
describe('when creating a new release', () => {
@@ -151,8 +151,8 @@ describe('Release edit/new actions', () => {
it(`shows a flash message`, () => {
return actions.fetchRelease({ commit: jest.fn(), state, rootState: state }).then(() => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while getting the release details.',
});
});
@@ -169,6 +169,15 @@ describe('Release edit/new actions', () => {
});
});
+ describe('updateReleaseTagMessage', () => {
+ it(`commits ${types.UPDATE_RELEASE_TAG_MESSAGE} with the updated tag name`, () => {
+ const newMessage = 'updated-tag-message';
+ return testAction(actions.updateReleaseTagMessage, newMessage, state, [
+ { type: types.UPDATE_RELEASE_TAG_MESSAGE, payload: newMessage },
+ ]);
+ });
+ });
+
describe('updateReleasedAt', () => {
it(`commits ${types.UPDATE_RELEASED_AT} with the updated date`, () => {
const newDate = new Date();
@@ -370,8 +379,8 @@ describe('Release edit/new actions', () => {
return actions
.createRelease({ commit: jest.fn(), dispatch: jest.fn(), state, getters: {} })
.then(() => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Yikes!',
});
});
@@ -396,8 +405,8 @@ describe('Release edit/new actions', () => {
return actions
.createRelease({ commit: jest.fn(), dispatch: jest.fn(), state, getters: {} })
.then(() => {
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while creating a new release.',
});
});
@@ -527,8 +536,8 @@ describe('Release edit/new actions', () => {
it('shows a flash message', async () => {
await actions.updateRelease({ commit, dispatch, state, getters });
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while saving the release details.',
});
});
@@ -547,8 +556,8 @@ describe('Release edit/new actions', () => {
it('shows a flash message', async () => {
await actions.updateRelease({ commit, dispatch, state, getters });
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while saving the release details.',
});
});
@@ -700,8 +709,8 @@ describe('Release edit/new actions', () => {
it('shows a flash message', async () => {
await actions.deleteRelease({ commit, dispatch, state, getters });
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while deleting the release.',
});
});
@@ -736,8 +745,8 @@ describe('Release edit/new actions', () => {
it('shows a flash message', async () => {
await actions.deleteRelease({ commit, dispatch, state, getters });
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledTimes(1);
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while deleting the release.',
});
});
@@ -779,7 +788,7 @@ describe('Release edit/new actions', () => {
[],
);
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: s__('Release|Unable to fetch the tag notes.'),
});
expect(getTag).toHaveBeenCalledWith(state.projectId, tagName);
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 2982dc5c46c..f8b87ec71dc 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -317,7 +317,7 @@ describe('Release edit/new getters', () => {
{ milestones: ['release.milestone[0].title'] },
],
])('releaseUpdateMutatationVariables', (description, state, expectedVariables) => {
- it(description, () => {
+ it(`${description}`, () => {
const expectedVariablesObject = { input: expect.objectContaining(expectedVariables) };
const actualVariables = getters.releaseUpdateMutatationVariables(state, {
@@ -332,6 +332,7 @@ describe('Release edit/new getters', () => {
it('returns all the data needed for the releaseCreate GraphQL query', () => {
const state = {
createFrom: 'main',
+ release: { tagMessage: 'hello' },
};
const otherGetters = {
@@ -352,6 +353,7 @@ describe('Release edit/new getters', () => {
const expectedVariables = {
input: {
name: 'release.name',
+ tagMessage: 'hello',
ref: 'main',
assets: {
links: [
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index 8bbf550b77d..944769d22cc 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -26,6 +26,7 @@ describe('Release edit/new mutations', () => {
expect(state.release).toEqual({
tagName: 'v1.3',
+ tagMessage: '',
name: '',
description: '',
milestones: [],
@@ -90,6 +91,16 @@ describe('Release edit/new mutations', () => {
});
});
+ describe(`${types.UPDATE_RELEASE_TAG_MESSAGE}`, () => {
+ it("updates the release's tag message", () => {
+ state.release = release;
+ const newMessage = 'updated-tag-message';
+ mutations[types.UPDATE_RELEASE_TAG_MESSAGE](state, newMessage);
+
+ expect(state.release.tagMessage).toBe(newMessage);
+ });
+ });
+
describe(`${types.UPDATE_RELEASED_AT}`, () => {
it("updates the release's released at date", () => {
state.release = release;
diff --git a/spec/frontend/releases/util_spec.js b/spec/frontend/releases/util_spec.js
index 055c8e8b39f..14cce8320e9 100644
--- a/spec/frontend/releases/util_spec.js
+++ b/spec/frontend/releases/util_spec.js
@@ -115,8 +115,18 @@ describe('releases/util.js', () => {
author: {
id: expect.any(String),
},
+ evidences: [
+ {
+ id: expect.any(String),
+ filepath: expect.any(String),
+ },
+ ],
},
],
+ paginationInfo: {
+ startCursor: expect.any(String),
+ endCursor: expect.any(String),
+ },
});
});
});
@@ -128,6 +138,12 @@ describe('releases/util.js', () => {
author: {
id: expect.any(String),
},
+ evidences: [
+ {
+ id: expect.any(String),
+ filepath: expect.any(String),
+ },
+ ],
},
});
});
diff --git a/spec/frontend/reports/accessibility_report/components/accessibility_issue_body_spec.js b/spec/frontend/reports/accessibility_report/components/accessibility_issue_body_spec.js
deleted file mode 100644
index d835ca4c733..00000000000
--- a/spec/frontend/reports/accessibility_report/components/accessibility_issue_body_spec.js
+++ /dev/null
@@ -1,112 +0,0 @@
-import { GlBadge } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import AccessibilityIssueBody from '~/reports/accessibility_report/components/accessibility_issue_body.vue';
-
-const issue = {
- name:
- 'The accessibility scanning found 2 errors of the following type: WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent',
- code: 'WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent',
- message: 'This element has insufficient contrast at this conformance level.',
- status: 'failed',
- className: 'spec.test_spec',
- learnMoreUrl: 'https://www.w3.org/TR/WCAG20-TECHS/H91.html',
-};
-
-describe('CustomMetricsForm', () => {
- let wrapper;
-
- const mountComponent = ({ name, code, message, status, className }, isNew = false) => {
- wrapper = shallowMount(AccessibilityIssueBody, {
- propsData: {
- issue: {
- name,
- code,
- message,
- status,
- className,
- },
- isNew,
- },
- });
- };
-
- const findIsNewBadge = () => wrapper.findComponent(GlBadge);
-
- beforeEach(() => {
- mountComponent(issue);
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('Displays the issue message', () => {
- const description = wrapper.findComponent({ ref: 'accessibility-issue-description' }).text();
-
- expect(description).toContain(`Message: ${issue.message}`);
- });
-
- describe('When an issue code is present', () => {
- it('Creates the correct URL for learning more about the issue code', () => {
- const learnMoreUrl = wrapper
- .findComponent({ ref: 'accessibility-issue-learn-more' })
- .attributes('href');
-
- expect(learnMoreUrl).toBe(issue.learnMoreUrl);
- });
- });
-
- describe('When an issue code is not present', () => {
- beforeEach(() => {
- mountComponent({
- ...issue,
- code: undefined,
- });
- });
-
- it('Creates a URL leading to the overview documentation page', () => {
- const learnMoreUrl = wrapper
- .findComponent({ ref: 'accessibility-issue-learn-more' })
- .attributes('href');
-
- expect(learnMoreUrl).toBe('https://www.w3.org/TR/WCAG20-TECHS/Overview.html');
- });
- });
-
- describe('When an issue code does not contain the TECHS code', () => {
- beforeEach(() => {
- mountComponent({
- ...issue,
- code: 'WCAG2AA.Principle4.Guideline4_1.4_1_2',
- });
- });
-
- it('Creates a URL leading to the overview documentation page', () => {
- const learnMoreUrl = wrapper
- .findComponent({ ref: 'accessibility-issue-learn-more' })
- .attributes('href');
-
- expect(learnMoreUrl).toBe('https://www.w3.org/TR/WCAG20-TECHS/Overview.html');
- });
- });
-
- describe('When issue is new', () => {
- beforeEach(() => {
- mountComponent(issue, true);
- });
-
- it('Renders the new badge', () => {
- expect(findIsNewBadge().exists()).toBe(true);
- });
- });
-
- describe('When issue is not new', () => {
- beforeEach(() => {
- mountComponent(issue, false);
- });
-
- it('Does not render the new badge', () => {
- expect(findIsNewBadge().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/reports/accessibility_report/grouped_accessibility_reports_app_spec.js b/spec/frontend/reports/accessibility_report/grouped_accessibility_reports_app_spec.js
deleted file mode 100644
index 9d3535291eb..00000000000
--- a/spec/frontend/reports/accessibility_report/grouped_accessibility_reports_app_spec.js
+++ /dev/null
@@ -1,125 +0,0 @@
-import { mount } from '@vue/test-utils';
-import Vue from 'vue';
-import Vuex from 'vuex';
-import AccessibilityIssueBody from '~/reports/accessibility_report/components/accessibility_issue_body.vue';
-import GroupedAccessibilityReportsApp from '~/reports/accessibility_report/grouped_accessibility_reports_app.vue';
-import { getStoreConfig } from '~/reports/accessibility_report/store';
-import { mockReport } from './mock_data';
-
-Vue.use(Vuex);
-
-describe('Grouped accessibility reports app', () => {
- let wrapper;
- let mockStore;
-
- const mountComponent = () => {
- wrapper = mount(GroupedAccessibilityReportsApp, {
- store: mockStore,
- propsData: {
- endpoint: 'endpoint.json',
- },
- });
- };
-
- const findHeader = () => wrapper.find('[data-testid="report-section-code-text"]');
-
- beforeEach(() => {
- mockStore = new Vuex.Store({
- ...getStoreConfig(),
- actions: { fetchReport: () => {}, setEndpoint: () => {} },
- });
-
- mountComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('while loading', () => {
- beforeEach(() => {
- mockStore.state.isLoading = true;
- mountComponent();
- });
-
- it('renders loading state', () => {
- expect(findHeader().text()).toEqual('Accessibility scanning results are being parsed');
- });
- });
-
- describe('with error', () => {
- beforeEach(() => {
- mockStore.state.isLoading = false;
- mockStore.state.hasError = true;
- mountComponent();
- });
-
- it('renders error state', () => {
- expect(findHeader().text()).toEqual('Accessibility scanning failed loading results');
- });
- });
-
- describe('with a report', () => {
- describe('with no issues', () => {
- beforeEach(() => {
- mockStore.state.report = {
- summary: {
- errored: 0,
- },
- };
- });
-
- it('renders no issues header', () => {
- expect(findHeader().text()).toContain(
- 'Accessibility scanning detected no issues for the source branch only',
- );
- });
- });
-
- describe('with one issue', () => {
- beforeEach(() => {
- mockStore.state.report = {
- summary: {
- errored: 1,
- },
- };
- });
-
- it('renders one issue header', () => {
- expect(findHeader().text()).toContain(
- 'Accessibility scanning detected 1 issue for the source branch only',
- );
- });
- });
-
- describe('with multiple issues', () => {
- beforeEach(() => {
- mockStore.state.report = {
- summary: {
- errored: 2,
- },
- };
- });
-
- it('renders multiple issues header', () => {
- expect(findHeader().text()).toContain(
- 'Accessibility scanning detected 2 issues for the source branch only',
- );
- });
- });
-
- describe('with issues to show', () => {
- beforeEach(() => {
- mockStore.state.report = mockReport;
- });
-
- it('renders custom accessibility issue body', () => {
- const issueBody = wrapper.findComponent(AccessibilityIssueBody);
-
- expect(issueBody.props('issue').code).toBe(mockReport.new_errors[0].code);
- expect(issueBody.props('issue').message).toBe(mockReport.new_errors[0].message);
- expect(issueBody.props('isNew')).toBe(true);
- });
- });
- });
-});
diff --git a/spec/frontend/reports/accessibility_report/mock_data.js b/spec/frontend/reports/accessibility_report/mock_data.js
deleted file mode 100644
index 9dace1e7c54..00000000000
--- a/spec/frontend/reports/accessibility_report/mock_data.js
+++ /dev/null
@@ -1,53 +0,0 @@
-export const mockReport = {
- status: 'failed',
- summary: {
- total: 2,
- resolved: 0,
- errored: 2,
- },
- new_errors: [
- {
- code: 'WCAG2AA.Principle1.Guideline1_4.1_4_3.G18.Fail',
- type: 'error',
- typeCode: 1,
- message:
- 'This element has insufficient contrast at this conformance level. Expected a contrast ratio of at least 4.5:1, but text in this element has a contrast ratio of 3.84:1. Recommendation: change text colour to #767676.',
- context: '<a href="/stages-devops-lifecycle/" class="main-nav-link">Product</a>',
- selector: '#main-nav > div:nth-child(2) > ul > li:nth-child(1) > a',
- runner: 'htmlcs',
- runnerExtras: {},
- },
- ],
- new_notes: [],
- new_warnings: [],
- resolved_errors: [
- {
- code: 'WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent',
- type: 'error',
- typeCode: 1,
- message:
- 'Anchor element found with a valid href attribute, but no link content has been supplied.',
- context: '<a href="/" class="navbar-brand animated"><svg height="36" viewBox="0 0 1...</a>',
- selector: '#main-nav > div:nth-child(1) > a',
- runner: 'htmlcs',
- runnerExtras: {},
- },
- ],
- resolved_notes: [],
- resolved_warnings: [],
- existing_errors: [
- {
- code: 'WCAG2AA.Principle4.Guideline4_1.4_1_2.H91.A.NoContent',
- type: 'error',
- typeCode: 1,
- message:
- 'Anchor element found with a valid href attribute, but no link content has been supplied.',
- context: '<a href="/" class="navbar-brand animated"><svg height="36" viewBox="0 0 1...</a>',
- selector: '#main-nav > div:nth-child(1) > a',
- runner: 'htmlcs',
- runnerExtras: {},
- },
- ],
- existing_notes: [],
- existing_warnings: [],
-};
diff --git a/spec/frontend/reports/accessibility_report/store/actions_spec.js b/spec/frontend/reports/accessibility_report/store/actions_spec.js
deleted file mode 100644
index bab6c4905a7..00000000000
--- a/spec/frontend/reports/accessibility_report/store/actions_spec.js
+++ /dev/null
@@ -1,115 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import testAction from 'helpers/vuex_action_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import axios from '~/lib/utils/axios_utils';
-import createStore from '~/reports/accessibility_report/store';
-import * as actions from '~/reports/accessibility_report/store/actions';
-import * as types from '~/reports/accessibility_report/store/mutation_types';
-import { mockReport } from '../mock_data';
-
-describe('Accessibility Reports actions', () => {
- let localState;
- let localStore;
-
- beforeEach(() => {
- localStore = createStore();
- localState = localStore.state;
- });
-
- describe('setEndpoints', () => {
- it('should commit SET_ENDPOINTS mutation', () => {
- const endpoint = 'endpoint.json';
-
- return testAction(
- actions.setEndpoint,
- endpoint,
- localState,
- [{ type: types.SET_ENDPOINT, payload: endpoint }],
- [],
- );
- });
- });
-
- describe('fetchReport', () => {
- let mock;
-
- beforeEach(() => {
- localState.endpoint = `${TEST_HOST}/endpoint.json`;
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- actions.stopPolling();
- actions.clearEtagPoll();
- });
-
- describe('success', () => {
- it('should commit REQUEST_REPORT mutation and dispatch receiveReportSuccess', () => {
- const data = { report: { summary: {} } };
- mock.onGet(`${TEST_HOST}/endpoint.json`).reply(200, data);
-
- return testAction(
- actions.fetchReport,
- null,
- localState,
- [{ type: types.REQUEST_REPORT }],
- [
- {
- payload: { status: 200, data },
- type: 'receiveReportSuccess',
- },
- ],
- );
- });
- });
-
- describe('error', () => {
- it('should commit REQUEST_REPORT and RECEIVE_REPORT_ERROR mutations', () => {
- mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
-
- return testAction(
- actions.fetchReport,
- null,
- localState,
- [{ type: types.REQUEST_REPORT }],
- [{ type: 'receiveReportError' }],
- );
- });
- });
- });
-
- describe('receiveReportSuccess', () => {
- it('should commit RECEIVE_REPORT_SUCCESS mutation with 200', () => {
- return testAction(
- actions.receiveReportSuccess,
- { status: 200, data: mockReport },
- localState,
- [{ type: types.RECEIVE_REPORT_SUCCESS, payload: mockReport }],
- [{ type: 'stopPolling' }],
- );
- });
-
- it('should not commit RECEIVE_REPORTS_SUCCESS mutation with 204', () => {
- return testAction(
- actions.receiveReportSuccess,
- { status: 204, data: mockReport },
- localState,
- [],
- [],
- );
- });
- });
-
- describe('receiveReportError', () => {
- it('should commit RECEIVE_REPORT_ERROR mutation', () => {
- return testAction(
- actions.receiveReportError,
- null,
- localState,
- [{ type: types.RECEIVE_REPORT_ERROR }],
- [{ type: 'stopPolling' }],
- );
- });
- });
-});
diff --git a/spec/frontend/reports/accessibility_report/store/getters_spec.js b/spec/frontend/reports/accessibility_report/store/getters_spec.js
deleted file mode 100644
index 96344596003..00000000000
--- a/spec/frontend/reports/accessibility_report/store/getters_spec.js
+++ /dev/null
@@ -1,149 +0,0 @@
-import createStore from '~/reports/accessibility_report/store';
-import * as getters from '~/reports/accessibility_report/store/getters';
-import { LOADING, ERROR, SUCCESS, STATUS_FAILED } from '~/reports/constants';
-
-describe('Accessibility reports store getters', () => {
- let localState;
- let localStore;
-
- beforeEach(() => {
- localStore = createStore();
- localState = localStore.state;
- });
-
- describe('summaryStatus', () => {
- describe('when summary is loading', () => {
- it('returns loading status', () => {
- localState.isLoading = true;
-
- expect(getters.summaryStatus(localState)).toEqual(LOADING);
- });
- });
-
- describe('when summary has error', () => {
- it('returns error status', () => {
- localState.hasError = true;
-
- expect(getters.summaryStatus(localState)).toEqual(ERROR);
- });
- });
-
- describe('when summary has failed status', () => {
- it('returns loading status', () => {
- localState.status = STATUS_FAILED;
-
- expect(getters.summaryStatus(localState)).toEqual(ERROR);
- });
- });
-
- describe('when summary has successfully loaded', () => {
- it('returns loading status', () => {
- expect(getters.summaryStatus(localState)).toEqual(SUCCESS);
- });
- });
- });
-
- describe('groupedSummaryText', () => {
- describe('when state is loading', () => {
- it('returns the loading summary message', () => {
- localState.isLoading = true;
- const result = 'Accessibility scanning results are being parsed';
-
- expect(getters.groupedSummaryText(localState)).toEqual(result);
- });
- });
-
- describe('when state has error', () => {
- it('returns the error summary message', () => {
- localState.hasError = true;
- const result = 'Accessibility scanning failed loading results';
-
- expect(getters.groupedSummaryText(localState)).toEqual(result);
- });
- });
-
- describe('when state has successfully loaded', () => {
- describe('when report has errors', () => {
- it('returns summary message containing number of errors', () => {
- localState.report = {
- summary: {
- errored: 2,
- },
- };
- const result = 'Accessibility scanning detected 2 issues for the source branch only';
-
- expect(getters.groupedSummaryText(localState)).toEqual(result);
- });
- });
-
- describe('when report has no errors', () => {
- it('returns summary message containing no errors', () => {
- localState.report = {
- summary: {
- errored: 0,
- },
- };
- const result = 'Accessibility scanning detected no issues for the source branch only';
-
- expect(getters.groupedSummaryText(localState)).toEqual(result);
- });
- });
- });
- });
-
- describe('shouldRenderIssuesList', () => {
- describe('when has issues to render', () => {
- it('returns true', () => {
- localState.report = {
- existing_errors: [{ name: 'Issue' }],
- };
-
- expect(getters.shouldRenderIssuesList(localState)).toEqual(true);
- });
- });
-
- describe('when does not have issues to render', () => {
- it('returns false', () => {
- localState.report = {
- status: 'success',
- summary: { errored: 0 },
- };
-
- expect(getters.shouldRenderIssuesList(localState)).toEqual(false);
- });
- });
- });
-
- describe('unresolvedIssues', () => {
- it('returns the array unresolved errors', () => {
- localState.report = {
- existing_errors: [1],
- };
- const result = [1];
-
- expect(getters.unresolvedIssues(localState)).toEqual(result);
- });
- });
-
- describe('resolvedIssues', () => {
- it('returns array of resolved errors', () => {
- localState.report = {
- resolved_errors: [1],
- };
- const result = [1];
-
- expect(getters.resolvedIssues(localState)).toEqual(result);
- });
- });
-
- describe('newIssues', () => {
- it('returns array of new errors', () => {
- localState.report = {
- new_errors: [1],
- };
- const result = [1];
-
- expect(getters.newIssues(localState)).toEqual(result);
- });
- });
-});
diff --git a/spec/frontend/reports/accessibility_report/store/mutations_spec.js b/spec/frontend/reports/accessibility_report/store/mutations_spec.js
deleted file mode 100644
index b336261d804..00000000000
--- a/spec/frontend/reports/accessibility_report/store/mutations_spec.js
+++ /dev/null
@@ -1,64 +0,0 @@
-import createStore from '~/reports/accessibility_report/store';
-import mutations from '~/reports/accessibility_report/store/mutations';
-
-describe('Accessibility Reports mutations', () => {
- let localState;
- let localStore;
-
- beforeEach(() => {
- localStore = createStore();
- localState = localStore.state;
- });
-
- describe('SET_ENDPOINT', () => {
- it('sets endpoint to given value', () => {
- const endpoint = 'endpoint.json';
- mutations.SET_ENDPOINT(localState, endpoint);
-
- expect(localState.endpoint).toEqual(endpoint);
- });
- });
-
- describe('REQUEST_REPORT', () => {
- it('sets isLoading to true', () => {
- mutations.REQUEST_REPORT(localState);
-
- expect(localState.isLoading).toEqual(true);
- });
- });
-
- describe('RECEIVE_REPORT_SUCCESS', () => {
- it('sets isLoading to false', () => {
- mutations.RECEIVE_REPORT_SUCCESS(localState, {});
-
- expect(localState.isLoading).toEqual(false);
- });
-
- it('sets hasError to false', () => {
- mutations.RECEIVE_REPORT_SUCCESS(localState, {});
-
- expect(localState.hasError).toEqual(false);
- });
-
- it('sets report to response report', () => {
- const report = { data: 'testing' };
- mutations.RECEIVE_REPORT_SUCCESS(localState, report);
-
- expect(localState.report).toEqual(report);
- });
- });
-
- describe('RECEIVE_REPORT_ERROR', () => {
- it('sets isLoading to false', () => {
- mutations.RECEIVE_REPORT_ERROR(localState);
-
- expect(localState.isLoading).toEqual(false);
- });
-
- it('sets hasError to true', () => {
- mutations.RECEIVE_REPORT_ERROR(localState);
-
- expect(localState.hasError).toEqual(true);
- });
- });
-});
diff --git a/spec/frontend/reports/components/report_section_spec.js b/spec/frontend/reports/components/report_section_spec.js
index bdfba8d6878..cc35b99a199 100644
--- a/spec/frontend/reports/components/report_section_spec.js
+++ b/spec/frontend/reports/components/report_section_spec.js
@@ -7,9 +7,13 @@ import ReportSection from '~/reports/components/report_section.vue';
describe('ReportSection component', () => {
let wrapper;
- const findButton = () => wrapper.findComponent(GlButton);
+ const findExpandButton = () => wrapper.findComponent(GlButton);
const findPopover = () => wrapper.findComponent(HelpPopover);
const findReportSection = () => wrapper.find('.js-report-section-container');
+ const expectExpandButtonOpen = () =>
+ expect(findExpandButton().props('icon')).toBe('chevron-lg-up');
+ const expectExpandButtonClosed = () =>
+ expect(findExpandButton().props('icon')).toBe('chevron-lg-down');
const resolvedIssues = [
{
@@ -122,22 +126,22 @@ describe('ReportSection component', () => {
it('toggles issues', async () => {
createComponent({ props: { hasIssues: true } });
- await findButton().trigger('click');
+ await findExpandButton().trigger('click');
expect(findReportSection().isVisible()).toBe(true);
- expect(findButton().text()).toBe('Collapse');
+ expectExpandButtonOpen();
- await findButton().trigger('click');
+ await findExpandButton().trigger('click');
expect(findReportSection().isVisible()).toBe(false);
- expect(findButton().text()).toBe('Expand');
+ expectExpandButtonClosed();
});
it('is always expanded, if always-open is set to true', () => {
createComponent({ props: { hasIssues: true, alwaysOpen: true } });
expect(findReportSection().isVisible()).toBe(true);
- expect(findButton().exists()).toBe(false);
+ expect(findExpandButton().exists()).toBe(false);
});
});
});
@@ -148,7 +152,7 @@ describe('ReportSection component', () => {
expect(wrapper.emitted('toggleEvent')).toBeUndefined();
- findButton().trigger('click');
+ findExpandButton().trigger('click');
expect(wrapper.emitted('toggleEvent')).toEqual([[]]);
});
@@ -158,7 +162,7 @@ describe('ReportSection component', () => {
expect(wrapper.emitted('toggleEvent')).toBeUndefined();
- findButton().trigger('click');
+ findExpandButton().trigger('click');
expect(wrapper.emitted('toggleEvent')).toBeUndefined();
});
@@ -208,7 +212,7 @@ describe('ReportSection component', () => {
});
it('should still render the expand/collapse button', () => {
- expect(findButton().text()).toBe('Expand');
+ expectExpandButtonClosed();
});
});
diff --git a/spec/frontend/repository/commits_service_spec.js b/spec/frontend/repository/commits_service_spec.js
index 697fa7c4fd1..de7c56f239a 100644
--- a/spec/frontend/repository/commits_service_spec.js
+++ b/spec/frontend/repository/commits_service_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { loadCommits, isRequested, resetRequestedCommits } from '~/repository/commits_service';
import httpStatus from '~/lib/utils/http_status';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { I18N_COMMIT_DATA_FETCH_ERROR } from '~/repository/constants';
jest.mock('~/flash');
@@ -65,13 +65,13 @@ describe('commits service', () => {
expect(isRequested(300)).toBe(false);
});
- it('calls `createFlash` when the request fails', async () => {
+ it('calls `createAlert` when the request fails', async () => {
const invalidPath = '/#@ some/path';
const invalidUrl = `${url}${invalidPath}`;
mock.onGet(invalidUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR, [], {});
await requestCommits(1, 'my-project', invalidPath);
- expect(createFlash).toHaveBeenCalledWith({ message: I18N_COMMIT_DATA_FETCH_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: I18N_COMMIT_DATA_FETCH_ERROR });
});
});
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index 01494cb6a24..6fe60f3c2e6 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -7,7 +7,7 @@ exports[`Repository last commit component renders commit widget 1`] = `
<user-avatar-link-stub
class="gl-my-2 gl-mr-4"
imgalt=""
- imgcssclasses="gl-mr-0!"
+ imgcssclasses=""
imgsize="32"
imgsrc="https://test.com"
linkhref="/test"
diff --git a/spec/frontend/repository/components/blob_controls_spec.js b/spec/frontend/repository/components/blob_controls_spec.js
index 6da1861ea7c..0d52542397f 100644
--- a/spec/frontend/repository/components/blob_controls_spec.js
+++ b/spec/frontend/repository/components/blob_controls_spec.js
@@ -8,9 +8,13 @@ import blobControlsQuery from '~/repository/queries/blob_controls.query.graphql'
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createRouter from '~/repository/router';
import { updateElementsVisibility } from '~/repository/utils/dom';
+import ShortcutsBlob from '~/behaviors/shortcuts/shortcuts_blob';
+import BlobLinePermalinkUpdater from '~/blob/blob_line_permalink_updater';
import { blobControlsDataMock, refMock } from '../mock_data';
jest.mock('~/repository/utils/dom');
+jest.mock('~/behaviors/shortcuts/shortcuts_blob');
+jest.mock('~/blob/blob_line_permalink_updater');
let router;
let wrapper;
@@ -82,4 +86,12 @@ describe('Blob controls component', () => {
expect(updateElementsVisibility).toHaveBeenCalledWith('.tree-controls', true);
},
);
+
+ it('loads the ShortcutsBlob', () => {
+ expect(ShortcutsBlob).toHaveBeenCalled();
+ });
+
+ it('loads the BlobLinePermalinkUpdater', () => {
+ expect(BlobLinePermalinkUpdater).toHaveBeenCalled();
+ });
});
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index bf9528953b6..964b135bee3 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -185,7 +185,7 @@ describe('Repository last commit component', () => {
it('strips the first newline of the description', () => {
expect(findCommitRowDescription().html()).toBe(
- '<pre class="commit-row-description gl-mb-3">Update ADOPTERS.md</pre>',
+ '<pre class="commit-row-description gl-mb-3 gl-white-space-pre-line">Update ADOPTERS.md</pre>',
);
});
diff --git a/spec/frontend/repository/components/new_directory_modal_spec.js b/spec/frontend/repository/components/new_directory_modal_spec.js
index aaf751a9a8d..cf0d48280f4 100644
--- a/spec/frontend/repository/components/new_directory_modal_spec.js
+++ b/spec/frontend/repository/components/new_directory_modal_spec.js
@@ -4,7 +4,7 @@ import { nextTick } from 'vue';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import httpStatusCodes from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import NewDirectoryModal from '~/repository/components/new_directory_modal.vue';
@@ -194,7 +194,7 @@ describe('NewDirectoryModal', () => {
await fillForm({ dirName: 'foo', branchName: 'master', commitMessage: 'foo' });
await submitForm();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: NewDirectoryModal.i18n.ERROR_MESSAGE,
});
});
diff --git a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
index 26064e9b248..b99d741e984 100644
--- a/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
+++ b/spec/frontend/repository/components/table/__snapshots__/row_spec.js.snap
@@ -40,10 +40,10 @@ exports[`Repository table row component renders a symlink table row 1`] = `
</td>
<td
- class="d-none d-sm-table-cell tree-commit cursor-default"
+ class="d-none d-sm-table-cell tree-commit cursor-default gl-text-secondary"
>
<gl-link-stub
- class="str-truncated-100 tree-commit-link"
+ class="str-truncated-100 tree-commit-link gl-text-secondary"
/>
<gl-intersection-observer-stub>
@@ -52,7 +52,7 @@ exports[`Repository table row component renders a symlink table row 1`] = `
</td>
<td
- class="tree-time-ago text-right cursor-default"
+ class="tree-time-ago text-right cursor-default gl-text-secondary"
>
<timeago-tooltip-stub
cssclass=""
@@ -105,10 +105,10 @@ exports[`Repository table row component renders table row 1`] = `
</td>
<td
- class="d-none d-sm-table-cell tree-commit cursor-default"
+ class="d-none d-sm-table-cell tree-commit cursor-default gl-text-secondary"
>
<gl-link-stub
- class="str-truncated-100 tree-commit-link"
+ class="str-truncated-100 tree-commit-link gl-text-secondary"
/>
<gl-intersection-observer-stub>
@@ -117,7 +117,7 @@ exports[`Repository table row component renders table row 1`] = `
</td>
<td
- class="tree-time-ago text-right cursor-default"
+ class="tree-time-ago text-right cursor-default gl-text-secondary"
>
<timeago-tooltip-stub
cssclass=""
@@ -170,10 +170,10 @@ exports[`Repository table row component renders table row for path with special
</td>
<td
- class="d-none d-sm-table-cell tree-commit cursor-default"
+ class="d-none d-sm-table-cell tree-commit cursor-default gl-text-secondary"
>
<gl-link-stub
- class="str-truncated-100 tree-commit-link"
+ class="str-truncated-100 tree-commit-link gl-text-secondary"
/>
<gl-intersection-observer-stub>
@@ -182,7 +182,7 @@ exports[`Repository table row component renders table row for path with special
</td>
<td
- class="tree-time-ago text-right cursor-default"
+ class="tree-time-ago text-right cursor-default gl-text-secondary"
>
<timeago-tooltip-stub
cssclass=""
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index 697d2dcc7f5..2180f78a8df 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -159,7 +159,7 @@ describe('Repository table component', () => {
});
describe('Show more button', () => {
- const showMoreButton = () => vm.find(GlButton);
+ const showMoreButton = () => vm.findComponent(GlButton);
it.each`
hasMore | expectButtonToExist
diff --git a/spec/frontend/repository/components/table/parent_row_spec.js b/spec/frontend/repository/components/table/parent_row_spec.js
index 9daae8c36ef..03fb4242e40 100644
--- a/spec/frontend/repository/components/table/parent_row_spec.js
+++ b/spec/frontend/repository/components/table/parent_row_spec.js
@@ -39,7 +39,7 @@ describe('Repository parent row component', () => {
`('renders link in $path to $to', ({ path, to }) => {
factory(path);
- expect(vm.find(RouterLinkStub).props().to).toEqual({
+ expect(vm.findComponent(RouterLinkStub).props().to).toEqual({
path: to,
});
});
@@ -69,6 +69,6 @@ describe('Repository parent row component', () => {
it('renders loading icon when loading parent', () => {
factory('app/assets', 'app');
- expect(vm.find(GlLoadingIcon).exists()).toBe(true);
+ expect(vm.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index 13b09e57473..64aa6d179a8 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -47,7 +47,7 @@ function factory(propsData = {}) {
}
describe('Repository table row component', () => {
- const findRouterLink = () => vm.find(RouterLinkStub);
+ const findRouterLink = () => vm.findComponent(RouterLinkStub);
const findIntersectionObserver = () => vm.findComponent(GlIntersectionObserver);
afterEach(() => {
@@ -124,7 +124,7 @@ describe('Repository table row component', () => {
});
await nextTick();
- expect(vm.find(component).exists()).toBe(true);
+ expect(vm.findComponent(component).exists()).toBe(true);
});
it.each`
@@ -141,7 +141,7 @@ describe('Repository table row component', () => {
});
await nextTick();
- expect(vm.find({ ref: 'link' }).props('to')).toEqual({
+ expect(vm.findComponent({ ref: 'link' }).props('to')).toEqual({
path: `/-/tree/main/${encodeURIComponent(path)}`,
});
});
@@ -197,7 +197,7 @@ describe('Repository table row component', () => {
});
await nextTick();
- expect(vm.find(GlBadge).exists()).toBe(true);
+ expect(vm.findComponent(GlBadge).exists()).toBe(true);
});
it('renders commit and web links with href for submodule', async () => {
@@ -213,7 +213,7 @@ describe('Repository table row component', () => {
await nextTick();
expect(vm.find('a').attributes('href')).toEqual('https://test.com');
- expect(vm.find(GlLink).attributes('href')).toEqual('https://test.com/commit');
+ expect(vm.findComponent(GlLink).attributes('href')).toEqual('https://test.com/commit');
});
it('renders lock icon', async () => {
@@ -226,8 +226,8 @@ describe('Repository table row component', () => {
});
await nextTick();
- expect(vm.find(GlIcon).exists()).toBe(true);
- expect(vm.find(GlIcon).props('name')).toBe('lock');
+ expect(vm.findComponent(GlIcon).exists()).toBe(true);
+ expect(vm.findComponent(GlIcon).props('name')).toBe('lock');
});
it('renders loading icon when path is loading', () => {
@@ -240,7 +240,7 @@ describe('Repository table row component', () => {
loadingPath: 'test',
});
- expect(vm.find(FileIcon).props('loading')).toBe(true);
+ expect(vm.findComponent(FileIcon).props('loading')).toBe(true);
});
describe('row visibility', () => {
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index 9d3a5394df8..352f4314232 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -38,7 +38,7 @@ function factory(path, data = () => ({})) {
}
describe('Repository table component', () => {
- const findFileTable = () => vm.find(FileTable);
+ const findFileTable = () => vm.findComponent(FileTable);
afterEach(() => {
vm.destroy();
@@ -53,7 +53,7 @@ describe('Repository table component', () => {
await nextTick();
- expect(vm.find(FilePreview).exists()).toBe(true);
+ expect(vm.findComponent(FilePreview).exists()).toBe(true);
});
it('trigger fetchFiles and resetRequestedCommits when mounted', async () => {
diff --git a/spec/frontend/repository/components/upload_blob_modal_spec.js b/spec/frontend/repository/components/upload_blob_modal_spec.js
index 505ff7f3dd6..8db169b02b4 100644
--- a/spec/frontend/repository/components/upload_blob_modal_spec.js
+++ b/spec/frontend/repository/components/upload_blob_modal_spec.js
@@ -4,7 +4,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import httpStatusCodes from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
@@ -47,12 +47,12 @@ describe('UploadBlobModal', () => {
});
};
- const findModal = () => wrapper.find(GlModal);
- const findAlert = () => wrapper.find(GlAlert);
- const findCommitMessage = () => wrapper.find(GlFormTextarea);
- const findBranchName = () => wrapper.find(GlFormInput);
- const findMrToggle = () => wrapper.find(GlToggle);
- const findUploadDropzone = () => wrapper.find(UploadDropzone);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findCommitMessage = () => wrapper.findComponent(GlFormTextarea);
+ const findBranchName = () => wrapper.findComponent(GlFormInput);
+ const findMrToggle = () => wrapper.findComponent(GlToggle);
+ const findUploadDropzone = () => wrapper.findComponent(UploadDropzone);
const actionButtonDisabledState = () => findModal().props('actionPrimary').attributes[0].disabled;
const cancelButtonDisabledState = () => findModal().props('actionCancel').attributes[0].disabled;
const actionButtonLoadingState = () => findModal().props('actionPrimary').attributes[0].loading;
@@ -185,7 +185,7 @@ describe('UploadBlobModal', () => {
});
it('creates a flash error', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Error uploading file. Please try again.',
});
});
diff --git a/spec/frontend/repository/pages/blob_spec.js b/spec/frontend/repository/pages/blob_spec.js
index 41ab4d616b8..4fe6188370e 100644
--- a/spec/frontend/repository/pages/blob_spec.js
+++ b/spec/frontend/repository/pages/blob_spec.js
@@ -7,7 +7,7 @@ jest.mock('~/repository/utils/dom');
describe('Repository blob page component', () => {
let wrapper;
- const findBlobContentViewer = () => wrapper.find(BlobContentViewer);
+ const findBlobContentViewer = () => wrapper.findComponent(BlobContentViewer);
const path = 'file.js';
beforeEach(() => {
diff --git a/spec/frontend/repository/pages/index_spec.js b/spec/frontend/repository/pages/index_spec.js
index c0afb7931b1..559257d414c 100644
--- a/spec/frontend/repository/pages/index_spec.js
+++ b/spec/frontend/repository/pages/index_spec.js
@@ -34,7 +34,7 @@ describe('Repository index page component', () => {
it('renders TreePage', () => {
factory();
- const child = wrapper.find(TreePage);
+ const child = wrapper.findComponent(TreePage);
expect(child.exists()).toBe(true);
expect(child.props()).toEqual({ path: '/' });
diff --git a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
index 7ab4aeee9bc..64f66d8f3ba 100644
--- a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
+++ b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
@@ -104,6 +104,10 @@ describe('AdminRunnerShowApp', () => {
Platform darwin
Configuration Runs untagged jobs
Maximum job timeout None
+ Token expiry
+ Runner authentication token expiration
+ Runner authentication tokens will expire based on a set interval.
+ They will automatically rotate once expired. Learn more Never expires
Tags None`.replace(/\s+/g, ' ');
expect(wrapper.text().replace(/\s+/g, ' ')).toContain(expected);
diff --git a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
index 55a298e1695..7afde3bdc96 100644
--- a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
@@ -20,8 +20,6 @@ import AdminRunnersApp from '~/runner/admin_runners/admin_runners_app.vue';
import RunnerStackedLayoutBanner from '~/runner/components/runner_stacked_layout_banner.vue';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
-import RunnerBulkDelete from '~/runner/components/runner_bulk_delete.vue';
-import RunnerBulkDeleteCheckbox from '~/runner/components/runner_bulk_delete_checkbox.vue';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
@@ -45,6 +43,7 @@ import {
PARAM_KEY_STATUS,
PARAM_KEY_TAG,
STATUS_ONLINE,
+ DEFAULT_MEMBERSHIP,
RUNNER_PAGE_SIZE,
} from '~/runner/constants';
import allRunnersQuery from 'ee_else_ce/runner/graphql/list/all_runners.query.graphql';
@@ -83,8 +82,6 @@ const COUNT_QUERIES = 7; // 4 tabs + 3 status queries
describe('AdminRunnersApp', () => {
let wrapper;
- let cacheConfig;
- let localMutations;
let showToast;
const findRunnerStackedLayoutBanner = () => wrapper.findComponent(RunnerStackedLayoutBanner);
@@ -92,8 +89,6 @@ describe('AdminRunnersApp', () => {
const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown);
const findRunnerTypeTabs = () => wrapper.findComponent(RunnerTypeTabs);
- const findRunnerBulkDelete = () => wrapper.findComponent(RunnerBulkDelete);
- const findRunnerBulkDeleteCheckbox = () => wrapper.findComponent(RunnerBulkDeleteCheckbox);
const findRunnerList = () => wrapper.findComponent(RunnerList);
const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState);
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
@@ -106,7 +101,7 @@ describe('AdminRunnersApp', () => {
provide,
...options
} = {}) => {
- ({ cacheConfig, localMutations } = createLocalState());
+ const { cacheConfig, localMutations } = createLocalState();
const handlers = [
[allRunnersQuery, mockRunnersHandler],
@@ -195,7 +190,7 @@ describe('AdminRunnersApp', () => {
const { id, shortSha } = mockRunners[0];
const numericId = getIdFromGraphQLId(id);
- const runnerLink = wrapper.find('tr [data-testid="td-summary"]').find(GlLink);
+ const runnerLink = wrapper.find('tr [data-testid="td-summary"]').findComponent(GlLink);
expect(runnerLink.text()).toBe(`#${numericId} (${shortSha})`);
expect(runnerLink.attributes('href')).toBe(`http://localhost/admin/runners/${numericId}`);
@@ -204,7 +199,9 @@ describe('AdminRunnersApp', () => {
it('renders runner actions for each runner', async () => {
await createComponent({ mountFn: mountExtended });
- const runnerActions = wrapper.find('tr [data-testid="td-actions"]').find(RunnerActionsCell);
+ const runnerActions = wrapper
+ .find('tr [data-testid="td-actions"]')
+ .findComponent(RunnerActionsCell);
const runner = mockRunners[0];
expect(runnerActions.props()).toEqual({
@@ -219,6 +216,7 @@ describe('AdminRunnersApp', () => {
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: undefined,
type: undefined,
+ membership: DEFAULT_MEMBERSHIP,
sort: DEFAULT_SORT,
first: RUNNER_PAGE_SIZE,
});
@@ -255,7 +253,7 @@ describe('AdminRunnersApp', () => {
});
it('Links to the runner page', async () => {
- const runnerLink = wrapper.find('tr [data-testid="td-summary"]').find(GlLink);
+ const runnerLink = wrapper.find('tr [data-testid="td-summary"]').findComponent(GlLink);
expect(runnerLink.text()).toBe(`#${id} (${shortSha})`);
expect(runnerLink.attributes('href')).toBe(`http://localhost/admin/runners/${id}`);
@@ -288,6 +286,7 @@ describe('AdminRunnersApp', () => {
it('sets the filters in the search bar', () => {
expect(findRunnerFilteredSearchBar().props('value')).toEqual({
runnerType: INSTANCE_TYPE,
+ membership: DEFAULT_MEMBERSHIP,
filters: [
{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } },
{ type: PARAM_KEY_PAUSED, value: { data: 'true', operator: '=' } },
@@ -301,6 +300,7 @@ describe('AdminRunnersApp', () => {
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: STATUS_ONLINE,
type: INSTANCE_TYPE,
+ membership: DEFAULT_MEMBERSHIP,
paused: true,
sort: DEFAULT_SORT,
first: RUNNER_PAGE_SIZE,
@@ -310,6 +310,7 @@ describe('AdminRunnersApp', () => {
it('fetches count results for requested status', () => {
expect(mockRunnersCountHandler).toHaveBeenCalledWith({
type: INSTANCE_TYPE,
+ membership: DEFAULT_MEMBERSHIP,
status: STATUS_ONLINE,
paused: true,
});
@@ -322,6 +323,7 @@ describe('AdminRunnersApp', () => {
findRunnerFilteredSearchBar().vm.$emit('input', {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } }],
sort: CREATED_ASC,
});
@@ -339,6 +341,7 @@ describe('AdminRunnersApp', () => {
it('requests the runners with filters', () => {
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: STATUS_ONLINE,
+ membership: DEFAULT_MEMBERSHIP,
sort: CREATED_ASC,
first: RUNNER_PAGE_SIZE,
});
@@ -347,6 +350,7 @@ describe('AdminRunnersApp', () => {
it('fetches count results for requested status', () => {
expect(mockRunnersCountHandler).toHaveBeenCalledWith({
status: STATUS_ONLINE,
+ membership: DEFAULT_MEMBERSHIP,
});
});
});
@@ -357,65 +361,26 @@ describe('AdminRunnersApp', () => {
expect(findRunnerPagination().attributes('disabled')).toBe('true');
});
- describe('when bulk delete is enabled', () => {
+ describe('Bulk delete', () => {
describe('Before runners are deleted', () => {
beforeEach(async () => {
- await createComponent({
- mountFn: mountExtended,
- provide: {
- glFeatures: { adminRunnersBulkDelete: true },
- },
- });
- });
-
- it('runner bulk delete is available', () => {
- expect(findRunnerBulkDelete().props('runners')).toEqual(mockRunners);
- });
-
- it('runner bulk delete checkbox is available', () => {
- expect(findRunnerBulkDeleteCheckbox().props('runners')).toEqual(mockRunners);
+ await createComponent({ mountFn: mountExtended });
});
it('runner list is checkable', () => {
expect(findRunnerList().props('checkable')).toBe(true);
});
-
- it('responds to checked items by updating the local cache', () => {
- const setRunnerCheckedMock = jest
- .spyOn(localMutations, 'setRunnerChecked')
- .mockImplementation(() => {});
-
- const runner = mockRunners[0];
-
- expect(setRunnerCheckedMock).toHaveBeenCalledTimes(0);
-
- findRunnerList().vm.$emit('checked', {
- runner,
- isChecked: true,
- });
-
- expect(setRunnerCheckedMock).toHaveBeenCalledTimes(1);
- expect(setRunnerCheckedMock).toHaveBeenCalledWith({
- runner,
- isChecked: true,
- });
- });
});
describe('When runners are deleted', () => {
beforeEach(async () => {
- await createComponent({
- mountFn: mountExtended,
- provide: {
- glFeatures: { adminRunnersBulkDelete: true },
- },
- });
+ await createComponent({ mountFn: mountExtended });
});
it('count data is refetched', async () => {
expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES);
- findRunnerBulkDelete().vm.$emit('deleted', { message: 'Runners deleted' });
+ findRunnerList().vm.$emit('deleted', { message: 'Runners deleted' });
expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES * 2);
});
@@ -423,7 +388,7 @@ describe('AdminRunnersApp', () => {
it('toast is shown', async () => {
expect(showToast).toHaveBeenCalledTimes(0);
- findRunnerBulkDelete().vm.$emit('deleted', { message: 'Runners deleted' });
+ findRunnerList().vm.$emit('deleted', { message: 'Runners deleted' });
expect(showToast).toHaveBeenCalledTimes(1);
expect(showToast).toHaveBeenCalledWith('Runners deleted');
@@ -457,6 +422,7 @@ describe('AdminRunnersApp', () => {
beforeEach(async () => {
findRunnerFilteredSearchBar().vm.$emit('input', {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } }],
sort: CREATED_ASC,
});
@@ -504,6 +470,7 @@ describe('AdminRunnersApp', () => {
await findRunnerPaginationNext().trigger('click');
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
+ membership: DEFAULT_MEMBERSHIP,
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
after: pageInfo.endCursor,
diff --git a/spec/frontend/runner/components/cells/link_cell_spec.js b/spec/frontend/runner/components/cells/link_cell_spec.js
index a59a0eaa5d8..46ab1adb6b6 100644
--- a/spec/frontend/runner/components/cells/link_cell_spec.js
+++ b/spec/frontend/runner/components/cells/link_cell_spec.js
@@ -5,7 +5,7 @@ import LinkCell from '~/runner/components/cells/link_cell.vue';
describe('LinkCell', () => {
let wrapper;
- const findGlLink = () => wrapper.find(GlLink);
+ const findGlLink = () => wrapper.findComponent(GlLink);
const findSpan = () => wrapper.find('span');
const createComponent = ({ props = {}, ...options } = {}) => {
diff --git a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
index ffd6f126627..58974d4f85f 100644
--- a/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_actions_cell_spec.js
@@ -122,7 +122,7 @@ describe('RunnerActionsCell', () => {
expect(wrapper.emitted('deleted')).toEqual([[value]]);
});
- it('Renders the runner delete disabled button when user cannot delete', () => {
+ it('Does not render the runner delete button when user cannot delete', () => {
createComponent({
runner: {
userPermissions: {
@@ -132,7 +132,7 @@ describe('RunnerActionsCell', () => {
},
});
- expect(findDeleteBtn().props('disabled')).toBe(true);
+ expect(findDeleteBtn().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/runner/components/cells/runner_owner_cell_spec.js b/spec/frontend/runner/components/cells/runner_owner_cell_spec.js
new file mode 100644
index 00000000000..e9965d8855d
--- /dev/null
+++ b/spec/frontend/runner/components/cells/runner_owner_cell_spec.js
@@ -0,0 +1,111 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+import { s__ } from '~/locale';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+import RunnerOwnerCell from '~/runner/components/cells/runner_owner_cell.vue';
+
+import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
+
+describe('RunnerOwnerCell', () => {
+ let wrapper;
+
+ const findLink = () => wrapper.findComponent(GlLink);
+ const getLinkTooltip = () => getBinding(findLink().element, 'gl-tooltip').value;
+
+ const createComponent = ({ runner } = {}) => {
+ wrapper = shallowMount(RunnerOwnerCell, {
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ propsData: {
+ runner,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('When its an instance runner', () => {
+ beforeEach(() => {
+ createComponent({
+ runner: {
+ runnerType: INSTANCE_TYPE,
+ },
+ });
+ });
+
+ it('shows an administrator label', () => {
+ expect(findLink().exists()).toBe(false);
+ expect(wrapper.text()).toBe(s__('Runners|Administrator'));
+ });
+ });
+
+ describe('When its a group runner', () => {
+ const mockName = 'Group 2';
+ const mockFullName = 'Group 1 / Group 2';
+ const mockWebUrl = '/group-1/group-2';
+
+ beforeEach(() => {
+ createComponent({
+ runner: {
+ runnerType: GROUP_TYPE,
+ groups: {
+ nodes: [
+ {
+ name: mockName,
+ fullName: mockFullName,
+ webUrl: mockWebUrl,
+ },
+ ],
+ },
+ },
+ });
+ });
+
+ it('Displays a group link', () => {
+ expect(findLink().attributes('href')).toBe(mockWebUrl);
+ expect(wrapper.text()).toBe(mockName);
+ expect(getLinkTooltip()).toBe(mockFullName);
+ });
+ });
+
+ describe('When its a project runner', () => {
+ const mockName = 'Project 1';
+ const mockNameWithNamespace = 'Group 1 / Project 1';
+ const mockWebUrl = '/group-1/project-1';
+
+ beforeEach(() => {
+ createComponent({
+ runner: {
+ runnerType: PROJECT_TYPE,
+ ownerProject: {
+ name: mockName,
+ nameWithNamespace: mockNameWithNamespace,
+ webUrl: mockWebUrl,
+ },
+ },
+ });
+ });
+
+ it('Displays a project link', () => {
+ expect(findLink().attributes('href')).toBe(mockWebUrl);
+ expect(wrapper.text()).toBe(mockName);
+ expect(getLinkTooltip()).toBe(mockNameWithNamespace);
+ });
+ });
+
+ describe('When its an empty runner', () => {
+ beforeEach(() => {
+ createComponent({
+ runner: {},
+ });
+ });
+
+ it('shows no label', () => {
+ expect(wrapper.text()).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/runner/components/cells/runner_stacked_summary_cell_spec.js b/spec/frontend/runner/components/cells/runner_stacked_summary_cell_spec.js
index 21ec9f61f37..e7cadefc140 100644
--- a/spec/frontend/runner/components/cells/runner_stacked_summary_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_stacked_summary_cell_spec.js
@@ -85,7 +85,7 @@ describe('RunnerTypeCell', () => {
contactedAt: '2022-01-02',
});
- expect(findRunnerSummaryField('clock').find(TimeAgo).props('time')).toBe('2022-01-02');
+ expect(findRunnerSummaryField('clock').findComponent(TimeAgo).props('time')).toBe('2022-01-02');
});
it('Displays empty last contact', () => {
@@ -93,7 +93,7 @@ describe('RunnerTypeCell', () => {
contactedAt: null,
});
- expect(findRunnerSummaryField('clock').find(TimeAgo).exists()).toBe(false);
+ expect(findRunnerSummaryField('clock').findComponent(TimeAgo).exists()).toBe(false);
expect(findRunnerSummaryField('clock').text()).toContain(__('Never'));
});
@@ -134,7 +134,7 @@ describe('RunnerTypeCell', () => {
});
it('Displays created at', () => {
- expect(findRunnerSummaryField('calendar').find(TimeAgo).props('time')).toBe(
+ expect(findRunnerSummaryField('calendar').findComponent(TimeAgo).props('time')).toBe(
mockRunner.createdAt,
);
});
diff --git a/spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js b/spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js
index 0ac89e82314..424a4e61ccd 100644
--- a/spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js
+++ b/spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js
@@ -5,11 +5,21 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RunnerBulkDeleteCheckbox from '~/runner/components/runner_bulk_delete_checkbox.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createLocalState } from '~/runner/graphql/list/local_state';
-import { allRunnersData } from '../mock_data';
Vue.use(VueApollo);
-jest.mock('~/flash');
+const makeRunner = (id, deleteRunner = true) => ({
+ id,
+ userPermissions: { deleteRunner },
+});
+
+// Multi-select checkbox possible states:
+const stateToAttrs = {
+ unchecked: { disabled: undefined, checked: undefined, indeterminate: undefined },
+ checked: { disabled: undefined, checked: 'true', indeterminate: undefined },
+ indeterminate: { disabled: undefined, checked: undefined, indeterminate: 'true' },
+ disabled: { disabled: 'true', checked: undefined, indeterminate: undefined },
+};
describe('RunnerBulkDeleteCheckbox', () => {
let wrapper;
@@ -18,12 +28,14 @@ describe('RunnerBulkDeleteCheckbox', () => {
const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
- const mockRunners = allRunnersData.data.runners.nodes;
- const mockIds = allRunnersData.data.runners.nodes.map(({ id }) => id);
- const mockId = mockIds[0];
- const mockIdAnotherPage = 'RUNNER_IN_ANOTHER_PAGE_ID';
+ const expectCheckboxToBe = (state) => {
+ const expected = stateToAttrs[state];
+ expect(findCheckbox().attributes('disabled')).toBe(expected.disabled);
+ expect(findCheckbox().attributes('checked')).toBe(expected.checked);
+ expect(findCheckbox().attributes('indeterminate')).toBe(expected.indeterminate);
+ };
- const createComponent = ({ props = {} } = {}) => {
+ const createComponent = ({ runners = [] } = {}) => {
const { cacheConfig, localMutations } = mockState;
const apolloProvider = createMockApollo(undefined, undefined, cacheConfig);
@@ -33,8 +45,7 @@ describe('RunnerBulkDeleteCheckbox', () => {
localMutations,
},
propsData: {
- runners: mockRunners,
- ...props,
+ runners,
},
});
};
@@ -49,31 +60,61 @@ describe('RunnerBulkDeleteCheckbox', () => {
jest.spyOn(mockState.localMutations, 'setRunnersChecked');
});
- describe.each`
- case | is | checkedRunnerIds | disabled | checked | indeterminate
- ${'no runners'} | ${'unchecked'} | ${[]} | ${undefined} | ${undefined} | ${undefined}
- ${'no runners in this page'} | ${'unchecked'} | ${[mockIdAnotherPage]} | ${undefined} | ${undefined} | ${undefined}
- ${'all runners'} | ${'checked'} | ${mockIds} | ${undefined} | ${'true'} | ${undefined}
- ${'some runners'} | ${'indeterminate'} | ${[mockId]} | ${undefined} | ${undefined} | ${'true'}
- ${'all plus other runners'} | ${'checked'} | ${[...mockIds, mockIdAnotherPage]} | ${undefined} | ${'true'} | ${undefined}
- `('When $case are checked', ({ is, checkedRunnerIds, disabled, checked, indeterminate }) => {
- beforeEach(async () => {
+ describe('when all runners can be deleted', () => {
+ const mockIds = ['1', '2', '3'];
+ const mockIdAnotherPage = '4';
+ const mockRunners = mockIds.map((id) => makeRunner(id));
+
+ it.each`
+ case | checkedRunnerIds | state
+ ${'no runners'} | ${[]} | ${'unchecked'}
+ ${'no runners in this page'} | ${[mockIdAnotherPage]} | ${'unchecked'}
+ ${'all runners'} | ${mockIds} | ${'checked'}
+ ${'some runners'} | ${[mockIds[0]]} | ${'indeterminate'}
+ ${'all plus other runners'} | ${[...mockIds, mockIdAnotherPage]} | ${'checked'}
+ `('if $case are checked, checkbox is $state', ({ checkedRunnerIds, state }) => {
mockCheckedRunnerIds = checkedRunnerIds;
- createComponent();
+ createComponent({ runners: mockRunners });
+ expectCheckboxToBe(state);
});
+ });
+
+ describe('when some runners cannot be deleted', () => {
+ it('all allowed runners are selected, checkbox is checked', () => {
+ mockCheckedRunnerIds = ['a', 'b', 'c'];
+ createComponent({
+ runners: [makeRunner('a'), makeRunner('b'), makeRunner('c', false)],
+ });
- it(`is ${is}`, () => {
- expect(findCheckbox().attributes('disabled')).toBe(disabled);
- expect(findCheckbox().attributes('checked')).toBe(checked);
- expect(findCheckbox().attributes('indeterminate')).toBe(indeterminate);
+ expectCheckboxToBe('checked');
+ });
+
+ it('some allowed runners are selected, checkbox is indeterminate', () => {
+ mockCheckedRunnerIds = ['a', 'b'];
+ createComponent({
+ runners: [makeRunner('a'), makeRunner('b'), makeRunner('c')],
+ });
+
+ expectCheckboxToBe('indeterminate');
+ });
+
+ it('no allowed runners are selected, checkbox is disabled', () => {
+ mockCheckedRunnerIds = ['a', 'b'];
+ createComponent({
+ runners: [makeRunner('a', false), makeRunner('b', false)],
+ });
+
+ expectCheckboxToBe('disabled');
});
});
describe('When user selects', () => {
+ const mockRunners = [makeRunner('1'), makeRunner('2')];
+
beforeEach(() => {
- mockCheckedRunnerIds = mockIds;
- createComponent();
+ mockCheckedRunnerIds = ['1', '2'];
+ createComponent({ runners: mockRunners });
});
it.each([[true], [false]])('sets checked to %s', (checked) => {
@@ -89,13 +130,11 @@ describe('RunnerBulkDeleteCheckbox', () => {
describe('When runners are loading', () => {
beforeEach(() => {
- createComponent({ props: { runners: [] } });
+ createComponent();
});
- it(`is disabled`, () => {
- expect(findCheckbox().attributes('disabled')).toBe('true');
- expect(findCheckbox().attributes('checked')).toBe(undefined);
- expect(findCheckbox().attributes('indeterminate')).toBe(undefined);
+ it('is disabled', () => {
+ expectCheckboxToBe('disabled');
});
});
});
diff --git a/spec/frontend/runner/components/runner_delete_button_spec.js b/spec/frontend/runner/components/runner_delete_button_spec.js
index 52fe803c536..c8fb7a69379 100644
--- a/spec/frontend/runner/components/runner_delete_button_spec.js
+++ b/spec/frontend/runner/components/runner_delete_button_spec.js
@@ -9,11 +9,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { captureException } from '~/runner/sentry_utils';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { createAlert } from '~/flash';
-import {
- I18N_DELETE_RUNNER,
- I18N_DELETE_DISABLED_MANY_PROJECTS,
- I18N_DELETE_DISABLED_UNKNOWN_REASON,
-} from '~/runner/constants';
+import { I18N_DELETE_RUNNER } from '~/runner/constants';
import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
import RunnerDeleteModal from '~/runner/components/runner_delete_modal.vue';
@@ -267,29 +263,4 @@ describe('RunnerDeleteButton', () => {
});
});
});
-
- describe.each`
- reason | runner | tooltip
- ${'runner belongs to more than 1 project'} | ${{ projectCount: 2 }} | ${I18N_DELETE_DISABLED_MANY_PROJECTS}
- ${'unknown reason'} | ${{}} | ${I18N_DELETE_DISABLED_UNKNOWN_REASON}
- `('When button is disabled because $reason', ({ runner, tooltip }) => {
- beforeEach(() => {
- createComponent({
- props: {
- disabled: true,
- runner,
- },
- });
- });
-
- it('Displays a disabled delete button', () => {
- expect(findBtn().props('disabled')).toBe(true);
- });
-
- it(`Tooltip "${tooltip}" is shown`, () => {
- // tabindex is required for a11y
- expect(wrapper.attributes('tabindex')).toBe('0');
- expect(getTooltip()).toBe(tooltip);
- });
- });
});
diff --git a/spec/frontend/runner/components/runner_details_spec.js b/spec/frontend/runner/components/runner_details_spec.js
index f2281223a25..e6cc936e260 100644
--- a/spec/frontend/runner/components/runner_details_spec.js
+++ b/spec/frontend/runner/components/runner_details_spec.js
@@ -25,12 +25,7 @@ describe('RunnerDetails', () => {
const findDetailGroups = () => wrapper.findComponent(RunnerGroups);
- const createComponent = ({
- props = {},
- stubs,
- mountFn = shallowMountExtended,
- enforceRunnerTokenExpiresAt = false,
- } = {}) => {
+ const createComponent = ({ props = {}, stubs, mountFn = shallowMountExtended } = {}) => {
wrapper = mountFn(RunnerDetails, {
propsData: {
...props,
@@ -39,9 +34,6 @@ describe('RunnerDetails', () => {
RunnerDetail,
...stubs,
},
- provide: {
- glFeatures: { enforceRunnerTokenExpiresAt },
- },
});
};
@@ -82,7 +74,6 @@ describe('RunnerDetails', () => {
...runner,
},
},
- enforceRunnerTokenExpiresAt: true,
stubs: {
GlIntersperse,
GlSprintf,
@@ -135,22 +126,5 @@ describe('RunnerDetails', () => {
expect(findDetailGroups().props('runner')).toEqual(mockGroupRunner);
});
});
-
- describe('Token expiration field', () => {
- it.each`
- case | flag | shown
- ${'is shown when feature flag is enabled'} | ${true} | ${true}
- ${'is not shown when feature flag is disabled'} | ${false} | ${false}
- `('$case', ({ flag, shown }) => {
- createComponent({
- props: {
- runner: mockGroupRunner,
- },
- enforceRunnerTokenExpiresAt: flag,
- });
-
- expect(findDd('Token expiry', wrapper).exists()).toBe(shown);
- });
- });
});
});
diff --git a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
index e35bec3aa38..c92e19f9263 100644
--- a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
+++ b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
@@ -4,10 +4,26 @@ import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_
import { statusTokenConfig } from '~/runner/components/search_tokens/status_token_config';
import TagToken from '~/runner/components/search_tokens/tag_token.vue';
import { tagTokenConfig } from '~/runner/components/search_tokens/tag_token_config';
-import { PARAM_KEY_STATUS, PARAM_KEY_TAG, STATUS_ONLINE, INSTANCE_TYPE } from '~/runner/constants';
+import {
+ PARAM_KEY_STATUS,
+ PARAM_KEY_TAG,
+ STATUS_ONLINE,
+ INSTANCE_TYPE,
+ DEFAULT_MEMBERSHIP,
+ DEFAULT_SORT,
+ CONTACTED_DESC,
+} from '~/runner/constants';
import FilteredSearch from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
+const mockSearch = {
+ runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
+ filters: [],
+ pagination: { page: 1 },
+ sort: DEFAULT_SORT,
+};
+
describe('RunnerList', () => {
let wrapper;
@@ -15,8 +31,7 @@ describe('RunnerList', () => {
const findGlFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
const findSortOptions = () => wrapper.findAllComponents(GlDropdownItem);
- const mockDefaultSort = 'CREATED_DESC';
- const mockOtherSort = 'CONTACTED_DESC';
+ const mockOtherSort = CONTACTED_DESC;
const mockFilters = [
{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } },
{ type: 'filtered-search-term', value: { data: '' } },
@@ -32,11 +47,7 @@ describe('RunnerList', () => {
propsData: {
namespace: 'runners',
tokens: [],
- value: {
- runnerType: null,
- filters: [],
- sort: mockDefaultSort,
- },
+ value: mockSearch,
...props,
},
stubs: {
@@ -115,6 +126,7 @@ describe('RunnerList', () => {
props: {
value: {
runnerType: INSTANCE_TYPE,
+ membership: DEFAULT_MEMBERSHIP,
sort: mockOtherSort,
filters: mockFilters,
},
@@ -141,6 +153,7 @@ describe('RunnerList', () => {
expectToHaveLastEmittedInput({
runnerType: INSTANCE_TYPE,
+ membership: DEFAULT_MEMBERSHIP,
filters: mockFilters,
sort: mockOtherSort,
pagination: {},
@@ -154,8 +167,9 @@ describe('RunnerList', () => {
expectToHaveLastEmittedInput({
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: mockFilters,
- sort: mockDefaultSort,
+ sort: DEFAULT_SORT,
pagination: {},
});
});
@@ -165,6 +179,7 @@ describe('RunnerList', () => {
expectToHaveLastEmittedInput({
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [],
sort: mockOtherSort,
pagination: {},
diff --git a/spec/frontend/runner/components/runner_list_empty_state_spec.js b/spec/frontend/runner/components/runner_list_empty_state_spec.js
index 59cff863106..038162b889e 100644
--- a/spec/frontend/runner/components/runner_list_empty_state_spec.js
+++ b/spec/frontend/runner/components/runner_list_empty_state_spec.js
@@ -8,6 +8,7 @@ import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vu
const mockSvgPath = 'mock-svg-path.svg';
const mockFilteredSvgPath = 'mock-filtered-svg-path.svg';
+const mockRegistrationToken = 'REGISTRATION_TOKEN';
describe('RunnerListEmptyState', () => {
let wrapper;
@@ -21,6 +22,7 @@ describe('RunnerListEmptyState', () => {
propsData: {
svgPath: mockSvgPath,
filteredSvgPath: mockFilteredSvgPath,
+ registrationToken: mockRegistrationToken,
...props,
},
directives: {
@@ -35,27 +37,52 @@ describe('RunnerListEmptyState', () => {
};
describe('when search is not filtered', () => {
- beforeEach(() => {
- createComponent();
- });
+ const title = s__('Runners|Get started with runners');
- it('renders an illustration', () => {
- expect(findEmptyState().props('svgPath')).toBe(mockSvgPath);
- });
+ describe('when there is a registration token', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders an illustration', () => {
+ expect(findEmptyState().props('svgPath')).toBe(mockSvgPath);
+ });
+
+ it('displays "no results" text with instructions', () => {
+ const desc = s__(
+ 'Runners|Runners are the agents that run your CI/CD jobs. Follow the %{linkStart}installation and registration instructions%{linkEnd} to set up a runner.',
+ );
- it('displays "no results" text', () => {
- const title = s__('Runners|Get started with runners');
- const desc = s__(
- 'Runners|Runners are the agents that run your CI/CD jobs. Follow the %{linkStart}installation and registration instructions%{linkEnd} to set up a runner.',
- );
+ expect(findEmptyState().text()).toMatchInterpolatedText(`${title} ${desc}`);
+ });
- expect(findEmptyState().text()).toMatchInterpolatedText(`${title} ${desc}`);
+ it('opens a runner registration instructions modal with a link', () => {
+ const { value } = getBinding(findLink().element, 'gl-modal');
+
+ expect(findRunnerInstructionsModal().props('modalId')).toEqual(value);
+ });
});
- it('opens a runner registration instructions modal with a link', () => {
- const { value } = getBinding(findLink().element, 'gl-modal');
+ describe('when there is no registration token', () => {
+ beforeEach(() => {
+ createComponent({ props: { registrationToken: null } });
+ });
+
+ it('renders an illustration', () => {
+ expect(findEmptyState().props('svgPath')).toBe(mockSvgPath);
+ });
+
+ it('displays "no results" text', () => {
+ const desc = s__(
+ 'Runners|Runners are the agents that run your CI/CD jobs. To register new runners, please contact your administrator.',
+ );
+
+ expect(findEmptyState().text()).toMatchInterpolatedText(`${title} ${desc}`);
+ });
- expect(findRunnerInstructionsModal().props('modalId')).toEqual(value);
+ it('has no registration instructions link', () => {
+ expect(findLink().exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/runner/components/runner_list_spec.js b/spec/frontend/runner/components/runner_list_spec.js
index 54a9e713721..a31990f8f7e 100644
--- a/spec/frontend/runner/components/runner_list_spec.js
+++ b/spec/frontend/runner/components/runner_list_spec.js
@@ -1,12 +1,19 @@
import { GlTableLite, GlSkeletonLoader } from '@gitlab/ui';
+import HelpPopover from '~/vue_shared/components/help_popover.vue';
import {
extendedWrapper,
shallowMountExtended,
mountExtended,
} from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { s__ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { createLocalState } from '~/runner/graphql/list/local_state';
+
import RunnerList from '~/runner/components/runner_list.vue';
-import RunnerStatusPopover from '~/runner/components/runner_status_popover.vue';
+import RunnerBulkDelete from '~/runner/components/runner_bulk_delete.vue';
+import RunnerBulkDeleteCheckbox from '~/runner/components/runner_bulk_delete_checkbox.vue';
+
import { I18N_PROJECT_TYPE, I18N_STATUS_NEVER_CONTACTED } from '~/runner/constants';
import { allRunnersData, onlineContactTimeoutSecs, staleTimeoutSecs } from '../mock_data';
@@ -15,6 +22,8 @@ const mockActiveRunnersCount = mockRunners.length;
describe('RunnerList', () => {
let wrapper;
+ let cacheConfig;
+ let localMutations;
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findTable = () => wrapper.findComponent(GlTableLite);
@@ -22,18 +31,24 @@ describe('RunnerList', () => {
const findRows = () => wrapper.findAll('[data-testid^="runner-row-"]');
const findCell = ({ row = 0, fieldKey }) =>
extendedWrapper(findRows().at(row).find(`[data-testid="td-${fieldKey}"]`));
+ const findRunnerBulkDelete = () => wrapper.findComponent(RunnerBulkDelete);
+ const findRunnerBulkDeleteCheckbox = () => wrapper.findComponent(RunnerBulkDeleteCheckbox);
const createComponent = (
{ props = {}, provide = {}, ...options } = {},
mountFn = shallowMountExtended,
) => {
+ ({ cacheConfig, localMutations } = createLocalState());
+
wrapper = mountFn(RunnerList, {
+ apolloProvider: createMockApollo([], {}, cacheConfig),
propsData: {
runners: mockRunners,
activeRunnersCount: mockActiveRunnersCount,
...props,
},
provide: {
+ localMutations,
onlineContactTimeoutSecs,
staleTimeoutSecs,
...provide,
@@ -50,7 +65,7 @@ describe('RunnerList', () => {
createComponent(
{
stubs: {
- RunnerStatusPopover: {
+ HelpPopover: {
template: '<div/>',
},
},
@@ -60,11 +75,13 @@ describe('RunnerList', () => {
const headerLabels = findHeaders().wrappers.map((w) => w.text());
- expect(findHeaders().at(0).findComponent(RunnerStatusPopover).exists()).toBe(true);
+ expect(findHeaders().at(0).findComponent(HelpPopover).exists()).toBe(true);
+ expect(findHeaders().at(2).findComponent(HelpPopover).exists()).toBe(true);
expect(headerLabels).toEqual([
- 'Status',
- 'Runner',
+ s__('Runners|Status'),
+ s__('Runners|Runner'),
+ s__('Runners|Owner'),
'', // actions has no label
]);
});
@@ -123,21 +140,40 @@ describe('RunnerList', () => {
);
});
+ it('runner bulk delete is available', () => {
+ expect(findRunnerBulkDelete().props('runners')).toEqual(mockRunners);
+ });
+
+ it('runner bulk delete checkbox is available', () => {
+ expect(findRunnerBulkDeleteCheckbox().props('runners')).toEqual(mockRunners);
+ });
+
it('Displays a checkbox field', () => {
expect(findCell({ fieldKey: 'checkbox' }).find('input').exists()).toBe(true);
});
- it('Emits a checked event', async () => {
- const checkbox = findCell({ fieldKey: 'checkbox' }).find('input');
+ it('Sets a runner as checked', async () => {
+ const runner = mockRunners[0];
+ const setRunnerCheckedMock = jest
+ .spyOn(localMutations, 'setRunnerChecked')
+ .mockImplementation(() => {});
+ const checkbox = findCell({ fieldKey: 'checkbox' }).find('input');
await checkbox.setChecked();
- expect(wrapper.emitted('checked')).toHaveLength(1);
- expect(wrapper.emitted('checked')[0][0]).toEqual({
+ expect(setRunnerCheckedMock).toHaveBeenCalledTimes(1);
+ expect(setRunnerCheckedMock).toHaveBeenCalledWith({
+ runner,
isChecked: true,
- runner: mockRunners[0],
});
});
+
+ it('Emits a deleted event', async () => {
+ const event = { message: 'Deleted!' };
+ findRunnerBulkDelete().vm.$emit('deleted', event);
+
+ expect(wrapper.emitted('deleted')).toEqual([[event]]);
+ });
});
describe('Scoped cell slots', () => {
diff --git a/spec/frontend/runner/components/runner_membership_toggle_spec.js b/spec/frontend/runner/components/runner_membership_toggle_spec.js
new file mode 100644
index 00000000000..1a7ae22618a
--- /dev/null
+++ b/spec/frontend/runner/components/runner_membership_toggle_spec.js
@@ -0,0 +1,57 @@
+import { GlToggle } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import RunnerMembershipToggle from '~/runner/components/runner_membership_toggle.vue';
+import {
+ I18N_SHOW_ONLY_INHERITED,
+ MEMBERSHIP_DESCENDANTS,
+ MEMBERSHIP_ALL_AVAILABLE,
+} from '~/runner/constants';
+
+describe('RunnerMembershipToggle', () => {
+ let wrapper;
+
+ const findToggle = () => wrapper.findComponent(GlToggle);
+
+ const createComponent = ({ props = {}, mountFn = shallowMount } = {}) => {
+ wrapper = mountFn(RunnerMembershipToggle, {
+ propsData: props,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('Displays text', () => {
+ createComponent({ mountFn: mount });
+
+ expect(wrapper.text()).toBe(I18N_SHOW_ONLY_INHERITED);
+ });
+
+ it.each`
+ membershipValue | toggleValue
+ ${MEMBERSHIP_DESCENDANTS} | ${true}
+ ${MEMBERSHIP_ALL_AVAILABLE} | ${false}
+ `(
+ 'Displays a membership of $membershipValue as enabled=$toggleValue',
+ ({ membershipValue, toggleValue }) => {
+ createComponent({ props: { value: membershipValue } });
+
+ expect(findToggle().props('value')).toBe(toggleValue);
+ },
+ );
+
+ it.each`
+ changeEvt | membershipValue
+ ${true} | ${MEMBERSHIP_DESCENDANTS}
+ ${false} | ${MEMBERSHIP_ALL_AVAILABLE}
+ `(
+ 'Emits $changeEvt when value is changed to $membershipValue',
+ ({ changeEvt, membershipValue }) => {
+ createComponent();
+ findToggle().vm.$emit('change', changeEvt);
+
+ expect(wrapper.emitted('input')).toStrictEqual([[membershipValue]]);
+ },
+ );
+});
diff --git a/spec/frontend/runner/components/runner_stacked_layout_banner_spec.js b/spec/frontend/runner/components/runner_stacked_layout_banner_spec.js
index 1a8aced9292..d1f04f0ee37 100644
--- a/spec/frontend/runner/components/runner_stacked_layout_banner_spec.js
+++ b/spec/frontend/runner/components/runner_stacked_layout_banner_spec.js
@@ -29,6 +29,8 @@ describe('RunnerStackedLayoutBanner', () => {
});
it('Does not display a banner when dismissed', async () => {
+ createComponent();
+
findLocalStorageSync().vm.$emit('input', true);
await nextTick();
diff --git a/spec/frontend/runner/components/runner_type_tabs_spec.js b/spec/frontend/runner/components/runner_type_tabs_spec.js
index 45ab8684332..dde35533bc3 100644
--- a/spec/frontend/runner/components/runner_type_tabs_spec.js
+++ b/spec/frontend/runner/components/runner_type_tabs_spec.js
@@ -2,9 +2,21 @@ import { GlTab } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerCount from '~/runner/components/stat/runner_count.vue';
-import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
-
-const mockSearch = { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' };
+import {
+ INSTANCE_TYPE,
+ GROUP_TYPE,
+ PROJECT_TYPE,
+ DEFAULT_MEMBERSHIP,
+ DEFAULT_SORT,
+} from '~/runner/constants';
+
+const mockSearch = {
+ runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
+ filters: [],
+ pagination: { page: 1 },
+ sort: DEFAULT_SORT,
+};
const mockCount = (type, multiplier = 1) => {
let count;
@@ -113,7 +125,7 @@ describe('RunnerTypeTabs', () => {
});
findTabs().wrappers.forEach((tab) => {
- expect(tab.find(RunnerCount).props()).toEqual({
+ expect(tab.findComponent(RunnerCount).props()).toEqual({
scope: INSTANCE_TYPE,
skip: false,
variables: expect.objectContaining(mockVariables),
diff --git a/spec/frontend/runner/components/runner_update_form_spec.js b/spec/frontend/runner/components/runner_update_form_spec.js
index 7b67a89f989..e12736216a0 100644
--- a/spec/frontend/runner/components/runner_update_form_spec.js
+++ b/spec/frontend/runner/components/runner_update_form_spec.js
@@ -145,7 +145,7 @@ describe('RunnerUpdateForm', () => {
});
it('Form skeleton is shown', () => {
- expect(wrapper.find(GlSkeletonLoader).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
expect(findFields()).toHaveLength(0);
});
diff --git a/spec/frontend/runner/components/search_tokens/tag_token_spec.js b/spec/frontend/runner/components/search_tokens/tag_token_spec.js
index 22f0561ca5f..a7363eb11cd 100644
--- a/spec/frontend/runner/components/search_tokens/tag_token_spec.js
+++ b/spec/frontend/runner/components/search_tokens/tag_token_spec.js
@@ -77,7 +77,7 @@ describe('TagToken', () => {
const findToken = () => wrapper.findComponent(GlToken);
const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- beforeEach(async () => {
+ beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet(TAG_SUGGESTIONS_PATH, { params: { search: '' } }).reply(200, mockTags);
@@ -86,9 +86,6 @@ describe('TagToken', () => {
.reply(200, mockTagsFiltered);
getRecentlyUsedSuggestions.mockReturnValue([]);
-
- createComponent();
- await waitForPromises();
});
afterEach(() => {
@@ -97,11 +94,17 @@ describe('TagToken', () => {
});
describe('when the tags token is displayed', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
it('requests tags suggestions', () => {
expect(mock.history.get[0].params).toEqual({ search: '' });
});
- it('displays tags suggestions', () => {
+ it('displays tags suggestions', async () => {
+ await waitForPromises();
+
mockTags.forEach(({ name }, i) => {
expect(findGlFilteredSearchSuggestions().at(i).text()).toBe(name);
});
@@ -132,13 +135,13 @@ describe('TagToken', () => {
});
describe('when the users filters suggestions', () => {
- beforeEach(async () => {
+ beforeEach(() => {
+ createComponent();
+
findGlFilteredSearchToken().vm.$emit('input', { data: mockSearchTerm });
});
- it('requests filtered tags suggestions', async () => {
- await waitForPromises();
-
+ it('requests filtered tags suggestions', () => {
expect(mock.history.get[1].params).toEqual({ search: mockSearchTerm });
});
@@ -166,7 +169,7 @@ describe('TagToken', () => {
await waitForPromises();
});
- it('error is shown', async () => {
+ it('error is shown', () => {
expect(createAlert).toHaveBeenCalledTimes(1);
expect(createAlert).toHaveBeenCalledWith({ message: expect.any(String) });
});
@@ -180,8 +183,26 @@ describe('TagToken', () => {
await waitForPromises();
});
- it('selected tag is displayed', async () => {
+ it('selected tag is displayed', () => {
expect(findToken().exists()).toBe(true);
});
});
+
+ describe('when suggestions are disabled', () => {
+ beforeEach(async () => {
+ createComponent({
+ config: {
+ ...mockTagTokenConfig,
+ suggestionsDisabled: true,
+ },
+ });
+
+ await waitForPromises();
+ });
+
+ it('displays no suggestions', () => {
+ expect(findGlFilteredSearchSuggestions()).toHaveLength(0);
+ expect(mock.history.get).toHaveLength(0);
+ });
+ });
});
diff --git a/spec/frontend/runner/graphql/local_state_spec.js b/spec/frontend/runner/graphql/local_state_spec.js
index ae874fef00d..915170b53f9 100644
--- a/spec/frontend/runner/graphql/local_state_spec.js
+++ b/spec/frontend/runner/graphql/local_state_spec.js
@@ -4,6 +4,13 @@ import { createLocalState } from '~/runner/graphql/list/local_state';
import getCheckedRunnerIdsQuery from '~/runner/graphql/list/checked_runner_ids.query.graphql';
import { RUNNER_TYPENAME } from '~/runner/constants';
+const makeRunner = (id, deleteRunner = true) => ({
+ id,
+ userPermissions: {
+ deleteRunner,
+ },
+});
+
describe('~/runner/graphql/list/local_state', () => {
let localState;
let apolloClient;
@@ -57,16 +64,21 @@ describe('~/runner/graphql/list/local_state', () => {
});
it('returns checked runners that have a reference in the cache', () => {
- addMockRunnerToCache('a');
- localState.localMutations.setRunnerChecked({ runner: { id: 'a' }, isChecked: true });
+ const id = 'a';
+
+ addMockRunnerToCache(id);
+ localState.localMutations.setRunnerChecked({
+ runner: makeRunner(id),
+ isChecked: true,
+ });
expect(queryCheckedRunnerIds()).toEqual(['a']);
});
it('return checked runners that are not dangling references', () => {
addMockRunnerToCache('a'); // 'b' is missing from the cache, perhaps because it was deleted
- localState.localMutations.setRunnerChecked({ runner: { id: 'a' }, isChecked: true });
- localState.localMutations.setRunnerChecked({ runner: { id: 'b' }, isChecked: true });
+ localState.localMutations.setRunnerChecked({ runner: makeRunner('a'), isChecked: true });
+ localState.localMutations.setRunnerChecked({ runner: makeRunner('b'), isChecked: true });
expect(queryCheckedRunnerIds()).toEqual(['a']);
});
@@ -81,7 +93,7 @@ describe('~/runner/graphql/list/local_state', () => {
beforeEach(() => {
inputs.forEach(([id, isChecked]) => {
addMockRunnerToCache(id);
- localState.localMutations.setRunnerChecked({ runner: { id }, isChecked });
+ localState.localMutations.setRunnerChecked({ runner: makeRunner(id), isChecked });
});
});
it(`for inputs="${inputs}" has a ids="[${expected}]"`, () => {
@@ -102,7 +114,7 @@ describe('~/runner/graphql/list/local_state', () => {
ids.forEach(addMockRunnerToCache);
localState.localMutations.setRunnersChecked({
- runners: ids.map((id) => ({ id })),
+ runners: ids.map((id) => makeRunner(id)),
isChecked,
});
});
@@ -117,7 +129,7 @@ describe('~/runner/graphql/list/local_state', () => {
it('clears all checked items', () => {
['a', 'b', 'c'].forEach((id) => {
addMockRunnerToCache(id);
- localState.localMutations.setRunnerChecked({ runner: { id }, isChecked: true });
+ localState.localMutations.setRunnerChecked({ runner: makeRunner(id), isChecked: true });
});
expect(queryCheckedRunnerIds()).toEqual(['a', 'b', 'c']);
@@ -127,4 +139,29 @@ describe('~/runner/graphql/list/local_state', () => {
expect(queryCheckedRunnerIds()).toEqual([]);
});
});
+
+ describe('when some runners cannot be deleted', () => {
+ beforeEach(() => {
+ addMockRunnerToCache('a');
+ addMockRunnerToCache('b');
+ });
+
+ it('setRunnerChecked does not check runner that cannot be deleted', () => {
+ localState.localMutations.setRunnerChecked({
+ runner: makeRunner('a', false),
+ isChecked: true,
+ });
+
+ expect(queryCheckedRunnerIds()).toEqual([]);
+ });
+
+ it('setRunnersChecked does not check runner that cannot be deleted', () => {
+ localState.localMutations.setRunnersChecked({
+ runners: [makeRunner('a', false), makeRunner('b', false)],
+ isChecked: true,
+ });
+
+ expect(queryCheckedRunnerIds()).toEqual([]);
+ });
+ });
});
diff --git a/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js b/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
index cee1d436942..a3b67674c94 100644
--- a/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
+++ b/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
@@ -101,6 +101,11 @@ describe('GroupRunnerShowApp', () => {
Platform darwin
Configuration Runs untagged jobs
Maximum job timeout None
+ Token expiry
+ Runner authentication token expiration
+ Runner authentication tokens will expire based on a set interval.
+ They will automatically rotate once expired. Learn more
+ Never expires
Tags None`.replace(/\s+/g, ' ');
expect(wrapper.text().replace(/\s+/g, ' ')).toContain(expected);
diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js
index a17502c7eec..7482926e151 100644
--- a/spec/frontend/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js
@@ -14,6 +14,7 @@ import { s__ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { updateHistory } from '~/lib/utils/url_utility';
import { upgradeStatusTokenConfig } from 'ee_else_ce/runner/components/search_tokens/upgrade_status_token_config';
+import { createLocalState } from '~/runner/graphql/list/local_state';
import RunnerStackedLayoutBanner from '~/runner/components/runner_stacked_layout_banner.vue';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
@@ -24,6 +25,7 @@ import RunnerStats from '~/runner/components/stat/runner_stats.vue';
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
+import RunnerMembershipToggle from '~/runner/components/runner_membership_toggle.vue';
import {
CREATED_ASC,
@@ -36,9 +38,12 @@ import {
GROUP_TYPE,
PARAM_KEY_PAUSED,
PARAM_KEY_STATUS,
+ PARAM_KEY_TAG,
STATUS_ONLINE,
STATUS_OFFLINE,
STATUS_STALE,
+ MEMBERSHIP_ALL_AVAILABLE,
+ MEMBERSHIP_DESCENDANTS,
RUNNER_PAGE_SIZE,
I18N_EDIT,
} from '~/runner/constants';
@@ -89,15 +94,23 @@ describe('GroupRunnersApp', () => {
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
const findRunnerPaginationNext = () => findRunnerPagination().findByText(s__('Pagination|Next'));
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
+ const findRunnerMembershipToggle = () => wrapper.findComponent(RunnerMembershipToggle);
+
+ const createComponent = ({
+ props = {},
+ provide = {},
+ mountFn = shallowMountExtended,
+ ...options
+ } = {}) => {
+ const { cacheConfig, localMutations } = createLocalState();
- const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
const handlers = [
[groupRunnersQuery, mockGroupRunnersHandler],
[groupRunnersCountQuery, mockGroupRunnersCountHandler],
];
wrapper = mountFn(GroupRunnersApp, {
- apolloProvider: createMockApollo(handlers),
+ apolloProvider: createMockApollo(handlers, {}, cacheConfig),
propsData: {
registrationToken: mockRegistrationToken,
groupFullPath: mockGroupFullPath,
@@ -105,10 +118,12 @@ describe('GroupRunnersApp', () => {
...props,
},
provide: {
+ localMutations,
onlineContactTimeoutSecs,
staleTimeoutSecs,
emptyStateSvgPath,
emptyStateFilteredSvgPath,
+ ...provide,
},
...options,
});
@@ -147,19 +162,50 @@ describe('GroupRunnersApp', () => {
expect(findRegistrationDropdown().props('type')).toBe(GROUP_TYPE);
});
+ describe('show all available runners toggle', () => {
+ it('shows the membership toggle', () => {
+ createComponent();
+ expect(findRunnerMembershipToggle().exists()).toBe(true);
+ });
+
+ it('sets the membership toggle', () => {
+ setWindowLocation(`?membership[]=${MEMBERSHIP_ALL_AVAILABLE}`);
+
+ createComponent();
+
+ expect(findRunnerMembershipToggle().props('value')).toBe(MEMBERSHIP_ALL_AVAILABLE);
+ });
+
+ it('requests filter', async () => {
+ createComponent();
+ findRunnerMembershipToggle().vm.$emit('input', MEMBERSHIP_ALL_AVAILABLE);
+
+ await waitForPromises();
+
+ expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith(
+ expect.objectContaining({
+ membership: MEMBERSHIP_ALL_AVAILABLE,
+ }),
+ );
+ });
+ });
+
it('shows total runner counts', async () => {
await createComponent({ mountFn: mountExtended });
expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
status: STATUS_ONLINE,
+ membership: MEMBERSHIP_DESCENDANTS,
groupFullPath: mockGroupFullPath,
});
expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
status: STATUS_OFFLINE,
+ membership: MEMBERSHIP_DESCENDANTS,
groupFullPath: mockGroupFullPath,
});
expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
status: STATUS_STALE,
+ membership: MEMBERSHIP_DESCENDANTS,
groupFullPath: mockGroupFullPath,
});
@@ -183,6 +229,7 @@ describe('GroupRunnersApp', () => {
groupFullPath: mockGroupFullPath,
status: undefined,
type: undefined,
+ membership: MEMBERSHIP_DESCENDANTS,
sort: DEFAULT_SORT,
first: RUNNER_PAGE_SIZE,
});
@@ -202,6 +249,10 @@ describe('GroupRunnersApp', () => {
type: PARAM_KEY_STATUS,
options: expect.any(Array),
}),
+ expect.objectContaining({
+ type: PARAM_KEY_TAG,
+ suggestionsDisabled: true,
+ }),
upgradeStatusTokenConfig,
]);
});
@@ -213,7 +264,7 @@ describe('GroupRunnersApp', () => {
const { id: graphqlId, shortSha } = node;
const id = getIdFromGraphQLId(graphqlId);
const COUNT_QUERIES = 6; // Smart queries that display a filtered count of runners
- const FILTERED_COUNT_QUERIES = 3; // Smart queries that display a count of runners in tabs
+ const FILTERED_COUNT_QUERIES = 6; // Smart queries that display a count of runners in tabs and single stats
beforeEach(async () => {
await createComponent({ mountFn: mountExtended });
@@ -266,6 +317,7 @@ describe('GroupRunnersApp', () => {
it('sets the filters in the search bar', () => {
expect(findRunnerFilteredSearchBar().props('value')).toEqual({
runnerType: INSTANCE_TYPE,
+ membership: MEMBERSHIP_DESCENDANTS,
filters: [{ type: 'status', value: { data: STATUS_ONLINE, operator: '=' } }],
sort: 'CREATED_DESC',
pagination: {},
@@ -277,6 +329,7 @@ describe('GroupRunnersApp', () => {
groupFullPath: mockGroupFullPath,
status: STATUS_ONLINE,
type: INSTANCE_TYPE,
+ membership: MEMBERSHIP_DESCENDANTS,
sort: DEFAULT_SORT,
first: RUNNER_PAGE_SIZE,
});
@@ -286,6 +339,7 @@ describe('GroupRunnersApp', () => {
expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
groupFullPath: mockGroupFullPath,
type: INSTANCE_TYPE,
+ membership: MEMBERSHIP_DESCENDANTS,
status: STATUS_ONLINE,
});
});
@@ -297,6 +351,7 @@ describe('GroupRunnersApp', () => {
findRunnerFilteredSearchBar().vm.$emit('input', {
runnerType: null,
+ membership: MEMBERSHIP_DESCENDANTS,
filters: [{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } }],
sort: CREATED_ASC,
});
@@ -315,6 +370,7 @@ describe('GroupRunnersApp', () => {
expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith({
groupFullPath: mockGroupFullPath,
status: STATUS_ONLINE,
+ membership: MEMBERSHIP_DESCENDANTS,
sort: CREATED_ASC,
first: RUNNER_PAGE_SIZE,
});
@@ -324,6 +380,7 @@ describe('GroupRunnersApp', () => {
expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
groupFullPath: mockGroupFullPath,
status: STATUS_ONLINE,
+ membership: MEMBERSHIP_DESCENDANTS,
});
});
});
@@ -334,6 +391,11 @@ describe('GroupRunnersApp', () => {
expect(findRunnerPagination().attributes('disabled')).toBe('true');
});
+ it('runners cannot be deleted in bulk', () => {
+ createComponent();
+ expect(findRunnerList().props('checkable')).toBe(false);
+ });
+
describe('when no runners are found', () => {
beforeEach(async () => {
mockGroupRunnersHandler.mockResolvedValue({
@@ -395,6 +457,7 @@ describe('GroupRunnersApp', () => {
expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith({
groupFullPath: mockGroupFullPath,
+ membership: MEMBERSHIP_DESCENDANTS,
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
after: pageInfo.endCursor,
diff --git a/spec/frontend/runner/mock_data.js b/spec/frontend/runner/mock_data.js
index 555ec40184f..da0c0433b3e 100644
--- a/spec/frontend/runner/mock_data.js
+++ b/spec/frontend/runner/mock_data.js
@@ -17,7 +17,7 @@ import groupRunnersData from 'test_fixtures/graphql/runner/list/group_runners.qu
import groupRunnersDataPaginated from 'test_fixtures/graphql/runner/list/group_runners.query.graphql.paginated.json';
import groupRunnersCountData from 'test_fixtures/graphql/runner/list/group_runners_count.query.graphql.json';
-import { RUNNER_PAGE_SIZE } from '~/runner/constants';
+import { DEFAULT_MEMBERSHIP, RUNNER_PAGE_SIZE } from '~/runner/constants';
const emptyPageInfo = {
__typename: 'PageInfo',
@@ -34,8 +34,18 @@ export const mockSearchExamples = [
{
name: 'a default query',
urlQuery: '',
- search: { runnerType: null, filters: [], pagination: {}, sort: 'CREATED_DESC' },
- graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ search: {
+ runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
+ filters: [],
+ pagination: {},
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
isDefault: true,
},
{
@@ -43,17 +53,24 @@ export const mockSearchExamples = [
urlQuery: '?status[]=ACTIVE',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: {},
sort: 'CREATED_DESC',
},
- graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
+ status: 'ACTIVE',
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'a single term text search',
urlQuery: '?search=something',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [
{
type: 'filtered-search-term',
@@ -63,13 +80,19 @@ export const mockSearchExamples = [
pagination: {},
sort: 'CREATED_DESC',
},
- graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
+ search: 'something',
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'a two terms text search',
urlQuery: '?search=something+else',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [
{
type: 'filtered-search-term',
@@ -83,24 +106,36 @@ export const mockSearchExamples = [
pagination: {},
sort: 'CREATED_DESC',
},
- graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
+ search: 'something else',
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'single instance type',
urlQuery: '?runner_type[]=INSTANCE_TYPE',
search: {
runnerType: 'INSTANCE_TYPE',
+ membership: DEFAULT_MEMBERSHIP,
filters: [],
pagination: {},
sort: 'CREATED_DESC',
},
- graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ type: 'INSTANCE_TYPE',
+ membership: DEFAULT_MEMBERSHIP,
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'multiple runner status',
urlQuery: '?status[]=ACTIVE&status[]=PAUSED',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'status', value: { data: 'PAUSED', operator: '=' } },
@@ -108,13 +143,19 @@ export const mockSearchExamples = [
pagination: {},
sort: 'CREATED_DESC',
},
- graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ status: 'ACTIVE',
+ membership: DEFAULT_MEMBERSHIP,
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'multiple status, a single instance type and a non default sort',
urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC',
search: {
runnerType: 'INSTANCE_TYPE',
+ membership: DEFAULT_MEMBERSHIP,
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
pagination: {},
sort: 'CREATED_ASC',
@@ -122,6 +163,7 @@ export const mockSearchExamples = [
graphqlVariables: {
status: 'ACTIVE',
type: 'INSTANCE_TYPE',
+ membership: DEFAULT_MEMBERSHIP,
sort: 'CREATED_ASC',
first: RUNNER_PAGE_SIZE,
},
@@ -131,11 +173,13 @@ export const mockSearchExamples = [
urlQuery: '?tag[]=tag-1',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
tagList: ['tag-1'],
first: 20,
sort: 'CREATED_DESC',
@@ -146,6 +190,7 @@ export const mockSearchExamples = [
urlQuery: '?tag[]=tag-1&tag[]=tag-2',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
@@ -154,6 +199,7 @@ export const mockSearchExamples = [
sort: 'CREATED_DESC',
},
graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
tagList: ['tag-1', 'tag-2'],
first: 20,
sort: 'CREATED_DESC',
@@ -164,22 +210,34 @@ export const mockSearchExamples = [
urlQuery: '?after=AFTER_CURSOR',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [],
pagination: { after: 'AFTER_CURSOR' },
sort: 'CREATED_DESC',
},
- graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
+ sort: 'CREATED_DESC',
+ after: 'AFTER_CURSOR',
+ first: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'the previous page',
urlQuery: '?before=BEFORE_CURSOR',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [],
pagination: { before: 'BEFORE_CURSOR' },
sort: 'CREATED_DESC',
},
- graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ membership: DEFAULT_MEMBERSHIP,
+ sort: 'CREATED_DESC',
+ before: 'BEFORE_CURSOR',
+ last: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'the next page filtered by a status, an instance type, tags and a non default sort',
@@ -187,6 +245,7 @@ export const mockSearchExamples = [
'?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&after=AFTER_CURSOR',
search: {
runnerType: 'INSTANCE_TYPE',
+ membership: DEFAULT_MEMBERSHIP,
filters: [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
@@ -198,6 +257,7 @@ export const mockSearchExamples = [
graphqlVariables: {
status: 'ACTIVE',
type: 'INSTANCE_TYPE',
+ membership: DEFAULT_MEMBERSHIP,
tagList: ['tag-1', 'tag-2'],
sort: 'CREATED_ASC',
after: 'AFTER_CURSOR',
@@ -209,22 +269,34 @@ export const mockSearchExamples = [
urlQuery: '?paused[]=true',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
pagination: {},
sort: 'CREATED_DESC',
},
- graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ paused: true,
+ membership: DEFAULT_MEMBERSHIP,
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
},
{
name: 'active runners',
urlQuery: '?paused[]=false',
search: {
runnerType: null,
+ membership: DEFAULT_MEMBERSHIP,
filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
pagination: {},
sort: 'CREATED_DESC',
},
- graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ graphqlVariables: {
+ paused: false,
+ membership: DEFAULT_MEMBERSHIP,
+ sort: 'CREATED_DESC',
+ first: RUNNER_PAGE_SIZE,
+ },
},
];
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
index 3bea0748c47..89959feec39 100644
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ b/spec/frontend/search/sidebar/components/app_spec.js
@@ -42,20 +42,39 @@ describe('GlobalSearchSidebar', () => {
const findResetLinkButton = () => wrapper.findComponent(GlLink);
describe('template', () => {
- beforeEach(() => {
- createComponent();
- });
+ describe('scope=projects', () => {
+ beforeEach(() => {
+ createComponent({ urlQuery: { ...MOCK_QUERY, scope: 'projects' } });
+ });
- it('renders StatusFilter always', () => {
- expect(findStatusFilter().exists()).toBe(true);
- });
+ it("doesn't render StatusFilter", () => {
+ expect(findStatusFilter().exists()).toBe(false);
+ });
+
+ it("doesn't render ConfidentialityFilter", () => {
+ expect(findConfidentialityFilter().exists()).toBe(false);
+ });
- it('renders ConfidentialityFilter always', () => {
- expect(findConfidentialityFilter().exists()).toBe(true);
+ it("doesn't render ApplyButton", () => {
+ expect(findApplyButton().exists()).toBe(false);
+ });
});
- it('renders ApplyButton always', () => {
- expect(findApplyButton().exists()).toBe(true);
+ describe('scope=issues', () => {
+ beforeEach(() => {
+ createComponent({ urlQuery: MOCK_QUERY });
+ });
+ it('renders StatusFilter', () => {
+ expect(findStatusFilter().exists()).toBe(true);
+ });
+
+ it('renders ConfidentialityFilter', () => {
+ expect(findConfidentialityFilter().exists()).toBe(true);
+ });
+
+ it('renders ApplyButton', () => {
+ expect(findApplyButton().exists()).toBe(true);
+ });
});
});
@@ -115,7 +134,7 @@ describe('GlobalSearchSidebar', () => {
describe('actions', () => {
beforeEach(() => {
- createComponent();
+ createComponent({});
});
it('clicking ApplyButton calls applyQuery', () => {
diff --git a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
index a377ddae0eb..c57eabd57b9 100644
--- a/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/confidentiality_filter_spec.js
@@ -34,7 +34,7 @@ describe('ConfidentialityFilter', () => {
wrapper = null;
});
- const findRadioFilter = () => wrapper.find(RadioFilter);
+ const findRadioFilter = () => wrapper.findComponent(RadioFilter);
describe('template', () => {
beforeEach(() => {
diff --git a/spec/frontend/search/sidebar/components/radio_filter_spec.js b/spec/frontend/search/sidebar/components/radio_filter_spec.js
index c0a8259b4fe..94d529348a9 100644
--- a/spec/frontend/search/sidebar/components/radio_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/radio_filter_spec.js
@@ -43,7 +43,7 @@ describe('RadioFilter', () => {
wrapper = null;
});
- const findGlRadioButtonGroup = () => wrapper.find(GlFormRadioGroup);
+ const findGlRadioButtonGroup = () => wrapper.findComponent(GlFormRadioGroup);
const findGlRadioButtons = () => findGlRadioButtonGroup().findAllComponents(GlFormRadio);
const findGlRadioButtonsText = () => findGlRadioButtons().wrappers.map((w) => w.text());
diff --git a/spec/frontend/search/sidebar/components/status_filter_spec.js b/spec/frontend/search/sidebar/components/status_filter_spec.js
index 5d8ecd8733a..f3152c014b6 100644
--- a/spec/frontend/search/sidebar/components/status_filter_spec.js
+++ b/spec/frontend/search/sidebar/components/status_filter_spec.js
@@ -34,7 +34,7 @@ describe('StatusFilter', () => {
wrapper = null;
});
- const findRadioFilter = () => wrapper.find(RadioFilter);
+ const findRadioFilter = () => wrapper.findComponent(RadioFilter);
describe('template', () => {
beforeEach(() => {
diff --git a/spec/frontend/search/sort/components/app_spec.js b/spec/frontend/search/sort/components/app_spec.js
index 0e8eebba3cb..a566b9b99d3 100644
--- a/spec/frontend/search/sort/components/app_spec.js
+++ b/spec/frontend/search/sort/components/app_spec.js
@@ -43,9 +43,9 @@ describe('GlobalSearchSort', () => {
wrapper = null;
});
- const findSortButtonGroup = () => wrapper.find(GlButtonGroup);
- const findSortDropdown = () => wrapper.find(GlDropdown);
- const findSortDirectionButton = () => wrapper.find(GlButton);
+ const findSortButtonGroup = () => wrapper.findComponent(GlButtonGroup);
+ const findSortDropdown = () => wrapper.findComponent(GlDropdown);
+ const findSortDirectionButton = () => wrapper.findComponent(GlButton);
const findDropdownItems = () => findSortDropdown().findAllComponents(GlDropdownItem);
const findDropdownItemsText = () => findDropdownItems().wrappers.map((w) => w.text());
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 2f93d3f6805..c442ffa521d 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import Api from '~/api';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import * as actions from '~/search/store/actions';
@@ -37,8 +37,8 @@ describe('Global Search Store Actions', () => {
let state;
const flashCallback = (callCount) => {
- expect(createFlash).toHaveBeenCalledTimes(callCount);
- createFlash.mockClear();
+ expect(createAlert).toHaveBeenCalledTimes(callCount);
+ createAlert.mockClear();
};
beforeEach(() => {
diff --git a/spec/frontend/search/topbar/components/app_spec.js b/spec/frontend/search/topbar/components/app_spec.js
index 0a44688bfe0..c7fd7084101 100644
--- a/spec/frontend/search/topbar/components/app_spec.js
+++ b/spec/frontend/search/topbar/components/app_spec.js
@@ -36,9 +36,9 @@ describe('GlobalSearchTopbar', () => {
wrapper.destroy();
});
- const findGlSearchBox = () => wrapper.find(GlSearchBoxByClick);
- const findGroupFilter = () => wrapper.find(GroupFilter);
- const findProjectFilter = () => wrapper.find(ProjectFilter);
+ const findGlSearchBox = () => wrapper.findComponent(GlSearchBoxByClick);
+ const findGroupFilter = () => wrapper.findComponent(GroupFilter);
+ const findProjectFilter = () => wrapper.findComponent(ProjectFilter);
describe('template', () => {
beforeEach(() => {
diff --git a/spec/frontend/search/topbar/components/group_filter_spec.js b/spec/frontend/search/topbar/components/group_filter_spec.js
index bd173791fee..b2d0297fdc2 100644
--- a/spec/frontend/search/topbar/components/group_filter_spec.js
+++ b/spec/frontend/search/topbar/components/group_filter_spec.js
@@ -53,7 +53,7 @@ describe('GroupFilter', () => {
wrapper.destroy();
});
- const findSearchableDropdown = () => wrapper.find(SearchableDropdown);
+ const findSearchableDropdown = () => wrapper.findComponent(SearchableDropdown);
describe('template', () => {
beforeEach(() => {
diff --git a/spec/frontend/search/topbar/components/project_filter_spec.js b/spec/frontend/search/topbar/components/project_filter_spec.js
index 5afcd281d0c..297a536e075 100644
--- a/spec/frontend/search/topbar/components/project_filter_spec.js
+++ b/spec/frontend/search/topbar/components/project_filter_spec.js
@@ -53,7 +53,7 @@ describe('ProjectFilter', () => {
wrapper.destroy();
});
- const findSearchableDropdown = () => wrapper.find(SearchableDropdown);
+ const findSearchableDropdown = () => wrapper.findComponent(SearchableDropdown);
describe('template', () => {
beforeEach(() => {
diff --git a/spec/frontend/search_settings/components/search_settings_spec.js b/spec/frontend/search_settings/components/search_settings_spec.js
index d0a2018c7f0..3f856968db6 100644
--- a/spec/frontend/search_settings/components/search_settings_spec.js
+++ b/spec/frontend/search_settings/components/search_settings_spec.js
@@ -1,4 +1,4 @@
-import { GlSearchBoxByType } from '@gitlab/ui';
+import { GlEmptyState, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { setHTMLFixture } from 'helpers/fixtures';
import SearchSettings from '~/search_settings/components/search_settings.vue';
@@ -14,7 +14,7 @@ describe('search_settings/components/search_settings.vue', () => {
const EXTRA_SETTINGS_ID = 'js-extra-settings';
const TEXT_CONTAIN_SEARCH_TERM = `This text contain ${SEARCH_TERM}.`;
const TEXT_WITH_SIBLING_ELEMENTS = `${SEARCH_TERM} <a data-testid="sibling" href="#">Learn more</a>.`;
-
+ const HIDE_WHEN_EMPTY_CLASS = 'js-hide-when-nothing-matches-search';
let wrapper;
const buildWrapper = () => {
@@ -22,6 +22,7 @@ describe('search_settings/components/search_settings.vue', () => {
propsData: {
searchRoot: document.querySelector(`#${ROOT_ID}`),
sectionSelector: SECTION_SELECTOR,
+ hideWhenEmptySelector: `.${HIDE_WHEN_EMPTY_CLASS}`,
isExpandedFn: isExpanded,
},
// Add real listeners so we can simplify and strengthen some tests.
@@ -45,7 +46,9 @@ describe('search_settings/components/search_settings.vue', () => {
};
const findMatchSiblingElement = () => document.querySelector(`[data-testid="sibling"]`);
- const findSearchBox = () => wrapper.find(GlSearchBoxByType);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findHideWhenEmpty = () => document.querySelector(`.${HIDE_WHEN_EMPTY_CLASS}`);
const search = (term) => {
findSearchBox().vm.$emit('input', term);
};
@@ -67,6 +70,9 @@ describe('search_settings/components/search_settings.vue', () => {
<span>${TEXT_CONTAIN_SEARCH_TERM}</span>
<span>${TEXT_WITH_SIBLING_ELEMENTS}</span>
</section>
+ <div class="row ${HIDE_WHEN_EMPTY_CLASS}">
+ <button type="submit">Save</button>
+ </div>
</div>
</div>
`);
@@ -93,13 +99,41 @@ describe('search_settings/components/search_settings.vue', () => {
expect(wrapper.emitted('expand')).toEqual([[section]]);
});
+ describe('when nothing matches the search term', () => {
+ beforeEach(() => {
+ search('xxxxxxxxxxx');
+ });
+
+ it('shows an empty state', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('hides the form buttons', () => {
+ expect(findHideWhenEmpty()).toHaveClass(HIDE_CLASS);
+ });
+ });
+
+ describe('when something matches the search term', () => {
+ beforeEach(() => {
+ search(SEARCH_TERM);
+ });
+
+ it('shows no empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+
+ it('shows the form buttons', () => {
+ expect(findHideWhenEmpty()).not.toHaveClass(HIDE_CLASS);
+ });
+ });
+
it('highlight elements that match the search term', () => {
search(SEARCH_TERM);
expect(highlightedElementsCount()).toBe(3);
});
- it('highlight only search term and not the whole line', () => {
+ it('highlights only search term and not the whole line', () => {
search(SEARCH_TERM);
expect(highlightedTextNodes()).toBe(true);
@@ -142,6 +176,10 @@ describe('search_settings/components/search_settings.vue', () => {
expect(visibleSectionsCount()).toBe(sectionsCount());
});
+ it('hides the empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+
it('removes the highlight from all elements', () => {
expect(highlightedElementsCount()).toBe(0);
});
diff --git a/spec/frontend/security_configuration/components/app_spec.js b/spec/frontend/security_configuration/components/app_spec.js
index 222cabc6a63..ddefda2ffc3 100644
--- a/spec/frontend/security_configuration/components/app_spec.js
+++ b/spec/frontend/security_configuration/components/app_spec.js
@@ -281,7 +281,7 @@ describe('App component', () => {
});
});
- it(shouldRender ? 'renders' : 'does not render', () => {
+ it(`${shouldRender ? 'renders' : 'does not render'}`, () => {
expect(findAutoDevopsEnabledAlert().exists()).toBe(shouldRender);
});
});
diff --git a/spec/frontend/security_configuration/components/training_provider_list_spec.js b/spec/frontend/security_configuration/components/training_provider_list_spec.js
index b6451af57d7..8f2b5383191 100644
--- a/spec/frontend/security_configuration/components/training_provider_list_spec.js
+++ b/spec/frontend/security_configuration/components/training_provider_list_spec.js
@@ -193,7 +193,7 @@ describe('TrainingProviderList component', () => {
});
it(`shows the learn more link for enabled card ${index}`, () => {
- const learnMoreLink = findCards().at(index).find(GlLink);
+ const learnMoreLink = findCards().at(index).findComponent(GlLink);
const tempLogo = TEMP_PROVIDER_URLS[name];
if (tempLogo) {
@@ -224,7 +224,7 @@ describe('TrainingProviderList component', () => {
});
it('shows a info-tooltip that describes the purpose of a primary provider', () => {
- const infoIcon = findPrimaryProviderRadios().at(index).find(GlIcon);
+ const infoIcon = findPrimaryProviderRadios().at(index).findComponent(GlIcon);
const tooltip = getBinding(infoIcon.element, 'gl-tooltip');
expect(infoIcon.props()).toMatchObject({
diff --git a/spec/frontend/security_configuration/components/upgrade_banner_spec.js b/spec/frontend/security_configuration/components/upgrade_banner_spec.js
index ff44acfc4f9..c34d8e47a6c 100644
--- a/spec/frontend/security_configuration/components/upgrade_banner_spec.js
+++ b/spec/frontend/security_configuration/components/upgrade_banner_spec.js
@@ -79,7 +79,7 @@ describe('UpgradeBanner component', () => {
expect(wrapperText).toContain('statistics in the merge request');
expect(wrapperText).toContain('statistics across projects');
expect(wrapperText).toContain('Runtime security metrics');
- expect(wrapperText).toContain('More scan types, including Container Scanning,');
+ expect(wrapperText).toContain('More scan types, including DAST,');
});
describe('when user interacts', () => {
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
index 89ad5a00a14..c690bbf1c57 100644
--- a/spec/frontend/self_monitor/components/self_monitor_form_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -42,7 +42,7 @@ describe('self monitor component', () => {
it('renders as an expand button by default', () => {
wrapper = shallowMount(SelfMonitor, { store });
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
expect(button.text()).toBe('Expand');
});
@@ -79,7 +79,7 @@ describe('self monitor component', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(
- wrapper.find({ ref: 'selfMonitoringFormText' }).find('a').attributes('href'),
+ wrapper.findComponent({ ref: 'selfMonitoringFormText' }).find('a').attributes('href'),
).toEqual(`${TEST_HOST}/instance-administrators-random/gitlab-self-monitoring`);
});
diff --git a/spec/frontend/set_status_modal/set_status_form_spec.js b/spec/frontend/set_status_modal/set_status_form_spec.js
index 8e1623eedf5..486e06d2906 100644
--- a/spec/frontend/set_status_modal/set_status_form_spec.js
+++ b/spec/frontend/set_status_modal/set_status_form_spec.js
@@ -127,6 +127,8 @@ describe('SetStatusForm', () => {
describe('when `Clear status after` dropdown is changed', () => {
it('emits `clear-status-after-click`', async () => {
+ await createComponent();
+
await wrapper.findByTestId('thirtyMinutes').trigger('click');
expect(wrapper.emitted('clear-status-after-click')).toEqual([[timeRanges[0]]]);
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index c5fb590646d..53d2a9e0978 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -4,7 +4,7 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import { initEmojiMock, clearEmojiMock } from 'helpers/emoji';
import * as UserApi from '~/api/user_api';
import EmojiPicker from '~/emoji/components/picker.vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import stubChildren from 'helpers/stub_children';
import SetStatusModalWrapper from '~/set_status_modal/set_status_modal_wrapper.vue';
import { AVAILABILITY_STATUS } from '~/set_status_modal/constants';
@@ -51,11 +51,11 @@ describe('SetStatusModalWrapper', () => {
});
};
- const findModal = () => wrapper.find(GlModal);
+ const findModal = () => wrapper.findComponent(GlModal);
const findMessageField = () =>
wrapper.findByPlaceholderText(SetStatusForm.i18n.statusMessagePlaceholder);
const findClearStatusButton = () => wrapper.find('.js-clear-user-status-button');
- const findAvailabilityCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findAvailabilityCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const findClearStatusAtMessage = () => wrapper.find('[data-testid="clear-status-at-message"]');
const getEmojiPicker = () => wrapper.findComponent(EmojiPickerStub);
@@ -253,7 +253,7 @@ describe('SetStatusModalWrapper', () => {
findModal().vm.$emit('primary');
await nextTick();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: "Sorry, we weren't able to set your status. Please try again later.",
});
});
diff --git a/spec/frontend/sidebar/assignee_title_spec.js b/spec/frontend/sidebar/assignee_title_spec.js
index e29e3d489a5..14a6bdbf907 100644
--- a/spec/frontend/sidebar/assignee_title_spec.js
+++ b/spec/frontend/sidebar/assignee_title_spec.js
@@ -85,7 +85,7 @@ describe('AssigneeTitle component', () => {
editable: false,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
it('renders spinner when loading', () => {
@@ -95,7 +95,7 @@ describe('AssigneeTitle component', () => {
editable: false,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('does not render edit link when not editable', () => {
diff --git a/spec/frontend/sidebar/assignees_spec.js b/spec/frontend/sidebar/assignees_spec.js
index c2aff456abb..7cf7fd33022 100644
--- a/spec/frontend/sidebar/assignees_spec.js
+++ b/spec/frontend/sidebar/assignees_spec.js
@@ -33,7 +33,7 @@ describe('Assignee component', () => {
it('displays no assignee icon when collapsed', () => {
createWrapper();
const collapsedChildren = findCollapsedChildren();
- const userIcon = collapsedChildren.at(0).find(GlIcon);
+ const userIcon = collapsedChildren.at(0).findComponent(GlIcon);
expect(collapsedChildren.length).toBe(1);
expect(collapsedChildren.at(0).attributes('aria-label')).toBe('None');
diff --git a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
index 8cde70ff8da..4764f3607bc 100644
--- a/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignee_avatar_link_spec.js
@@ -46,7 +46,7 @@ describe('AssigneeAvatarLink component', () => {
it('renders assignee avatar', () => {
createComponent();
- expect(wrapper.find(AssigneeAvatar).props()).toEqual(
+ expect(wrapper.findComponent(AssigneeAvatar).props()).toEqual(
expect.objectContaining({
issuableType: TEST_ISSUABLE_TYPE,
user: userDataMock(),
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
index 81ff51133bf..7e7d4921cfa 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_list_spec.js
@@ -21,7 +21,7 @@ describe('CollapsedAssigneeList component', () => {
});
}
- const findNoUsersIcon = () => wrapper.find(GlIcon);
+ const findNoUsersIcon = () => wrapper.findComponent(GlIcon);
const findAvatarCounter = () => wrapper.find('.avatar-counter');
const findAssignees = () => wrapper.findAllComponents(CollapsedAssignee);
const getTooltipTitle = () => wrapper.attributes('title');
diff --git a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
index 2d5a3653631..4db95114b96 100644
--- a/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
+++ b/spec/frontend/sidebar/components/assignees/collapsed_assignee_spec.js
@@ -34,7 +34,7 @@ describe('CollapsedAssignee assignee component', () => {
it('has assignee avatar', () => {
createComponent();
- expect(wrapper.find(AssigneeAvatar).props()).toEqual({
+ expect(wrapper.findComponent(AssigneeAvatar).props()).toEqual({
imgSize: 24,
user: TEST_USER,
issuableType: TEST_ISSUABLE_TYPE,
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
index 3644a51c7fd..cbb4c41dd14 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
@@ -5,7 +5,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { IssuableType } from '~/issues/constants';
import SidebarAssigneesRealtime from '~/sidebar/components/assignees/assignees_realtime.vue';
import IssuableAssignees from '~/sidebar/components/assignees/issuable_assignees.vue';
@@ -167,7 +167,7 @@ describe('Sidebar assignees widget', () => {
});
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while fetching participants.',
});
});
@@ -333,7 +333,7 @@ describe('Sidebar assignees widget', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while updating assignees.',
});
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
index 724fba62479..6c22d2f687d 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
@@ -67,15 +67,33 @@ describe('boards sidebar remove issue', () => {
expect(findLoader().exists()).toBe(true);
});
- it('shows expanded content and hides collapsed content when clicking edit button', async () => {
- const slots = { default: '<div>Select item</div>' };
- createComponent({ canUpdate: true, slots });
- findEditButton().vm.$emit('click');
-
- await nextTick();
-
- expect(findCollapsed().isVisible()).toBe(false);
- expect(findExpanded().isVisible()).toBe(true);
+ describe('when clicking edit button', () => {
+ describe('when can edit', () => {
+ it('shows expanded (editable) content', async () => {
+ const slots = { default: '<div>Select item</div>' };
+ createComponent({ canUpdate: true, slots });
+ findEditButton().vm.$emit('click');
+
+ await nextTick();
+
+ expect(findCollapsed().isVisible()).toBe(false);
+ expect(findExpanded().isVisible()).toBe(true);
+ });
+ });
+
+ describe('when cannot edit', () => {
+ it('shows collapsed (non-editable) content', async () => {
+ const slots = { default: '<div>Select item</div>' };
+ createComponent({ canUpdate: false, slots });
+ // Simulate parent component calling `expand` method when user
+ // clicks on collapsed sidebar (e.g. in sidebar_weight_widget.vue)
+ wrapper.vm.expand();
+ await nextTick();
+
+ expect(findCollapsed().isVisible()).toBe(true);
+ expect(findExpanded().isVisible()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
index b902d7313fd..03c2e1a37a9 100644
--- a/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
+++ b/spec/frontend/sidebar/components/assignees/uncollapsed_assignee_list_spec.js
@@ -46,7 +46,7 @@ describe('UncollapsedAssigneeList component', () => {
});
it('calls the AssigneeAvatarLink with the proper props', () => {
- expect(wrapper.find(AssigneeAvatarLink).exists()).toBe(true);
+ expect(wrapper.findComponent(AssigneeAvatarLink).exists()).toBe(true);
});
it('Shows one user with avatar, username and author name', () => {
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
index 1ea035c7184..b27f7c6b4e1 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
@@ -2,7 +2,7 @@ import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import SidebarConfidentialityForm from '~/sidebar/components/confidential/sidebar_confidentiality_form.vue';
import { confidentialityQueries } from '~/sidebar/constants';
@@ -63,7 +63,7 @@ describe('Sidebar Confidentiality Form', () => {
findConfidentialToggle().vm.$emit('click', new MouseEvent('click'));
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Something went wrong while setting issue confidentiality.',
});
});
@@ -77,7 +77,7 @@ describe('Sidebar Confidentiality Form', () => {
findConfidentialToggle().vm.$emit('click', new MouseEvent('click'));
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'Houston, we have a problem!',
});
});
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
index 3a3f0b1d9fa..e486a8e9ec7 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
@@ -4,7 +4,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import SidebarConfidentialityContent from '~/sidebar/components/confidential/sidebar_confidentiality_content.vue';
import SidebarConfidentialityForm from '~/sidebar/components/confidential/sidebar_confidentiality_form.vue';
import SidebarConfidentialityWidget, {
@@ -126,7 +126,7 @@ describe('Sidebar Confidentiality Widget', () => {
});
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('closes the form and dispatches an event when `closeForm` is emitted', async () => {
diff --git a/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js b/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
index 699b2bbd0b1..69a8d645973 100644
--- a/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
+++ b/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
@@ -12,6 +12,6 @@ describe('CopyEmailToClipboard component', () => {
});
it('sets CopyableField `value` prop to issueEmailAddress', () => {
- expect(wrapper.find(CopyableField).props('value')).toBe(mockIssueEmailAddress);
+ expect(wrapper.findComponent(CopyableField).props('value')).toBe(mockIssueEmailAddress);
});
});
diff --git a/spec/frontend/sidebar/components/crm_contacts_spec.js b/spec/frontend/sidebar/components/crm_contacts_spec.js
index 6456829258f..6d76fa1f9df 100644
--- a/spec/frontend/sidebar/components/crm_contacts_spec.js
+++ b/spec/frontend/sidebar/components/crm_contacts_spec.js
@@ -3,7 +3,7 @@ import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import CrmContacts from '~/sidebar/components/crm_contacts/crm_contacts.vue';
import getIssueCrmContactsQuery from '~/sidebar/components/crm_contacts/queries/get_issue_crm_contacts.query.graphql';
import issueCrmContactsSubscription from '~/sidebar/components/crm_contacts/queries/issue_crm_contacts.subscription.graphql';
@@ -47,7 +47,7 @@ describe('Issue crm contacts component', () => {
mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('calls the query with correct variables', () => {
diff --git a/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js b/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js
index 1e2173e2988..67413cffdda 100644
--- a/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js
+++ b/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js
@@ -4,7 +4,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import SidebarDateWidget from '~/sidebar/components/date/sidebar_date_widget.vue';
import SidebarFormattedDate from '~/sidebar/components/date/sidebar_formatted_date.vue';
import SidebarInheritDate from '~/sidebar/components/date/sidebar_inherit_date.vue';
@@ -28,7 +28,7 @@ describe('Sidebar date Widget', () => {
const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
const findPopoverIcon = () => wrapper.find('[data-testid="inherit-date-popover"]');
- const findDatePicker = () => wrapper.find(GlDatepicker);
+ const findDatePicker = () => wrapper.findComponent(GlDatepicker);
const createComponent = ({
dueDateQueryHandler = jest.fn().mockResolvedValue(issuableDueDateResponse()),
@@ -149,14 +149,14 @@ describe('Sidebar date Widget', () => {
createComponent({ canInherit });
await waitForPromises();
- expect(wrapper.find(component).exists()).toBe(expected);
+ expect(wrapper.findComponent(component).exists()).toBe(expected);
},
);
it('does not render SidebarInheritDate when canInherit is true and date is loading', async () => {
createComponent({ canInherit: true });
- expect(wrapper.find(SidebarInheritDate).exists()).toBe(false);
+ expect(wrapper.findComponent(SidebarInheritDate).exists()).toBe(false);
});
it('displays a flash message when query is rejected', async () => {
@@ -165,7 +165,7 @@ describe('Sidebar date Widget', () => {
});
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it.each`
diff --git a/spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js b/spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js
index 1eda4ea977f..cbe01263dcd 100644
--- a/spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js
+++ b/spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js
@@ -5,7 +5,7 @@ import SidebarFormattedDate from '~/sidebar/components/date/sidebar_formatted_da
describe('SidebarFormattedDate', () => {
let wrapper;
const findFormattedDate = () => wrapper.find("[data-testid='sidebar-date-value']");
- const findRemoveButton = () => wrapper.find(GlButton);
+ const findRemoveButton = () => wrapper.findComponent(GlButton);
const createComponent = ({ hasDate = true } = {}) => {
wrapper = shallowMount(SidebarFormattedDate, {
diff --git a/spec/frontend/sidebar/components/severity/severity_spec.js b/spec/frontend/sidebar/components/severity/severity_spec.js
index 1e4624e4dcd..2146155791e 100644
--- a/spec/frontend/sidebar/components/severity/severity_spec.js
+++ b/spec/frontend/sidebar/components/severity/severity_spec.js
@@ -21,7 +21,7 @@ describe('SeverityToken', () => {
}
});
- const findIcon = () => wrapper.find(GlIcon);
+ const findIcon = () => wrapper.findComponent(GlIcon);
it('renders severity token for each severity type', () => {
Object.values(INCIDENT_SEVERITY).forEach((severity) => {
diff --git a/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js b/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
index 83eb9a18597..bdea33371d8 100644
--- a/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
+++ b/spec/frontend/sidebar/components/severity/sidebar_severity_spec.js
@@ -2,7 +2,7 @@ import { GlDropdown, GlDropdownItem, GlLoadingIcon, GlTooltip, GlSprintf } from
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { INCIDENT_SEVERITY, ISSUABLE_TYPES } from '~/sidebar/components/severity/constants';
import updateIssuableSeverity from '~/sidebar/components/severity/graphql/mutations/update_issuable_severity.mutation.graphql';
import SeverityToken from '~/sidebar/components/severity/severity.vue';
@@ -59,7 +59,7 @@ describe('SidebarSeverity', () => {
const findCriticalSeverityDropdownItem = () => wrapper.findComponent(GlDropdownItem);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findTooltip = () => wrapper.findComponent(GlTooltip);
- const findCollapsedSeverity = () => wrapper.find({ ref: 'severity' });
+ const findCollapsedSeverity = () => wrapper.findComponent({ ref: 'severity' });
describe('Severity widget', () => {
it('renders severity dropdown and token', () => {
@@ -104,7 +104,7 @@ describe('SidebarSeverity', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('shows loading icon while updating', async () => {
diff --git a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
index 6761731c093..8ab4d8ea051 100644
--- a/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
+++ b/spec/frontend/sidebar/components/sidebar_dropdown_widget_spec.js
@@ -15,7 +15,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { IssuableType } from '~/issues/constants';
import { timeFor } from '~/lib/utils/datetime_utility';
@@ -369,9 +369,9 @@ describe('SidebarDropdownWidget', () => {
findDropdownItemWithText('title').vm.$emit('click');
});
- it(`calls createFlash with "${expectedMsg}"`, async () => {
+ it(`calls createAlert with "${expectedMsg}"`, async () => {
await nextTick();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: expectedMsg,
captureError: true,
error: expectedMsg,
@@ -455,14 +455,14 @@ describe('SidebarDropdownWidget', () => {
describe('milestones', () => {
let projectMilestonesSpy;
- it('should call createFlash if milestones query fails', async () => {
+ it('should call createAlert if milestones query fails', async () => {
await createComponentWithApollo({
projectMilestonesSpy: jest.fn().mockRejectedValue(error),
});
await clickEdit();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: wrapper.vm.i18n.listFetchError,
captureError: true,
error: expect.any(Error),
@@ -514,12 +514,12 @@ describe('SidebarDropdownWidget', () => {
});
describe('currentAttributes', () => {
- it('should call createFlash if currentAttributes query fails', async () => {
+ it('should call createAlert if currentAttributes query fails', async () => {
await createComponentWithApollo({
currentMilestoneSpy: jest.fn().mockRejectedValue(error),
});
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: wrapper.vm.i18n.currentFetchError,
captureError: true,
error: expect.any(Error),
diff --git a/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
index 430acf9f9e7..c94f9918243 100644
--- a/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
+++ b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
@@ -4,7 +4,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
import SidebarSubscriptionWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
import issueSubscribedQuery from '~/sidebar/queries/issue_subscribed.query.graphql';
@@ -144,7 +144,7 @@ describe('Sidebar Subscriptions Widget', () => {
});
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
describe('merge request', () => {
diff --git a/spec/frontend/sidebar/components/time_tracking/report_spec.js b/spec/frontend/sidebar/components/time_tracking/report_spec.js
index 4e619a4e609..af72122052f 100644
--- a/spec/frontend/sidebar/components/time_tracking/report_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/report_spec.js
@@ -6,7 +6,7 @@ import VueApollo from 'vue-apollo';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import Report from '~/sidebar/components/time_tracking/report.vue';
import getIssueTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_issue_timelogs.query.graphql';
import getMrTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_mr_timelogs.query.graphql';
@@ -65,7 +65,7 @@ describe('Issuable Time Tracking Report', () => {
mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
describe('for issue', () => {
@@ -153,7 +153,7 @@ describe('Issuable Time Tracking Report', () => {
await findDeleteButton().trigger('click');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(mutateSpy).toHaveBeenCalledWith({
mutation: deleteTimelogMutation,
variables: {
@@ -164,7 +164,7 @@ describe('Issuable Time Tracking Report', () => {
});
});
- it('calls `createFlash` with errorMessage and does not remove the row on promise reject', async () => {
+ it('calls `createAlert` with errorMessage and does not remove the row on promise reject', async () => {
const mutateSpy = jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue({});
await waitForPromises();
@@ -180,7 +180,7 @@ describe('Issuable Time Tracking Report', () => {
},
});
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'An error occurred while removing the timelog.',
captureError: true,
error: expect.any(Object),
diff --git a/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js b/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
index ea931782d1e..f73491ca95f 100644
--- a/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
+++ b/spec/frontend/sidebar/components/todo_toggle/sidebar_todo_widget_spec.js
@@ -4,7 +4,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import SidebarTodoWidget from '~/sidebar/components/todo_toggle/sidebar_todo_widget.vue';
import epicTodoQuery from '~/sidebar/queries/epic_todo.query.graphql';
import TodoButton from '~/vue_shared/components/sidebar/todo_toggle/todo_button.vue';
@@ -83,7 +83,7 @@ describe('Sidebar Todo Widget', () => {
});
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
describe('collapsed', () => {
@@ -97,13 +97,13 @@ describe('Sidebar Todo Widget', () => {
});
it('shows add todo icon', () => {
- expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
- expect(wrapper.find(GlIcon).props('name')).toBe('todo-add');
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe('todo-add');
});
it('sets default tooltip title', () => {
- expect(wrapper.find(GlButton).attributes('title')).toBe('Add a to do');
+ expect(wrapper.findComponent(GlButton).attributes('title')).toBe('Add a to do');
});
it('when user has a to do', async () => {
@@ -112,12 +112,12 @@ describe('Sidebar Todo Widget', () => {
});
await waitForPromises();
- expect(wrapper.find(GlIcon).props('name')).toBe('todo-done');
- expect(wrapper.find(GlButton).attributes('title')).toBe('Mark as done');
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe('todo-done');
+ expect(wrapper.findComponent(GlButton).attributes('title')).toBe('Mark as done');
});
it('emits `todoUpdated` event on click on icon', async () => {
- wrapper.find(GlIcon).vm.$emit('click', event);
+ wrapper.findComponent(GlIcon).vm.$emit('click', event);
await nextTick();
expect(wrapper.emitted('todoUpdated')).toEqual([[false]]);
diff --git a/spec/frontend/sidebar/issuable_assignees_spec.js b/spec/frontend/sidebar/issuable_assignees_spec.js
index dc59b68bbd4..1161fefcc64 100644
--- a/spec/frontend/sidebar/issuable_assignees_spec.js
+++ b/spec/frontend/sidebar/issuable_assignees_spec.js
@@ -17,7 +17,7 @@ describe('IssuableAssignees', () => {
},
});
};
- const findUncollapsedAssigneeList = () => wrapper.find(UncollapsedAssigneeList);
+ const findUncollapsedAssigneeList = () => wrapper.findComponent(UncollapsedAssigneeList);
const findEmptyAssignee = () => wrapper.find('[data-testid="none"]');
afterEach(() => {
diff --git a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
index 971744edb0f..2abb0c24d7d 100644
--- a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { createStore as createMrStore } from '~/mr_notes/stores';
import createStore from '~/notes/stores';
import EditFormButtons from '~/sidebar/components/lock/edit_form_buttons.vue';
@@ -129,7 +129,7 @@ describe('EditFormButtons', () => {
});
it('does not flash an error message', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
});
@@ -162,7 +162,7 @@ describe('EditFormButtons', () => {
});
it('calls flash with the correct message', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `Something went wrong trying to change the locked state of this ${issuableDisplayName}`,
});
});
diff --git a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
index 986ccaea4b6..8f825847cfc 100644
--- a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
+++ b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
@@ -26,7 +26,7 @@ describe('IssuableLockForm', () => {
const findSidebarCollapseIcon = () => wrapper.find('[data-testid="sidebar-collapse-icon"]');
const findLockStatus = () => wrapper.find('[data-testid="lock-status"]');
const findEditLink = () => wrapper.find('[data-testid="edit-link"]');
- const findEditForm = () => wrapper.find(EditForm);
+ const findEditForm = () => wrapper.findComponent(EditForm);
const findSidebarLockStatusTooltip = () =>
getBinding(findSidebarCollapseIcon().element, 'gl-tooltip');
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 2afe9647cbe..391cbb1e0d5 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -283,7 +283,6 @@ export const epicParticipantsResponse = () => ({
name: 'Jacki Kub',
username: 'francina.skiles',
webUrl: '/franc',
- status: null,
},
],
},
diff --git a/spec/frontend/sidebar/participants_spec.js b/spec/frontend/sidebar/participants_spec.js
index 2517b625225..f7a626a189c 100644
--- a/spec/frontend/sidebar/participants_spec.js
+++ b/spec/frontend/sidebar/participants_spec.js
@@ -36,7 +36,7 @@ describe('Participants', () => {
loading: true,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('does not show loading spinner not loading', () => {
@@ -44,7 +44,7 @@ describe('Participants', () => {
loading: false,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
it('shows participant count when given', () => {
@@ -73,7 +73,7 @@ describe('Participants', () => {
loading: true,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('when only showing visible participants, shows an avatar only for each participant under the limit', async () => {
diff --git a/spec/frontend/sidebar/reviewer_title_spec.js b/spec/frontend/sidebar/reviewer_title_spec.js
index 6b4eed5ad0f..68ecd62e4c6 100644
--- a/spec/frontend/sidebar/reviewer_title_spec.js
+++ b/spec/frontend/sidebar/reviewer_title_spec.js
@@ -47,7 +47,7 @@ describe('ReviewerTitle component', () => {
editable: false,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
it('renders spinner when loading', () => {
@@ -57,7 +57,7 @@ describe('ReviewerTitle component', () => {
editable: false,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('does not render edit link when not editable', () => {
diff --git a/spec/frontend/sidebar/reviewers_spec.js b/spec/frontend/sidebar/reviewers_spec.js
index 88bacc9b7f7..229f7ffbe04 100644
--- a/spec/frontend/sidebar/reviewers_spec.js
+++ b/spec/frontend/sidebar/reviewers_spec.js
@@ -43,7 +43,7 @@ describe('Reviewer component', () => {
it('displays no reviewer icon when collapsed', () => {
createWrapper();
const collapsedChildren = findCollapsedChildren();
- const userIcon = collapsedChildren.at(0).find(GlIcon);
+ const userIcon = collapsedChildren.at(0).findComponent(GlIcon);
expect(collapsedChildren.length).toBe(1);
expect(collapsedChildren.at(0).attributes('aria-label')).toBe('None');
diff --git a/spec/frontend/sidebar/sidebar_assignees_spec.js b/spec/frontend/sidebar/sidebar_assignees_spec.js
index 68d20060c37..2cb2425532b 100644
--- a/spec/frontend/sidebar/sidebar_assignees_spec.js
+++ b/spec/frontend/sidebar/sidebar_assignees_spec.js
@@ -73,19 +73,19 @@ describe('sidebar assignees', () => {
it('hides assignees until fetched', async () => {
createComponent();
- expect(wrapper.find(Assigness).exists()).toBe(false);
+ expect(wrapper.findComponent(Assigness).exists()).toBe(false);
wrapper.vm.store.isFetching.assignees = false;
await nextTick();
- expect(wrapper.find(Assigness).exists()).toBe(true);
+ expect(wrapper.findComponent(Assigness).exists()).toBe(true);
});
describe('when issuableType is issue', () => {
it('finds AssigneesRealtime component', () => {
createComponent();
- expect(wrapper.find(AssigneesRealtime).exists()).toBe(true);
+ expect(wrapper.findComponent(AssigneesRealtime).exists()).toBe(true);
});
});
@@ -93,7 +93,7 @@ describe('sidebar assignees', () => {
it('does not find AssigneesRealtime component', () => {
createComponent({ issuableType: 'MR' });
- expect(wrapper.find(AssigneesRealtime).exists()).toBe(false);
+ expect(wrapper.findComponent(AssigneesRealtime).exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/sidebar/sidebar_mediator_spec.js b/spec/frontend/sidebar/sidebar_mediator_spec.js
index 355f0c45bbe..bb5e7f7ff16 100644
--- a/spec/frontend/sidebar/sidebar_mediator_spec.js
+++ b/spec/frontend/sidebar/sidebar_mediator_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import * as urlUtility from '~/lib/utils/url_utility';
-import SidebarService, { gqClient } from '~/sidebar/services/sidebar_service';
+import SidebarService from '~/sidebar/services/sidebar_service';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarStore from '~/sidebar/stores/sidebar_store';
import Mock from './mock_data';
@@ -42,22 +42,14 @@ describe('Sidebar mediator', () => {
});
});
- it('fetches the data', () => {
+ it('fetches the data', async () => {
const mockData = Mock.responseMap.GET[mediatorMockData.endpoint];
mock.onGet(mediatorMockData.endpoint).reply(200, mockData);
-
- const mockGraphQlData = Mock.graphQlResponseData;
- const graphQlSpy = jest.spyOn(gqClient, 'query').mockReturnValue({
- data: mockGraphQlData,
- });
const spy = jest.spyOn(mediator, 'processFetchedData').mockReturnValue(Promise.resolve());
+ await mediator.fetch();
- return mediator.fetch().then(() => {
- expect(spy).toHaveBeenCalledWith(mockData, mockGraphQlData);
-
- spy.mockRestore();
- graphQlSpy.mockRestore();
- });
+ expect(spy).toHaveBeenCalledWith(mockData);
+ spy.mockRestore();
});
it('processes fetched data', () => {
diff --git a/spec/frontend/sidebar/sidebar_move_issue_spec.js b/spec/frontend/sidebar/sidebar_move_issue_spec.js
index 2e6807ed9d8..195cc6ddeeb 100644
--- a/spec/frontend/sidebar/sidebar_move_issue_spec.js
+++ b/spec/frontend/sidebar/sidebar_move_issue_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import SidebarMoveIssue from '~/sidebar/lib/sidebar_move_issue';
import SidebarService from '~/sidebar/services/sidebar_service';
@@ -115,7 +115,7 @@ describe('SidebarMoveIssue', () => {
// Wait for the move issue request to fail
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
expect(test.$confirmButton.prop('disabled')).toBe(false);
expect(test.$confirmButton.hasClass('is-loading')).toBe(false);
});
diff --git a/spec/frontend/sidebar/subscriptions_spec.js b/spec/frontend/sidebar/subscriptions_spec.js
index 6ab8e1e0ebc..1a1aa370eef 100644
--- a/spec/frontend/sidebar/subscriptions_spec.js
+++ b/spec/frontend/sidebar/subscriptions_spec.js
@@ -108,7 +108,7 @@ describe('Subscriptions', () => {
expect(wrapper.findByTestId('subscription-title').text()).toContain(
subscribeDisabledDescription,
);
- expect(wrapper.find({ ref: 'tooltip' }).attributes('title')).toBe(
+ expect(wrapper.findComponent({ ref: 'tooltip' }).attributes('title')).toBe(
subscribeDisabledDescription,
);
});
diff --git a/spec/frontend/sidebar/todo_spec.js b/spec/frontend/sidebar/todo_spec.js
index 5f696b237e0..8e6597bf80f 100644
--- a/spec/frontend/sidebar/todo_spec.js
+++ b/spec/frontend/sidebar/todo_spec.js
@@ -43,8 +43,8 @@ describe('SidebarTodo', () => {
({ isTodo, iconClass, label, icon }) => {
createComponent({ isTodo });
- expect(wrapper.find(GlIcon).classes().join(' ')).toStrictEqual(iconClass);
- expect(wrapper.find(GlIcon).props('name')).toStrictEqual(icon);
+ expect(wrapper.findComponent(GlIcon).classes().join(' ')).toStrictEqual(iconClass);
+ expect(wrapper.findComponent(GlIcon).props('name')).toStrictEqual(icon);
expect(wrapper.find('button').text()).toBe(label);
},
);
@@ -76,19 +76,19 @@ describe('SidebarTodo', () => {
it('renders button icon when `collapsed` prop is `true`', () => {
createComponent({ collapsed: true });
- expect(wrapper.find(GlIcon).props('name')).toBe('todo-done');
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe('todo-done');
});
it('renders loading icon when `isActionActive` prop is true', () => {
createComponent({ isActionActive: true });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('hides button icon when `isActionActive` prop is true', () => {
createComponent({ collapsed: true, isActionActive: true });
- expect(wrapper.find(GlIcon).isVisible()).toBe(false);
+ expect(wrapper.findComponent(GlIcon).isVisible()).toBe(false);
});
});
});
diff --git a/spec/frontend/smart_interval_spec.js b/spec/frontend/smart_interval_spec.js
index 5dda097ae6a..64928fc4ae9 100644
--- a/spec/frontend/smart_interval_spec.js
+++ b/spec/frontend/smart_interval_spec.js
@@ -109,7 +109,7 @@ describe('SmartInterval', () => {
return waitForPromises().then(() => {
const { intervalId } = interval.state;
- expect(intervalId).toBeTruthy();
+ expect(intervalId).not.toBeUndefined();
});
});
});
@@ -130,7 +130,7 @@ describe('SmartInterval', () => {
jest.runOnlyPendingTimers();
return waitForPromises().then(() => {
- expect(interval.state.intervalId).toBeTruthy();
+ expect(interval.state.intervalId).not.toBeUndefined();
// simulates triggering of visibilitychange event
interval.onVisibilityChange({ target: { visibilityState: 'hidden' } });
@@ -148,16 +148,16 @@ describe('SmartInterval', () => {
jest.runOnlyPendingTimers();
return waitForPromises().then(() => {
- expect(interval.state.intervalId).toBeTruthy();
+ expect(interval.state.intervalId).not.toBeUndefined();
expect(
interval.getCurrentInterval() >= DEFAULT_STARTING_INTERVAL &&
interval.getCurrentInterval() <= DEFAULT_MAX_INTERVAL,
- ).toBeTruthy();
+ ).toBe(true);
// simulates triggering of visibilitychange event
interval.onVisibilityChange({ target: { visibilityState: 'hidden' } });
- expect(interval.state.intervalId).toBeTruthy();
+ expect(interval.state.intervalId).not.toBeUndefined();
expect(interval.getCurrentInterval()).toBe(HIDDEN_INTERVAL);
});
});
@@ -166,7 +166,7 @@ describe('SmartInterval', () => {
jest.runOnlyPendingTimers();
return waitForPromises().then(() => {
- expect(interval.state.intervalId).toBeTruthy();
+ expect(interval.state.intervalId).not.toBeUndefined();
// simulates triggering of visibilitychange event
interval.onVisibilityChange({ target: { visibilityState: 'hidden' } });
@@ -176,7 +176,7 @@ describe('SmartInterval', () => {
// simulates triggering of visibilitychange event
interval.onVisibilityChange({ target: { visibilityState: 'visible' } });
- expect(interval.state.intervalId).toBeTruthy();
+ expect(interval.state.intervalId).not.toBeUndefined();
});
});
@@ -194,7 +194,7 @@ describe('SmartInterval', () => {
it('should execute callback before first interval', () => {
interval = createDefaultSmartInterval({ immediateExecution: true });
- expect(interval.cfg.immediateExecution).toBeFalsy();
+ expect(interval.cfg.immediateExecution).toBe(false);
});
});
});
diff --git a/spec/frontend/snippet/collapsible_input_spec.js b/spec/frontend/snippet/collapsible_input_spec.js
index 56e64d136c2..4a6fd33b9e4 100644
--- a/spec/frontend/snippet/collapsible_input_spec.js
+++ b/spec/frontend/snippet/collapsible_input_spec.js
@@ -9,7 +9,7 @@ describe('~/snippet/collapsible_input', () => {
beforeEach(() => {
setHTMLFixture(`
- <form>
+ <form>
<div class="js-collapsible-input js-title">
<div class="js-collapsed d-none">
<input type="text" />
@@ -72,7 +72,7 @@ describe('~/snippet/collapsible_input', () => {
${'is collapsed'} | ${''} | ${true}
${'stays open if given value'} | ${'Hello world!'} | ${false}
`('when loses focus', ({ desc, value, isCollapsed }) => {
- it(desc, () => {
+ it(`${desc}`, () => {
findExpandedInput(descriptionEl).value = value;
focusIn(fooEl);
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index cf897414ccb..e7dab0ad79d 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -9,7 +9,7 @@ import { stubPerformanceWebAPI } from 'helpers/performance';
import waitForPromises from 'helpers/wait_for_promises';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import GetSnippetQuery from 'shared_queries/snippet/snippet.query.graphql';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import * as urlUtils from '~/lib/utils/url_utility';
import SnippetEditApp from '~/snippets/components/edit.vue';
import SnippetBlobActionsEdit from '~/snippets/components/snippet_blob_actions_edit.vue';
@@ -206,7 +206,7 @@ describe('Snippet Edit app', () => {
});
it('should hide loader', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
});
@@ -237,7 +237,7 @@ describe('Snippet Edit app', () => {
!titleHasErrors,
);
- expect(wrapper.find(SnippetBlobActionsEdit).props('isValid')).toEqual(
+ expect(wrapper.findComponent(SnippetBlobActionsEdit).props('isValid')).toEqual(
!blobActionsHasErrors,
);
},
@@ -273,7 +273,7 @@ describe('Snippet Edit app', () => {
selectedLevel: visibility,
});
- expect(wrapper.find(SnippetVisibilityEdit).props('value')).toBe(visibility);
+ expect(wrapper.findComponent(SnippetVisibilityEdit).props('value')).toBe(visibility);
});
describe('form submission handling', () => {
@@ -361,7 +361,7 @@ describe('Snippet Edit app', () => {
await waitForPromises();
expect(urlUtils.redirectTo).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `Can't create snippet: ${TEST_MUTATION_ERROR}`,
});
});
@@ -385,7 +385,7 @@ describe('Snippet Edit app', () => {
});
expect(urlUtils.redirectTo).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `Can't update snippet: ${TEST_MUTATION_ERROR}`,
});
},
@@ -407,7 +407,7 @@ describe('Snippet Edit app', () => {
it('should flash', () => {
// Apollo automatically wraps the resolver's error in a NetworkError
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: `Can't update snippet: ${TEST_API_ERROR.message}`,
});
});
diff --git a/spec/frontend/snippets/components/embed_dropdown_spec.js b/spec/frontend/snippets/components/embed_dropdown_spec.js
index 389b1c618a3..ed5ea6cab8a 100644
--- a/spec/frontend/snippets/components/embed_dropdown_spec.js
+++ b/spec/frontend/snippets/components/embed_dropdown_spec.js
@@ -36,7 +36,7 @@ describe('snippets/components/embed_dropdown', () => {
sections.push(current);
} else {
- const value = x.find(GlFormInputGroup).props('value');
+ const value = x.findComponent(GlFormInputGroup).props('value');
const copyValue = x.find('button[title="Copy"]').attributes('data-clipboard-text');
Object.assign(current, {
diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
index 7ea27864519..33b8e2be969 100644
--- a/spec/frontend/snippets/components/snippet_blob_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
@@ -4,7 +4,7 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { joinPaths } from '~/lib/utils/url_utility';
import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
@@ -46,9 +46,9 @@ describe('Snippet Blob Edit component', () => {
});
};
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findHeader = () => wrapper.find(BlobHeaderEdit);
- const findContent = () => wrapper.find(SourceEditor);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findHeader = () => wrapper.findComponent(BlobHeaderEdit);
+ const findContent = () => wrapper.findComponent(SourceEditor);
const getLastUpdatedArgs = () => {
const event = wrapper.emitted()['blob-updated'];
@@ -125,7 +125,7 @@ describe('Snippet Blob Edit component', () => {
it('should call flash', async () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: "Can't fetch content for the blob: Error: Request failed with status code 500",
});
});
diff --git a/spec/frontend/snippets/components/snippet_blob_view_spec.js b/spec/frontend/snippets/components/snippet_blob_view_spec.js
index aa31377f390..c7ff8c21d80 100644
--- a/spec/frontend/snippets/components/snippet_blob_view_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_view_spec.js
@@ -69,13 +69,13 @@ describe('Blob Embeddable', () => {
describe('rendering', () => {
it('renders correct components', () => {
createComponent();
- expect(wrapper.find(BlobHeader).exists()).toBe(true);
- expect(wrapper.find(BlobContent).exists()).toBe(true);
+ expect(wrapper.findComponent(BlobHeader).exists()).toBe(true);
+ expect(wrapper.findComponent(BlobContent).exists()).toBe(true);
});
it('sets simple viewer correctly', () => {
createComponent();
- expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(true);
});
it('sets rich viewer correctly', () => {
@@ -83,20 +83,20 @@ describe('Blob Embeddable', () => {
createComponent({
data,
});
- expect(wrapper.find(RichViewer).exists()).toBe(true);
+ expect(wrapper.findComponent(RichViewer).exists()).toBe(true);
});
it('correctly switches viewer type', async () => {
createComponent();
- expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(true);
wrapper.vm.switchViewer(RichViewerMock.type);
await nextTick();
- expect(wrapper.find(RichViewer).exists()).toBe(true);
+ expect(wrapper.findComponent(RichViewer).exists()).toBe(true);
await wrapper.vm.switchViewer(SimpleViewerMock.type);
- expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(true);
});
it('passes information about render error down to blob header', () => {
@@ -110,7 +110,7 @@ describe('Blob Embeddable', () => {
},
});
- expect(wrapper.find(BlobHeader).props('hasRenderError')).toBe(true);
+ expect(wrapper.findComponent(BlobHeader).props('hasRenderError')).toBe(true);
});
describe('bob content in multi-file scenario', () => {
@@ -161,7 +161,7 @@ describe('Blob Embeddable', () => {
await nextTick();
- const findContent = () => wrapper.find(BlobContent);
+ const findContent = () => wrapper.findComponent(BlobContent);
expect(findContent().props('content')).toBe(expectedContent);
},
@@ -169,36 +169,69 @@ describe('Blob Embeddable', () => {
});
describe('URLS with hash', () => {
- beforeEach(() => {
- window.location.hash = '#LC2';
- });
-
afterEach(() => {
window.location.hash = '';
});
- it('renders simple viewer by default if URL contains hash', () => {
- createComponent({
- data: {},
+ describe('if hash starts with #LC', () => {
+ beforeEach(() => {
+ window.location.hash = '#LC2';
+ });
+
+ it('renders simple viewer by default', () => {
+ createComponent({
+ data: {},
+ });
+
+ expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(true);
});
- expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
- expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ describe('switchViewer()', () => {
+ it('switches to the passed viewer', async () => {
+ createComponent();
+
+ wrapper.vm.switchViewer(RichViewerMock.type);
+
+ await nextTick();
+ expect(wrapper.vm.activeViewerType).toBe(RichViewerMock.type);
+ expect(wrapper.findComponent(RichViewer).exists()).toBe(true);
+
+ await wrapper.vm.switchViewer(SimpleViewerMock.type);
+ expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(true);
+ });
+ });
});
- describe('switchViewer()', () => {
- it('switches to the passed viewer', async () => {
- createComponent();
+ describe('if hash starts with anything else', () => {
+ beforeEach(() => {
+ window.location.hash = '#last-headline';
+ });
- wrapper.vm.switchViewer(RichViewerMock.type);
+ it('renders rich viewer by default', () => {
+ createComponent({
+ data: {},
+ });
- await nextTick();
expect(wrapper.vm.activeViewerType).toBe(RichViewerMock.type);
- expect(wrapper.find(RichViewer).exists()).toBe(true);
+ expect(wrapper.findComponent(RichViewer).exists()).toBe(true);
+ });
- await wrapper.vm.switchViewer(SimpleViewerMock.type);
- expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
- expect(wrapper.find(SimpleViewer).exists()).toBe(true);
+ describe('switchViewer()', () => {
+ it('switches to the passed viewer', async () => {
+ createComponent();
+
+ wrapper.vm.switchViewer(SimpleViewerMock.type);
+
+ await nextTick();
+ expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(true);
+
+ await wrapper.vm.switchViewer(RichViewerMock.type);
+ expect(wrapper.vm.activeViewerType).toBe(RichViewerMock.type);
+ expect(wrapper.findComponent(RichViewer).exists()).toBe(true);
+ });
});
});
});
@@ -206,7 +239,7 @@ describe('Blob Embeddable', () => {
describe('functionality', () => {
describe('render error', () => {
- const findContentEl = () => wrapper.find(BlobContent);
+ const findContentEl = () => wrapper.findComponent(BlobContent);
it('correctly sets blob on the blob-content-error component', () => {
createComponent();
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index b750225a383..c930c9f635b 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -10,7 +10,7 @@ import { differenceInMilliseconds } from '~/lib/utils/datetime_utility';
import SnippetHeader, { i18n } from '~/snippets/components/snippet_header.vue';
import DeleteSnippetMutation from '~/snippets/mutations/delete_snippet.mutation.graphql';
import axios from '~/lib/utils/axios_utils';
-import createFlash, { FLASH_TYPES } from '~/flash';
+import { createAlert, VARIANT_DANGER, VARIANT_SUCCESS } from '~/flash';
jest.mock('~/flash');
@@ -267,9 +267,9 @@ describe('Snippet header component', () => {
});
it.each`
- request | variant | text
- ${200} | ${'SUCCESS'} | ${i18n.snippetSpamSuccess}
- ${500} | ${'DANGER'} | ${i18n.snippetSpamFailure}
+ request | variant | text
+ ${200} | ${VARIANT_SUCCESS} | ${i18n.snippetSpamSuccess}
+ ${500} | ${VARIANT_DANGER} | ${i18n.snippetSpamFailure}
`(
'renders a "$variant" flash message with "$text" message for a request with a "$request" response',
async ({ request, variant, text }) => {
@@ -278,9 +278,9 @@ describe('Snippet header component', () => {
submitAsSpamBtn.trigger('click');
await waitForPromises();
- expect(createFlash).toHaveBeenLastCalledWith({
+ expect(createAlert).toHaveBeenLastCalledWith({
message: expect.stringContaining(text),
- type: FLASH_TYPES[variant],
+ variant,
});
},
);
@@ -311,7 +311,7 @@ describe('Snippet header component', () => {
it('renders modal for deletion of a snippet', () => {
createComponent();
- expect(wrapper.find(GlModal).exists()).toBe(true);
+ expect(wrapper.findComponent(GlModal).exists()).toBe(true);
});
it.each`
diff --git a/spec/frontend/snippets/components/snippet_title_spec.js b/spec/frontend/snippets/components/snippet_title_spec.js
index 48fb51ce703..7c40735d64e 100644
--- a/spec/frontend/snippets/components/snippet_title_spec.js
+++ b/spec/frontend/snippets/components/snippet_title_spec.js
@@ -39,12 +39,12 @@ describe('Snippet header component', () => {
createComponent();
expect(wrapper.text().trim()).toContain(title);
- expect(wrapper.find(SnippetDescription).props('description')).toBe(descriptionHtml);
+ expect(wrapper.findComponent(SnippetDescription).props('description')).toBe(descriptionHtml);
});
it('does not render recent changes time stamp if there were no updates', () => {
createComponent();
- expect(wrapper.find(GlSprintf).exists()).toBe(false);
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(false);
});
it('does not render recent changes time stamp if the time for creation and updates match', () => {
@@ -57,7 +57,7 @@ describe('Snippet header component', () => {
});
createComponent({ props });
- expect(wrapper.find(GlSprintf).exists()).toBe(false);
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(false);
});
it('renders translated string with most recent changes timestamp if changes were made', () => {
@@ -70,6 +70,6 @@ describe('Snippet header component', () => {
});
createComponent({ props });
- expect(wrapper.find(GlSprintf).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(true);
});
});
diff --git a/spec/frontend/snippets/components/snippet_visibility_edit_spec.js b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
index 2d043a5caba..29eb002ef4a 100644
--- a/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
@@ -39,13 +39,13 @@ describe('Snippet Visibility Edit component', () => {
});
}
- const findLink = () => wrapper.find('label').find(GlLink);
- const findRadios = () => wrapper.find(GlFormRadioGroup).findAllComponents(GlFormRadio);
+ const findLink = () => wrapper.find('label').findComponent(GlLink);
+ const findRadios = () => wrapper.findComponent(GlFormRadioGroup).findAllComponents(GlFormRadio);
const findRadiosData = () =>
findRadios().wrappers.map((x) => {
return {
value: x.find('input').attributes('value'),
- icon: x.find(GlIcon).props('name'),
+ icon: x.findComponent(GlIcon).props('name'),
description: x.find('.help-text').text(),
text: x.find('.js-visibility-option').text(),
};
@@ -147,7 +147,7 @@ describe('Snippet Visibility Edit component', () => {
createComponent({ propsData: { value } });
- expect(wrapper.find(GlFormRadioGroup).attributes('checked')).toBe(value);
+ expect(wrapper.findComponent(GlFormRadioGroup).attributes('checked')).toBe(value);
});
});
});
diff --git a/spec/frontend/terms/components/app_spec.js b/spec/frontend/terms/components/app_spec.js
index ee78b35843a..f1dbc004da8 100644
--- a/spec/frontend/terms/components/app_spec.js
+++ b/spec/frontend/terms/components/app_spec.js
@@ -74,7 +74,7 @@ describe('TermsApp', () => {
expect(findButton(defaultProvide.paths.accept).attributes('disabled')).toBe('disabled');
- wrapper.find(GlIntersectionObserver).vm.$emit('appear');
+ wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear');
await nextTick();
diff --git a/spec/frontend/terraform/components/states_table_spec.js b/spec/frontend/terraform/components/states_table_spec.js
index 12a44452717..0b3b169891b 100644
--- a/spec/frontend/terraform/components/states_table_spec.js
+++ b/spec/frontend/terraform/components/states_table_spec.js
@@ -160,8 +160,8 @@ describe('StatesTable', () => {
const state = states.at(lineNumber);
expect(state.text()).toContain(name);
- expect(state.find(GlBadge).exists()).toBe(hasBadge);
- expect(state.find(GlLoadingIcon).exists()).toBe(loading);
+ expect(state.findComponent(GlBadge).exists()).toBe(hasBadge);
+ expect(state.findComponent(GlLoadingIcon).exists()).toBe(loading);
if (hasBadge) {
const badge = wrapper.findByTestId(`state-badge-${name}`);
@@ -198,7 +198,7 @@ describe('StatesTable', () => {
const states = wrapper.findAll('[data-testid="terraform-states-table-pipeline"]');
const state = states.at(lineNumber);
- expect(state.find(GlTooltip).exists()).toBe(toolTipAdded);
+ expect(state.findComponent(GlTooltip).exists()).toBe(toolTipAdded);
expect(state.text()).toMatchInterpolatedText(pipelineText);
},
);
diff --git a/spec/frontend/terraform/components/terraform_list_spec.js b/spec/frontend/terraform/components/terraform_list_spec.js
index cfd82768098..580951e799a 100644
--- a/spec/frontend/terraform/components/terraform_list_spec.js
+++ b/spec/frontend/terraform/components/terraform_list_spec.js
@@ -57,11 +57,11 @@ describe('TerraformList', () => {
});
};
- const findBadge = () => wrapper.find(GlBadge);
- const findEmptyState = () => wrapper.find(EmptyState);
- const findPaginationButtons = () => wrapper.find(GlKeysetPagination);
- const findStatesTable = () => wrapper.find(StatesTable);
- const findTab = () => wrapper.find(GlTab);
+ const findBadge = () => wrapper.findComponent(GlBadge);
+ const findEmptyState = () => wrapper.findComponent(EmptyState);
+ const findPaginationButtons = () => wrapper.findComponent(GlKeysetPagination);
+ const findStatesTable = () => wrapper.findComponent(StatesTable);
+ const findTab = () => wrapper.findComponent(GlTab);
afterEach(() => {
wrapper.destroy();
@@ -182,7 +182,7 @@ describe('TerraformList', () => {
});
it('displays an alert message', () => {
- expect(wrapper.find(GlAlert).exists()).toBe(true);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(true);
});
});
@@ -195,7 +195,7 @@ describe('TerraformList', () => {
});
it('displays a loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/toggles/index_spec.js b/spec/frontend/toggles/index_spec.js
index 19c4d6f1f1d..f8c43e0ad0c 100644
--- a/spec/frontend/toggles/index_spec.js
+++ b/spec/frontend/toggles/index_spec.js
@@ -83,12 +83,12 @@ describe('toggles/index.js', () => {
expect(listener).toHaveBeenCalledTimes(0);
- wrapper.find(GlToggle).vm.$emit(event, true);
+ wrapper.findComponent(GlToggle).vm.$emit(event, true);
expect(listener).toHaveBeenCalledTimes(1);
expect(listener).toHaveBeenLastCalledWith(true);
- wrapper.find(GlToggle).vm.$emit(event, false);
+ wrapper.findComponent(GlToggle).vm.$emit(event, false);
expect(listener).toHaveBeenCalledTimes(2);
expect(listener).toHaveBeenLastCalledWith(false);
diff --git a/spec/frontend/token_access/token_access_spec.js b/spec/frontend/token_access/token_access_spec.js
index 024e7dfff8c..c55ac32b6a6 100644
--- a/spec/frontend/token_access/token_access_spec.js
+++ b/spec/frontend/token_access/token_access_spec.js
@@ -4,7 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import TokenAccess from '~/token_access/components/token_access.vue';
import addProjectCIJobTokenScopeMutation from '~/token_access/graphql/mutations/add_project_ci_job_token_scope.mutation.graphql';
import removeProjectCIJobTokenScopeMutation from '~/token_access/graphql/mutations/remove_project_ci_job_token_scope.mutation.graphql';
@@ -40,7 +40,7 @@ describe('TokenAccess component', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAddProjectBtn = () => wrapper.findByRole('button', { name: 'Add project' });
const findRemoveProjectBtn = () => wrapper.findByRole('button', { name: 'Remove access' });
- const findTokenSection = () => wrapper.find('[data-testid="token-section"]');
+ const findTokenDisabledAlert = () => wrapper.findByTestId('token-disabled-alert');
const createMockApolloProvider = (requestHandlers) => {
return createMockApollo(requestHandlers);
@@ -80,7 +80,7 @@ describe('TokenAccess component', () => {
});
describe('toggle', () => {
- it('the toggle should be enabled and the token section should show', async () => {
+ it('the toggle is on and the alert is hidden', async () => {
createComponent([
[getCIJobTokenScopeQuery, enabledJobTokenScopeHandler],
[getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
@@ -89,10 +89,10 @@ describe('TokenAccess component', () => {
await waitForPromises();
expect(findToggle().props('value')).toBe(true);
- expect(findTokenSection().exists()).toBe(true);
+ expect(findTokenDisabledAlert().exists()).toBe(false);
});
- it('the toggle should be disabled and the token section should show', async () => {
+ it('the toggle is off and the alert is visible', async () => {
createComponent([
[getCIJobTokenScopeQuery, disabledJobTokenScopeHandler],
[getProjectsWithCIJobTokenScopeQuery, getProjectsWithScope],
@@ -101,7 +101,7 @@ describe('TokenAccess component', () => {
await waitForPromises();
expect(findToggle().props('value')).toBe(false);
- expect(findTokenSection().exists()).toBe(true);
+ expect(findTokenDisabledAlert().exists()).toBe(true);
});
});
@@ -144,7 +144,7 @@ describe('TokenAccess component', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -187,7 +187,7 @@ describe('TokenAccess component', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/tooltips/components/tooltips_spec.js b/spec/frontend/tooltips/components/tooltips_spec.js
index 998bb2a9ea2..d5a63a99601 100644
--- a/spec/frontend/tooltips/components/tooltips_spec.js
+++ b/spec/frontend/tooltips/components/tooltips_spec.js
@@ -49,7 +49,7 @@ describe('tooltips/components/tooltips.vue', () => {
await nextTick();
- expect(wrapper.find(GlTooltip).props('target')).toBe(target);
+ expect(wrapper.findComponent(GlTooltip).props('target')).toBe(target);
});
it('does not attach a tooltip to a target with empty title', async () => {
@@ -59,7 +59,7 @@ describe('tooltips/components/tooltips.vue', () => {
await nextTick();
- expect(wrapper.find(GlTooltip).exists()).toBe(false);
+ expect(wrapper.findComponent(GlTooltip).exists()).toBe(false);
});
it('does not attach a tooltip twice to the same element', async () => {
@@ -76,7 +76,7 @@ describe('tooltips/components/tooltips.vue', () => {
await nextTick();
- expect(wrapper.find(GlTooltip).text()).toBe(target.getAttribute('title'));
+ expect(wrapper.findComponent(GlTooltip).text()).toBe(target.getAttribute('title'));
});
it('supports HTML content', async () => {
@@ -88,7 +88,7 @@ describe('tooltips/components/tooltips.vue', () => {
await nextTick();
- expect(wrapper.find(GlTooltip).html()).toContain(target.getAttribute('title'));
+ expect(wrapper.findComponent(GlTooltip).html()).toContain(target.getAttribute('title'));
});
it('sets the configuration values passed in the config object', async () => {
@@ -96,7 +96,7 @@ describe('tooltips/components/tooltips.vue', () => {
target = createTooltipTarget();
wrapper.vm.addTooltips([target], config);
await nextTick();
- expect(wrapper.find(GlTooltip).props()).toMatchObject(config);
+ expect(wrapper.findComponent(GlTooltip).props()).toMatchObject(config);
});
it.each`
@@ -113,7 +113,7 @@ describe('tooltips/components/tooltips.vue', () => {
await nextTick();
- expect(wrapper.find(GlTooltip).props(prop)).toBe(value);
+ expect(wrapper.findComponent(GlTooltip).props(prop)).toBe(value);
},
);
});
@@ -180,7 +180,7 @@ describe('tooltips/components/tooltips.vue', () => {
wrapper.vm.triggerEvent(target, event);
- expect(wrapper.find(GlTooltip).emitted(event)).toHaveLength(1);
+ expect(wrapper.findComponent(GlTooltip).emitted(event)).toHaveLength(1);
});
});
@@ -198,14 +198,14 @@ describe('tooltips/components/tooltips.vue', () => {
await nextTick();
- expect(wrapper.find(GlTooltip).text()).toBe(currentTitle);
+ expect(wrapper.findComponent(GlTooltip).text()).toBe(currentTitle);
target.setAttribute('title', newTitle);
wrapper.vm.fixTitle(target);
await nextTick();
- expect(wrapper.find(GlTooltip).text()).toBe(newTitle);
+ expect(wrapper.findComponent(GlTooltip).text()).toBe(newTitle);
});
});
diff --git a/spec/frontend/user_lists/components/edit_user_list_spec.js b/spec/frontend/user_lists/components/edit_user_list_spec.js
index 941c8244247..5f067d9de3c 100644
--- a/spec/frontend/user_lists/components/edit_user_list_spec.js
+++ b/spec/frontend/user_lists/components/edit_user_list_spec.js
@@ -47,7 +47,7 @@ describe('user_lists/components/edit_user_list', () => {
});
it('should show a loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
@@ -60,7 +60,7 @@ describe('user_lists/components/edit_user_list', () => {
factory();
await waitForPromises();
- alert = wrapper.find(GlAlert);
+ alert = wrapper.findComponent(GlAlert);
});
it('should show a flash with the error respopnse', () => {
@@ -72,7 +72,7 @@ describe('user_lists/components/edit_user_list', () => {
});
it('should not show a user list form', () => {
- expect(wrapper.find(UserListForm).exists()).toBe(false);
+ expect(wrapper.findComponent(UserListForm).exists()).toBe(false);
});
});
@@ -129,7 +129,7 @@ describe('user_lists/components/edit_user_list', () => {
clickSave();
await waitForPromises();
- alert = wrapper.find(GlAlert);
+ alert = wrapper.findComponent(GlAlert);
});
it('should show a flash with the error respopnse', () => {
diff --git a/spec/frontend/user_lists/components/new_user_list_spec.js b/spec/frontend/user_lists/components/new_user_list_spec.js
index ace4a284347..8683cf2463c 100644
--- a/spec/frontend/user_lists/components/new_user_list_spec.js
+++ b/spec/frontend/user_lists/components/new_user_list_spec.js
@@ -72,7 +72,7 @@ describe('user_lists/components/new_user_list', () => {
await waitForPromises();
- alert = wrapper.find(GlAlert);
+ alert = wrapper.findComponent(GlAlert);
});
it('should show a flash with the error respopnse', () => {
diff --git a/spec/frontend/user_lists/components/user_list_spec.js b/spec/frontend/user_lists/components/user_list_spec.js
index f126c733dd5..e02862cad2b 100644
--- a/spec/frontend/user_lists/components/user_list_spec.js
+++ b/spec/frontend/user_lists/components/user_list_spec.js
@@ -50,7 +50,7 @@ describe('User List', () => {
});
it('shows a loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
@@ -157,7 +157,7 @@ describe('User List', () => {
});
describe('error', () => {
- const findAlert = () => wrapper.find(GlAlert);
+ const findAlert = () => wrapper.findComponent(GlAlert);
beforeEach(async () => {
Api.fetchFeatureFlagUserList.mockRejectedValue();
@@ -190,7 +190,7 @@ describe('User List', () => {
});
it('displays an empty state', () => {
- expect(wrapper.find(GlEmptyState).exists()).toBe(true);
+ expect(wrapper.findComponent(GlEmptyState).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/user_lists/components/user_lists_table_spec.js b/spec/frontend/user_lists/components/user_lists_table_spec.js
index fb5093eb065..3324b040b86 100644
--- a/spec/frontend/user_lists/components/user_lists_table_spec.js
+++ b/spec/frontend/user_lists/components/user_lists_table_spec.js
@@ -59,7 +59,7 @@ describe('User Lists Table', () => {
describe('delete button', () => {
it('should display the confirmation modal', async () => {
- const modal = wrapper.find(GlModal);
+ const modal = wrapper.findComponent(GlModal);
wrapper.find('[data-testid="delete-user-list"]').trigger('click');
@@ -73,7 +73,7 @@ describe('User Lists Table', () => {
let modal;
beforeEach(async () => {
- modal = wrapper.find(GlModal);
+ modal = wrapper.findComponent(GlModal);
wrapper.find('button').trigger('click');
diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js
index 0530569c9df..8ce071c075f 100644
--- a/spec/frontend/user_popovers_spec.js
+++ b/spec/frontend/user_popovers_spec.js
@@ -188,8 +188,8 @@ describe('User Popovers', () => {
});
it('removes title attribute from user links', () => {
- expect(userLink.getAttribute('title')).toBeFalsy();
- expect(userLink.dataset.originalTitle).toBeFalsy();
+ expect(userLink.getAttribute('title')).toBe('');
+ expect(userLink.dataset.originalTitle).toBe('');
});
it('fetches user info and status from the user cache', () => {
diff --git a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
index 05cd1bb5b3d..1f3b6dce620 100644
--- a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
@@ -1,7 +1,7 @@
import { nextTick } from 'vue';
import { GlButton, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import Approvals from '~/vue_merge_request_widget/components/approvals/approvals.vue';
import ApprovalsSummary from '~/vue_merge_request_widget/components/approvals/approvals_summary.vue';
import ApprovalsSummaryOptional from '~/vue_merge_request_widget/components/approvals/approvals_summary_optional.vue';
@@ -49,7 +49,7 @@ describe('MRWidget approvals', () => {
});
};
- const findAction = () => wrapper.find(GlButton);
+ const findAction = () => wrapper.findComponent(GlButton);
const findActionData = () => {
const action = findAction();
@@ -61,8 +61,8 @@ describe('MRWidget approvals', () => {
text: action.text(),
};
};
- const findSummary = () => wrapper.find(ApprovalsSummary);
- const findOptionalSummary = () => wrapper.find(ApprovalsSummaryOptional);
+ const findSummary = () => wrapper.findComponent(ApprovalsSummary);
+ const findOptionalSummary = () => wrapper.findComponent(ApprovalsSummaryOptional);
const findInvalidRules = () => wrapper.find('[data-testid="invalid-rules"]');
beforeEach(() => {
@@ -129,7 +129,7 @@ describe('MRWidget approvals', () => {
});
it('flashes error', () => {
- expect(createFlash).toHaveBeenCalledWith({ message: FETCH_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: FETCH_ERROR });
});
});
@@ -268,7 +268,7 @@ describe('MRWidget approvals', () => {
});
it('flashes error message', () => {
- expect(createFlash).toHaveBeenCalledWith({ message: APPROVE_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: APPROVE_ERROR });
});
});
});
@@ -319,7 +319,7 @@ describe('MRWidget approvals', () => {
});
it('flashes error message', () => {
- expect(createFlash).toHaveBeenCalledWith({ message: UNAPPROVE_ERROR });
+ expect(createAlert).toHaveBeenCalledWith({ message: UNAPPROVE_ERROR });
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js
index 65cafc647e0..e6fb0495947 100644
--- a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js
@@ -18,7 +18,7 @@ describe('MRWidget approvals summary optional', () => {
wrapper = null;
});
- const findHelpLink = () => wrapper.find(GlLink);
+ const findHelpLink = () => wrapper.findComponent(GlLink);
describe('when can approve', () => {
beforeEach(() => {
diff --git a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js
index c2606346292..f4234083346 100644
--- a/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js
@@ -29,7 +29,7 @@ describe('MRWidget approvals summary', () => {
});
};
- const findAvatars = () => wrapper.find(UserAvatarList);
+ const findAvatars = () => wrapper.findComponent(UserAvatarList);
afterEach(() => {
wrapper.destroy();
@@ -136,7 +136,7 @@ describe('MRWidget approvals summary', () => {
});
it('does not render avatar list', () => {
- expect(wrapper.find(UserAvatarList).exists()).toBe(false);
+ expect(wrapper.findComponent(UserAvatarList).exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
index e2386bc7f2b..73fa4b7b08f 100644
--- a/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
@@ -60,7 +60,7 @@ describe('Merge Requests Artifacts list app', () => {
});
it('renders a loading icon', () => {
- const loadingIcon = wrapper.find(GlLoadingIcon);
+ const loadingIcon = wrapper.findComponent(GlLoadingIcon);
expect(loadingIcon.exists()).toBe(true);
});
diff --git a/spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js b/spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js
index d519ad2cdb0..b7bf72cd215 100644
--- a/spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js
@@ -31,11 +31,11 @@ describe('Artifacts List', () => {
});
it('renders link for the artifact', () => {
- expect(wrapper.find(GlLink).attributes('href')).toEqual(data.artifacts[0].url);
+ expect(wrapper.findComponent(GlLink).attributes('href')).toEqual(data.artifacts[0].url);
});
it('renders artifact name', () => {
- expect(wrapper.find(GlLink).text()).toEqual(data.artifacts[0].text);
+ expect(wrapper.findComponent(GlLink).text()).toEqual(data.artifacts[0].text);
});
it('renders job url', () => {
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
index 01fbcb2154f..c253dc63f23 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
@@ -23,7 +23,7 @@ describe('Merge Request Collapsible Extension', () => {
const findTitle = () => wrapper.find('[data-testid="mr-collapsible-title"]');
const findErrorMessage = () => wrapper.find('.js-error-state');
- const findIcon = () => wrapper.find(GlIcon);
+ const findIcon = () => wrapper.findComponent(GlIcon);
afterEach(() => {
wrapper.destroy();
@@ -77,7 +77,7 @@ describe('Merge Request Collapsible Extension', () => {
});
it('renders loading spinner', () => {
- expect(wrapper.find(GlLoadingIcon).isVisible()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).isVisible()).toBe(true);
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js
index 8fd93809e01..90a29d15488 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js
@@ -32,7 +32,9 @@ describe('MrWidgetAuthorTime', () => {
});
it('renders author', () => {
- expect(wrapper.find(MrWidgetAuthor).props('author')).toStrictEqual(defaultProps.author);
+ expect(wrapper.findComponent(MrWidgetAuthor).props('author')).toStrictEqual(
+ defaultProps.author,
+ );
});
it('renders provided time', () => {
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
index 631aef412a6..8eaed998eb5 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
@@ -6,8 +6,8 @@ import MrCollapsibleSection from '~/vue_merge_request_widget/components/mr_widge
describe('MrWidgetExpanableSection', () => {
let wrapper;
- const findButton = () => wrapper.find(GlButton);
- const findCollapse = () => wrapper.find(GlCollapse);
+ const findButton = () => wrapper.findComponent(GlButton);
+ const findCollapse = () => wrapper.findComponent(GlCollapse);
beforeEach(() => {
wrapper = shallowMount(MrCollapsibleSection, {
@@ -19,7 +19,7 @@ describe('MrWidgetExpanableSection', () => {
});
it('renders Icon', () => {
- expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
});
it('renders header slot', () => {
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js
index ebd10f31fa7..6a9b019fb4f 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js
@@ -21,6 +21,6 @@ describe('MrWidgetIcon', () => {
it('renders icon and container', () => {
expect(wrapper.element.className).toContain('circle-icon-container');
- expect(wrapper.find(GlIcon).props('name')).toEqual(TEST_ICON);
+ expect(wrapper.findComponent(GlIcon).props('name')).toEqual(TEST_ICON);
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
index efe2bf75c3f..c3f6331e560 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
@@ -41,8 +41,8 @@ describe('MrWidgetPipelineContainer', () => {
});
it('renders pipeline', () => {
- expect(wrapper.find(MrWidgetPipeline).exists()).toBe(true);
- expect(wrapper.find(MrWidgetPipeline).props()).toMatchObject({
+ expect(wrapper.findComponent(MrWidgetPipeline).exists()).toBe(true);
+ expect(wrapper.findComponent(MrWidgetPipeline).props()).toMatchObject({
pipeline: mockStore.pipeline,
pipelineCoverageDelta: mockStore.pipelineCoverageDelta,
ciStatus: mockStore.ciStatus,
@@ -82,9 +82,9 @@ describe('MrWidgetPipelineContainer', () => {
});
it('renders pipeline', () => {
- expect(wrapper.find(MrWidgetPipeline).exists()).toBe(true);
+ expect(wrapper.findComponent(MrWidgetPipeline).exists()).toBe(true);
expect(findCIErrorMessage().exists()).toBe(false);
- expect(wrapper.find(MrWidgetPipeline).props()).toMatchObject({
+ expect(wrapper.findComponent(MrWidgetPipeline).props()).toMatchObject({
pipeline: mockStore.mergePipeline,
pipelineCoverageDelta: mockStore.pipelineCoverageDelta,
ciStatus: mockStore.mergePipeline.details.status.text,
@@ -102,7 +102,7 @@ describe('MrWidgetPipelineContainer', () => {
targetBranch: 'Foo<script>alert("XSS")</script>',
},
});
- expect(wrapper.find(MrWidgetPipeline).props().sourceBranchLink).toBe('Foo');
+ expect(wrapper.findComponent(MrWidgetPipeline).props().sourceBranchLink).toBe('Foo');
});
it('renders deployments', () => {
@@ -125,7 +125,7 @@ describe('MrWidgetPipelineContainer', () => {
it('renders the artifacts app', () => {
factory();
- expect(wrapper.find(ArtifactsApp).isVisible()).toBe(true);
+ expect(wrapper.findComponent(ArtifactsApp).isVisible()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js
index d6c67dab381..73358edee78 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js
@@ -57,7 +57,7 @@ describe('MRWidgetSuggestPipeline', () => {
});
it('renders widget icon', () => {
- const icon = wrapper.find(MrWidgetIcon);
+ const icon = wrapper.findComponent(MrWidgetIcon);
expect(icon.exists()).toBe(true);
expect(icon.props()).toEqual(
@@ -115,7 +115,7 @@ describe('MRWidgetSuggestPipeline', () => {
});
describe('dismissible', () => {
- const findDismissContainer = () => wrapper.find(dismissibleContainer);
+ const findDismissContainer = () => wrapper.findComponent(dismissibleContainer);
beforeEach(() => {
wrapper = shallowMount(suggestPipelineComponent, { propsData: suggestProps });
diff --git a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
index 635ef0f6b0d..5f383c468d8 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
@@ -72,11 +72,14 @@ exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have
<div
class="gl-display-flex gl-md-display-block gl-font-size-0 gl-ml-auto"
>
- <div>
+ <div
+ class="gl-display-flex gl-align-items-flex-start"
+ >
<div
class="dropdown b-dropdown gl-new-dropdown gl-display-block gl-md-display-none! btn-group"
lazy=""
no-caret=""
+ title="Options"
>
<!---->
<button
@@ -246,11 +249,14 @@ exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have c
<div
class="gl-display-flex gl-md-display-block gl-font-size-0 gl-ml-auto"
>
- <div>
+ <div
+ class="gl-display-flex gl-align-items-flex-start"
+ >
<div
class="dropdown b-dropdown gl-new-dropdown gl-display-block gl-md-display-none! btn-group"
lazy=""
no-caret=""
+ title="Options"
>
<!---->
<button
diff --git a/spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js
index 1900b53ac11..d85574262fe 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js
@@ -15,9 +15,9 @@ describe('Merge request widget merge checks failed state component', () => {
});
it.each`
- mrState | displayText
- ${{ approvals: true, isApproved: false }} | ${'approvalNeeded'}
- ${{ blockingMergeRequests: { total_count: 1 } }} | ${'blockingMergeRequests'}
+ mrState | displayText
+ ${{ approvals: true, isApproved: false }} | ${'approvalNeeded'}
+ ${{ detailedMergeStatus: 'BLOCKED_STATUS' }} | ${'blockingMergeRequests'}
`('display $displayText text for $mrState', ({ mrState, displayText }) => {
factory({ mr: mrState });
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js
index 9320e733636..398a3912882 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -7,7 +7,7 @@ import eventHub from '~/vue_merge_request_widget/event_hub';
describe('MRWidgetAutoMergeFailed', () => {
let wrapper;
const mergeError = 'This is the merge error';
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.findComponent(GlButton);
const createComponent = (props = {}, mergeRequestWidgetGraphql = false) => {
wrapper = mount(AutoMergeFailedComponent, {
@@ -61,7 +61,7 @@ describe('MRWidgetAutoMergeFailed', () => {
await nextTick();
expect(findButton().attributes('disabled')).toBe('disabled');
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
index 2606933450e..a3aa563b516 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
@@ -1,180 +1,172 @@
import { getByRole } from '@testing-library/dom';
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import { OPEN_REVERT_MODAL, OPEN_CHERRY_PICK_MODAL } from '~/projects/commit/constants';
import modalEventHub from '~/projects/commit/event_hub';
-import mergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged.vue';
+import MergedComponent from '~/vue_merge_request_widget/components/states/mr_widget_merged.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
describe('MRWidgetMerged', () => {
- let vm;
+ let wrapper;
const targetBranch = 'foo';
-
- beforeEach(() => {
- jest.spyOn(document, 'dispatchEvent');
- const Component = Vue.extend(mergedComponent);
- const mr = {
- isRemovingSourceBranch: false,
- cherryPickInForkPath: false,
- canCherryPickInCurrentMR: true,
- revertInForkPath: false,
- canRevertInCurrentMR: true,
- canRemoveSourceBranch: true,
- sourceBranchRemoved: true,
- metrics: {
- mergedBy: {
- name: 'Administrator',
- username: 'root',
- webUrl: 'http://localhost:3000/root',
- avatarUrl:
- 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ const mr = {
+ isRemovingSourceBranch: false,
+ cherryPickInForkPath: false,
+ canCherryPickInCurrentMR: true,
+ revertInForkPath: false,
+ canRevertInCurrentMR: true,
+ canRemoveSourceBranch: true,
+ sourceBranchRemoved: true,
+ metrics: {
+ mergedBy: {
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://localhost:3000/root',
+ avatarUrl:
+ 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ },
+ mergedAt: 'Jan 24, 2018 1:02pm UTC',
+ readableMergedAt: '',
+ closedBy: {},
+ closedAt: 'Jan 24, 2018 1:02pm UTC',
+ readableClosedAt: '',
+ },
+ updatedAt: 'mergedUpdatedAt',
+ shortMergeCommitSha: '958c0475',
+ mergeCommitSha: '958c047516e182dfc52317f721f696e8a1ee85ed',
+ mergeCommitPath:
+ 'http://localhost:3000/root/nautilus/commit/f7ce827c314c9340b075657fd61c789fb01cf74d',
+ sourceBranch: 'bar',
+ targetBranch,
+ };
+
+ const service = {
+ removeSourceBranch: () => nextTick(),
+ };
+
+ const createComponent = (customMrFields = {}) => {
+ wrapper = mount(MergedComponent, {
+ propsData: {
+ mr: {
+ ...mr,
+ ...customMrFields,
},
- mergedAt: 'Jan 24, 2018 1:02pm UTC',
- readableMergedAt: '',
- closedBy: {},
- closedAt: 'Jan 24, 2018 1:02pm UTC',
- readableClosedAt: '',
+ service,
},
- updatedAt: 'mergedUpdatedAt',
- shortMergeCommitSha: '958c0475',
- mergeCommitSha: '958c047516e182dfc52317f721f696e8a1ee85ed',
- mergeCommitPath:
- 'http://localhost:3000/root/nautilus/commit/f7ce827c314c9340b075657fd61c789fb01cf74d',
- sourceBranch: 'bar',
- targetBranch,
- };
-
- const service = {
- removeSourceBranch() {},
- };
+ });
+ };
+ beforeEach(() => {
+ jest.spyOn(document, 'dispatchEvent');
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
-
- vm = mountComponent(Component, { mr, service });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- describe('computed', () => {
- describe('shouldShowRemoveSourceBranch', () => {
- it('returns true when sourceBranchRemoved is false', () => {
- vm.mr.sourceBranchRemoved = false;
-
- expect(vm.shouldShowRemoveSourceBranch).toEqual(true);
- });
-
- it('returns false when sourceBranchRemoved is true', () => {
- vm.mr.sourceBranchRemoved = true;
-
- expect(vm.shouldShowRemoveSourceBranch).toEqual(false);
- });
-
- it('returns false when canRemoveSourceBranch is false', () => {
- vm.mr.sourceBranchRemoved = false;
- vm.mr.canRemoveSourceBranch = false;
-
- expect(vm.shouldShowRemoveSourceBranch).toEqual(false);
- });
-
- it('returns false when is making request', () => {
- vm.mr.canRemoveSourceBranch = true;
- vm.isMakingRequest = true;
-
- expect(vm.shouldShowRemoveSourceBranch).toEqual(false);
- });
+ const findButtonByText = (text) =>
+ wrapper.findAll('button').wrappers.find((w) => w.text() === text);
+ const findRemoveSourceBranchButton = () => findButtonByText('Delete source branch');
- it('returns true when all are true', () => {
- vm.mr.isRemovingSourceBranch = true;
- vm.mr.canRemoveSourceBranch = true;
- vm.isMakingRequest = true;
+ describe('remove source branch button', () => {
+ it('is displayed when sourceBranchRemoved is false', () => {
+ createComponent({ sourceBranchRemoved: false });
- expect(vm.shouldShowRemoveSourceBranch).toEqual(false);
- });
+ expect(findRemoveSourceBranchButton().exists()).toBe(true);
});
- describe('shouldShowSourceBranchRemoving', () => {
- it('should correct value when fields changed', () => {
- vm.mr.sourceBranchRemoved = false;
+ it('is not displayed when sourceBranchRemoved is true', () => {
+ createComponent({ sourceBranchRemoved: true });
- expect(vm.shouldShowSourceBranchRemoving).toEqual(false);
+ expect(findRemoveSourceBranchButton()).toBe(undefined);
+ });
- vm.mr.sourceBranchRemoved = true;
+ it('is not displayed when canRemoveSourceBranch is true', () => {
+ createComponent({ sourceBranchRemoved: false, canRemoveSourceBranch: false });
- expect(vm.shouldShowRemoveSourceBranch).toEqual(false);
+ expect(findRemoveSourceBranchButton()).toBe(undefined);
+ });
- vm.mr.sourceBranchRemoved = false;
- vm.isMakingRequest = true;
+ it('is not displayed when is making request', async () => {
+ createComponent({ sourceBranchRemoved: false, canRemoveSourceBranch: true });
- expect(vm.shouldShowSourceBranchRemoving).toEqual(true);
+ await findRemoveSourceBranchButton().trigger('click');
- vm.isMakingRequest = false;
- vm.mr.isRemovingSourceBranch = true;
+ expect(findRemoveSourceBranchButton()).toBe(undefined);
+ });
- expect(vm.shouldShowSourceBranchRemoving).toEqual(true);
+ it('is not displayed when all are true', () => {
+ createComponent({
+ isRemovingSourceBranch: true,
+ sourceBranchRemoved: false,
+ canRemoveSourceBranch: true,
});
+
+ expect(findRemoveSourceBranchButton()).toBe(undefined);
});
});
- describe('methods', () => {
- describe('removeSourceBranch', () => {
- it('should set flag and call service then request main component to update the widget', async () => {
- jest.spyOn(vm.service, 'removeSourceBranch').mockReturnValue(
- new Promise((resolve) => {
- resolve({
- data: {
- message: 'Branch was deleted',
- },
- });
- }),
- );
+ it('should set flag and call service then request main component to update the widget when branch is removed', async () => {
+ createComponent({ sourceBranchRemoved: false });
+ jest.spyOn(service, 'removeSourceBranch').mockResolvedValue({
+ data: {
+ message: 'Branch was deleted',
+ },
+ });
- vm.removeSourceBranch();
+ await findRemoveSourceBranchButton().trigger('click');
- await waitForPromises();
+ await waitForPromises();
- const args = eventHub.$emit.mock.calls[0];
+ const args = eventHub.$emit.mock.calls[0];
- expect(vm.isMakingRequest).toEqual(true);
- expect(args[0]).toEqual('MRWidgetUpdateRequested');
- expect(args[1]).not.toThrow();
- });
- });
+ expect(args[0]).toEqual('MRWidgetUpdateRequested');
+ expect(args[1]).not.toThrow();
});
it('calls dispatchDocumentEvent to load in the modal component', () => {
+ createComponent();
+
expect(document.dispatchEvent).toHaveBeenCalledWith(new CustomEvent('merged:UpdateActions'));
});
it('emits event to open the revert modal on revert button click', () => {
+ createComponent();
const eventHubSpy = jest.spyOn(modalEventHub, '$emit');
- getByRole(vm.$el, 'button', { name: /Revert/i }).click();
+ getByRole(wrapper.element, 'button', { name: /Revert/i }).click();
expect(eventHubSpy).toHaveBeenCalledWith(OPEN_REVERT_MODAL);
});
it('emits event to open the cherry-pick modal on cherry-pick button click', () => {
+ createComponent();
const eventHubSpy = jest.spyOn(modalEventHub, '$emit');
- getByRole(vm.$el, 'button', { name: /Cherry-pick/i }).click();
+ getByRole(wrapper.element, 'button', { name: /Cherry-pick/i }).click();
expect(eventHubSpy).toHaveBeenCalledWith(OPEN_CHERRY_PICK_MODAL);
});
it('has merged by information', () => {
- expect(vm.$el.textContent).toContain('Merged by');
- expect(vm.$el.textContent).toContain('Administrator');
+ createComponent();
+
+ expect(wrapper.text()).toContain('Merged by');
+ expect(wrapper.text()).toContain('Administrator');
});
it('shows revert and cherry-pick buttons', () => {
- expect(vm.$el.textContent).toContain('Revert');
- expect(vm.$el.textContent).toContain('Cherry-pick');
+ createComponent();
+
+ expect(wrapper.text()).toContain('Revert');
+ expect(wrapper.text()).toContain('Cherry-pick');
});
it('should use mergedEvent mergedAt as tooltip title', () => {
- expect(vm.$el.querySelector('time').getAttribute('title')).toBe('Jan 24, 2018 1:02pm UTC');
+ createComponent();
+
+ expect(wrapper.find('time').attributes('title')).toBe('Jan 24, 2018 1:02pm UTC');
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js
index d5619d4996d..bd158d59d74 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js
@@ -6,31 +6,42 @@ import StatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_i
describe('PipelineFailed', () => {
let wrapper;
- const createComponent = () => {
+ const createComponent = (mr = {}) => {
wrapper = shallowMount(PipelineFailed, {
+ propsData: {
+ mr,
+ },
stubs: {
GlSprintf,
},
});
};
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
it('should render error status icon', () => {
+ createComponent();
+
expect(wrapper.findComponent(StatusIcon).exists()).toBe(true);
expect(wrapper.findComponent(StatusIcon).props().status).toBe('failed');
});
it('should render error message with a disabled merge button', () => {
+ createComponent();
+
expect(wrapper.text()).toContain('Merge blocked: pipeline must succeed.');
expect(wrapper.text()).toContain('Push a commit that fixes the failure');
expect(wrapper.findComponent(GlLink).text()).toContain('learn about other solutions');
});
+
+ it('should render pipeline blocked message', () => {
+ createComponent({ isPipelineBlocked: true });
+
+ expect(wrapper.text()).toContain(
+ "Merge blocked: pipeline must succeed. It's waiting for a manual action to continue.",
+ );
+ });
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
index 9a6bf66909e..48d3f15560b 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -105,16 +105,17 @@ const createComponent = (
},
stubs: {
CommitEdit,
+ GlSprintf,
},
apolloProvider: createMockApollo([[readyToMergeQuery, readyToMergeResponseSpy]]),
});
};
-const findCheckboxElement = () => wrapper.find(SquashBeforeMerge);
+const findCheckboxElement = () => wrapper.findComponent(SquashBeforeMerge);
const findCommitEditElements = () => wrapper.findAllComponents(CommitEdit);
-const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
+const findCommitDropdownElement = () => wrapper.findComponent(CommitMessageDropdown);
const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label');
-const findTipLink = () => wrapper.find(GlSprintf);
+const findTipLink = () => wrapper.findComponent(GlSprintf);
const findCommitEditWithInputId = (inputId) =>
findCommitEditElements().wrappers.find((x) => x.props('inputId') === inputId);
const findMergeCommitMessage = () => findCommitEditWithInputId('merge-message-edit').props('value');
@@ -300,6 +301,48 @@ describe('ReadyToMerge', () => {
expect(wrapper.vm.isMergeButtonDisabled).toBe(true);
});
});
+
+ describe('sourceBranchDeletedText', () => {
+ const should = 'Source branch will be deleted.';
+ const shouldNot = 'Source branch will not be deleted.';
+ const did = 'Deleted the source branch.';
+ const didNot = 'Did not delete the source branch.';
+ const scenarios = [
+ "the MR hasn't merged yet, and the backend-provided value expects to delete the branch",
+ "the MR hasn't merged yet, and the backend-provided value expects to leave the branch",
+ "the MR hasn't merged yet, and the backend-provided value is a non-boolean falsey value",
+ "the MR hasn't merged yet, and the backend-provided value is a non-boolean truthy value",
+ 'the MR has been merged, and the backend reports that the branch has been removed',
+ 'the MR has been merged, and the backend reports that the branch has not been removed',
+ 'the MR has been merged, and the backend reports a non-boolean falsey value',
+ 'the MR has been merged, and the backend reports a non-boolean truthy value',
+ ];
+
+ it.each`
+ describe | premerge | mrShould | mrRemoved | output
+ ${scenarios[0]} | ${true} | ${true} | ${null} | ${should}
+ ${scenarios[1]} | ${true} | ${false} | ${null} | ${shouldNot}
+ ${scenarios[2]} | ${true} | ${null} | ${null} | ${shouldNot}
+ ${scenarios[3]} | ${true} | ${'yeah'} | ${null} | ${should}
+ ${scenarios[4]} | ${false} | ${null} | ${true} | ${did}
+ ${scenarios[5]} | ${false} | ${null} | ${false} | ${didNot}
+ ${scenarios[6]} | ${false} | ${null} | ${null} | ${didNot}
+ ${scenarios[7]} | ${false} | ${null} | ${'yep'} | ${did}
+ `(
+ 'in the case that $describe, returns "$output"',
+ ({ premerge, mrShould, mrRemoved, output }) => {
+ createComponent({
+ mr: {
+ state: !premerge ? 'merged' : 'literally-anything-else',
+ shouldRemoveSourceBranch: mrShould,
+ sourceBranchRemoved: mrRemoved,
+ },
+ });
+
+ expect(wrapper.vm.sourceBranchDeletedText).toBe(output);
+ },
+ );
+ });
});
describe('methods', () => {
@@ -733,6 +776,34 @@ describe('ReadyToMerge', () => {
});
});
+ describe('source and target branches diverged', () => {
+ describe('when the MR is showing the Merge button', () => {
+ it('does not display the diverged commits message if the source branch is not behind the target', () => {
+ createComponent({ mr: { divergedCommitsCount: 0 } });
+
+ const textBody = wrapper.text();
+
+ expect(textBody).toEqual(
+ expect.not.stringContaining('The source branch is 0 commits behind the target branch'),
+ );
+ expect(textBody).toEqual(
+ expect.not.stringContaining('The source branch is 0 commit behind the target branch'),
+ );
+ expect(textBody).toEqual(
+ expect.not.stringContaining('The source branch is behind the target branch'),
+ );
+ });
+
+ it('shows the diverged commits text when the source branch is behind the target', () => {
+ createComponent({ mr: { divergedCommitsCount: 9001, canMerge: false } });
+
+ expect(wrapper.text()).toEqual(
+ expect.stringContaining('The source branch is 9001 commits behind the target branch'),
+ );
+ });
+ });
+ });
+
describe('Merge button when pipeline has failed', () => {
beforeEach(() => {
createComponent({
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js
index 6ea2e8675d3..c839fa17fe5 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js
@@ -18,7 +18,7 @@ describe('Squash before merge component', () => {
wrapper.destroy();
});
- const findCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
describe('checkbox', () => {
it('is unchecked if passed value prop is false', () => {
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js
index af52901f508..7259f210b6e 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js
@@ -38,7 +38,7 @@ describe('Wip', () => {
it('should have default data', () => {
const vm = createComponent();
- expect(vm.isMakingRequest).toBeFalsy();
+ expect(vm.isMakingRequest).toBe(false);
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/terraform/mr_widget_terraform_container_spec.js b/spec/frontend/vue_merge_request_widget/components/terraform/mr_widget_terraform_container_spec.js
deleted file mode 100644
index 7a868eb8cc9..00000000000
--- a/spec/frontend/vue_merge_request_widget/components/terraform/mr_widget_terraform_container_spec.js
+++ /dev/null
@@ -1,175 +0,0 @@
-import { GlSkeletonLoader, GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import { nextTick } from 'vue';
-import axios from '~/lib/utils/axios_utils';
-import Poll from '~/lib/utils/poll';
-import MrWidgetExpanableSection from '~/vue_merge_request_widget/components/mr_widget_expandable_section.vue';
-import MrWidgetTerraformContainer from '~/vue_merge_request_widget/components/terraform/mr_widget_terraform_container.vue';
-import TerraformPlan from '~/vue_merge_request_widget/components/terraform/terraform_plan.vue';
-import { invalidPlanWithName, plans, validPlanWithName } from './mock_data';
-
-describe('MrWidgetTerraformConainer', () => {
- let mock;
- let wrapper;
-
- const propsData = { endpoint: '/path/to/terraform/report.json' };
-
- const findHeader = () => wrapper.find('[data-testid="terraform-header-text"]');
- const findPlans = () =>
- wrapper.findAllComponents(TerraformPlan).wrappers.map((x) => x.props('plan'));
-
- const mockPollingApi = (response, body, header) => {
- mock.onGet(propsData.endpoint).reply(response, body, header);
- };
-
- const mountWrapper = () => {
- wrapper = shallowMount(MrWidgetTerraformContainer, {
- propsData,
- stubs: { MrWidgetExpanableSection, GlSprintf },
- });
- return axios.waitForAll();
- };
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- wrapper.destroy();
- mock.restore();
- });
-
- describe('when data is loading', () => {
- beforeEach(async () => {
- mockPollingApi(200, plans, {});
-
- await mountWrapper();
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ loading: true });
- await nextTick();
- });
-
- it('diplays loading skeleton', () => {
- expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
- expect(wrapper.find(MrWidgetExpanableSection).exists()).toBe(false);
- });
- });
-
- describe('when data has finished loading', () => {
- beforeEach(() => {
- mockPollingApi(200, plans, {});
- return mountWrapper();
- });
-
- it('displays terraform content', () => {
- expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(false);
- expect(wrapper.find(MrWidgetExpanableSection).exists()).toBe(true);
- expect(findPlans()).toEqual(Object.values(plans));
- });
-
- describe('when data includes one invalid plan', () => {
- beforeEach(() => {
- const invalidPlanGroup = { bad_plan: invalidPlanWithName };
- mockPollingApi(200, invalidPlanGroup, {});
- return mountWrapper();
- });
-
- it('displays header text for one invalid plan', () => {
- expect(findHeader().text()).toBe('1 Terraform report failed to generate');
- });
- });
-
- describe('when data includes multiple invalid plans', () => {
- beforeEach(() => {
- const invalidPlanGroup = {
- bad_plan_one: invalidPlanWithName,
- bad_plan_two: invalidPlanWithName,
- };
-
- mockPollingApi(200, invalidPlanGroup, {});
- return mountWrapper();
- });
-
- it('displays header text for multiple invalid plans', () => {
- expect(findHeader().text()).toBe('2 Terraform reports failed to generate');
- });
- });
-
- describe('when data includes one valid plan', () => {
- beforeEach(() => {
- const validPlanGroup = { valid_plan: validPlanWithName };
- mockPollingApi(200, validPlanGroup, {});
- return mountWrapper();
- });
-
- it('displays header text for one valid plans', () => {
- expect(findHeader().text()).toBe('1 Terraform report was generated in your pipelines');
- });
- });
-
- describe('when data includes multiple valid plans', () => {
- beforeEach(() => {
- const validPlanGroup = {
- valid_plan_one: validPlanWithName,
- valid_plan_two: validPlanWithName,
- };
- mockPollingApi(200, validPlanGroup, {});
- return mountWrapper();
- });
-
- it('displays header text for multiple valid plans', () => {
- expect(findHeader().text()).toBe('2 Terraform reports were generated in your pipelines');
- });
- });
- });
-
- describe('polling', () => {
- let pollRequest;
- let pollStop;
-
- beforeEach(() => {
- pollRequest = jest.spyOn(Poll.prototype, 'makeRequest');
- pollStop = jest.spyOn(Poll.prototype, 'stop');
- });
-
- afterEach(() => {
- pollRequest.mockRestore();
- pollStop.mockRestore();
- });
-
- describe('successful poll', () => {
- beforeEach(() => {
- mockPollingApi(200, plans, {});
-
- return mountWrapper();
- });
-
- it('does not make additional requests after poll is successful', () => {
- expect(pollRequest).toHaveBeenCalledTimes(1);
- expect(pollStop).toHaveBeenCalledTimes(1);
- });
- });
-
- describe('polling fails', () => {
- beforeEach(() => {
- mockPollingApi(500, null, {});
- return mountWrapper();
- });
-
- it('stops loading', () => {
- expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(false);
- });
-
- it('generates one broken plan', () => {
- expect(findPlans()).toEqual([{ tf_report_error: 'api_error' }]);
- });
-
- it('does not make additional requests after poll is unsuccessful', () => {
- expect(pollRequest).toHaveBeenCalledTimes(1);
- expect(pollStop).toHaveBeenCalledTimes(1);
- });
- });
- });
-});
diff --git a/spec/frontend/vue_merge_request_widget/components/terraform/terraform_plan_spec.js b/spec/frontend/vue_merge_request_widget/components/terraform/terraform_plan_spec.js
deleted file mode 100644
index 3c9f6c2e165..00000000000
--- a/spec/frontend/vue_merge_request_widget/components/terraform/terraform_plan_spec.js
+++ /dev/null
@@ -1,93 +0,0 @@
-import { GlLink, GlSprintf } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import TerraformPlan from '~/vue_merge_request_widget/components/terraform/terraform_plan.vue';
-import {
- invalidPlanWithName,
- invalidPlanWithoutName,
- validPlanWithName,
- validPlanWithoutName,
-} from './mock_data';
-
-describe('TerraformPlan', () => {
- let wrapper;
-
- const findIcon = () => wrapper.find('[data-testid="change-type-icon"]');
- const findLogButton = () => wrapper.find('[data-testid="terraform-report-link"]');
-
- const mountWrapper = (propsData) => {
- wrapper = shallowMount(TerraformPlan, { stubs: { GlLink, GlSprintf }, propsData });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('valid plan with job_name', () => {
- beforeEach(() => {
- mountWrapper({ plan: validPlanWithName });
- });
-
- it('displays a document icon', () => {
- expect(findIcon().attributes('name')).toBe('doc-changes');
- });
-
- it('diplays the header text with a name', () => {
- expect(wrapper.text()).toContain(`The job ${validPlanWithName.job_name} generated a report.`);
- });
-
- it('diplays the reported changes', () => {
- expect(wrapper.text()).toContain(
- `Reported Resource Changes: ${validPlanWithName.create} to add, ${validPlanWithName.update} to change, ${validPlanWithName.delete} to delete`,
- );
- });
-
- it('renders button when url is found', () => {
- expect(findLogButton().exists()).toBe(true);
- expect(findLogButton().text()).toEqual('View full log');
- });
- });
-
- describe('valid plan without job_name', () => {
- beforeEach(() => {
- mountWrapper({ plan: validPlanWithoutName });
- });
-
- it('diplays the header text without a name', () => {
- expect(wrapper.text()).toContain('A report was generated in your pipelines.');
- });
- });
-
- describe('invalid plan with job_name', () => {
- beforeEach(() => {
- mountWrapper({ plan: invalidPlanWithName });
- });
-
- it('displays a warning icon', () => {
- expect(findIcon().attributes('name')).toBe('warning');
- });
-
- it('diplays the header text with a name', () => {
- expect(wrapper.text()).toContain(
- `The job ${invalidPlanWithName.job_name} failed to generate a report.`,
- );
- });
-
- it('diplays generic error since report values are missing', () => {
- expect(wrapper.text()).toContain('Generating the report caused an error.');
- });
- });
-
- describe('invalid plan with out job_name', () => {
- beforeEach(() => {
- mountWrapper({ plan: invalidPlanWithoutName });
- });
-
- it('diplays the header text without a name', () => {
- expect(wrapper.text()).toContain('A report failed to generate.');
- });
-
- it('does not render button because url is missing', () => {
- expect(findLogButton().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
new file mode 100644
index 00000000000..08424077269
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/widget/__snapshots__/dynamic_content_spec.js.snap
@@ -0,0 +1,35 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`~/vue_merge_request_widget/components/widget/dynamic_content.vue renders given data 1`] = `
+"<content-row-stub level=\\"2\\" statusiconname=\\"success\\" widgetname=\\"MyWidget\\" header=\\"This is a header,This is a subheader\\">
+ <div class=\\"gl-display-flex gl-flex-direction-column\\">
+ <div>
+ <p class=\\"gl-mb-0\\">Main text for the row</p>
+ <gl-link-stub href=\\"https://gitlab.com\\">Optional link to display after text</gl-link-stub>
+ <!---->
+ <gl-badge-stub size=\\"md\\" variant=\\"info\\">
+ Badge is optional. Text to be displayed inside badge
+ </gl-badge-stub>
+ <actions-stub widget=\\"MyWidget\\" tertiarybuttons=\\"\\" class=\\"gl-ml-auto gl-pl-3\\"></actions-stub>
+ <p class=\\"gl-m-0 gl-font-sm\\">Optional: Smaller sub-text to be displayed below the main text</p>
+ </div>
+ <ul class=\\"gl-m-0 gl-p-0 gl-list-style-none\\">
+ <li>
+ <content-row-stub level=\\"3\\" statusiconname=\\"\\" widgetname=\\"MyWidget\\" header=\\"Child row header\\" data-qa-selector=\\"child_content\\">
+ <div class=\\"gl-display-flex gl-flex-direction-column\\">
+ <div>
+ <p class=\\"gl-mb-0\\">This is recursive. It will be listed in level 3.</p>
+ <!---->
+ <!---->
+ <!---->
+ <actions-stub widget=\\"MyWidget\\" tertiarybuttons=\\"\\" class=\\"gl-ml-auto gl-pl-3\\"></actions-stub>
+ <!---->
+ </div>
+ <!---->
+ </div>
+ </content-row-stub>
+ </li>
+ </ul>
+ </div>
+</content-row-stub>"
+`;
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/dynamic_content_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/dynamic_content_spec.js
new file mode 100644
index 00000000000..b7753a58747
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/widget/dynamic_content_spec.js
@@ -0,0 +1,52 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { EXTENSION_ICONS } from '~/vue_merge_request_widget/constants';
+import DynamicContent from '~/vue_merge_request_widget/components/widget/dynamic_content.vue';
+
+describe('~/vue_merge_request_widget/components/widget/dynamic_content.vue', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData } = {}) => {
+ wrapper = shallowMountExtended(DynamicContent, {
+ propsData: {
+ widgetName: 'MyWidget',
+ ...propsData,
+ },
+ stubs: {
+ DynamicContent,
+ },
+ });
+ };
+
+ it('renders given data', () => {
+ createComponent({
+ propsData: {
+ data: {
+ id: 'row-id',
+ header: ['This is a header', 'This is a subheader'],
+ text: 'Main text for the row',
+ subtext: 'Optional: Smaller sub-text to be displayed below the main text',
+ icon: {
+ name: EXTENSION_ICONS.success,
+ },
+ badge: {
+ text: 'Badge is optional. Text to be displayed inside badge',
+ variant: 'info',
+ },
+ link: {
+ text: 'Optional link to display after text',
+ href: 'https://gitlab.com',
+ },
+ children: [
+ {
+ id: 'row-id-2',
+ header: 'Child row header',
+ text: 'This is recursive. It will be listed in level 3.',
+ },
+ ],
+ },
+ },
+ });
+
+ expect(wrapper.html()).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/widget_content_row_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/widget_content_row_spec.js
new file mode 100644
index 00000000000..9eddd091ad0
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/widget/widget_content_row_spec.js
@@ -0,0 +1,65 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import WidgetContentRow from '~/vue_merge_request_widget/components/widget/widget_content_row.vue';
+import StatusIcon from '~/vue_merge_request_widget/components/widget/status_icon.vue';
+
+describe('~/vue_merge_request_widget/components/widget/widget_content_row.vue', () => {
+ let wrapper;
+
+ const findStatusIcon = () => wrapper.findComponent(StatusIcon);
+
+ const createComponent = ({ propsData, slots } = {}) => {
+ wrapper = shallowMountExtended(WidgetContentRow, {
+ propsData: {
+ widgetName: 'MyWidget',
+ level: 2,
+ ...propsData,
+ },
+ slots,
+ });
+ };
+
+ describe('body', () => {
+ it('renders the status icon when provided', () => {
+ createComponent({ propsData: { statusIconName: 'failed' } });
+ expect(findStatusIcon().exists()).toBe(true);
+ });
+
+ it('does not render the status icon when it is not provided', () => {
+ createComponent();
+ expect(findStatusIcon().exists()).toBe(false);
+ });
+
+ it('renders slots properly', () => {
+ createComponent({
+ propsData: {
+ statusIconName: 'success',
+ },
+ slots: {
+ header: '<span>this is a header</span>',
+ body: '<span>this is a body</span>',
+ },
+ });
+
+ expect(wrapper.findByText('this is a body').exists()).toBe(true);
+ expect(wrapper.findByText('this is a header').exists()).toBe(true);
+ });
+ });
+
+ describe('header', () => {
+ it('renders an array of header and subheader', () => {
+ createComponent({ propsData: { header: ['this is a header', 'this is a subheader'] } });
+ expect(wrapper.findByText('this is a header').exists()).toBe(true);
+ expect(wrapper.findByText('this is a subheader').exists()).toBe(true);
+ });
+
+ it('renders a string', () => {
+ createComponent({ propsData: { header: 'this is a header' } });
+ expect(wrapper.findByText('this is a header').exists()).toBe(true);
+ });
+
+ it('escapes html injection properly', () => {
+ createComponent({ propsData: { header: '<b role="header">this is a header</b>' } });
+ expect(wrapper.findByText('<b role="header">this is a header</b>').exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/widget_content_section_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/widget_content_section_spec.js
deleted file mode 100644
index c2128d3ff33..00000000000
--- a/spec/frontend/vue_merge_request_widget/components/widget/widget_content_section_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import WidgetContentSection from '~/vue_merge_request_widget/components/widget/widget_content_section.vue';
-import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
-
-describe('~/vue_merge_request_widget/components/widget/widget_content_section.vue', () => {
- let wrapper;
-
- const findStatusIcon = () => wrapper.findComponent(StatusIcon);
-
- const createComponent = ({ propsData, slots } = {}) => {
- wrapper = shallowMountExtended(WidgetContentSection, {
- propsData: {
- widgetName: 'MyWidget',
- ...propsData,
- },
- slots,
- });
- };
-
- it('does not render the status icon when it is not provided', () => {
- createComponent();
- expect(findStatusIcon().exists()).toBe(false);
- });
-
- it('renders the status icon when provided', () => {
- createComponent({ propsData: { statusIconName: 'failed' } });
- expect(findStatusIcon().exists()).toBe(true);
- });
-
- it('renders the default slot', () => {
- createComponent({
- slots: {
- default: 'Hello world',
- },
- });
-
- expect(wrapper.findByText('Hello world').exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
index b67b5703ad5..4826fecf98d 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
@@ -5,8 +5,9 @@ import waitForPromises from 'helpers/wait_for_promises';
import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
import ActionButtons from '~/vue_merge_request_widget/components/action_buttons.vue';
import Widget from '~/vue_merge_request_widget/components/widget/widget.vue';
+import WidgetContentRow from '~/vue_merge_request_widget/components/widget/widget_content_row.vue';
-describe('MR Widget', () => {
+describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
let wrapper;
const findStatusIcon = () => wrapper.findComponent(StatusIcon);
@@ -27,6 +28,10 @@ describe('MR Widget', () => {
...propsData,
},
slots,
+ stubs: {
+ StatusIcon,
+ ContentRow: WidgetContentRow,
+ },
});
};
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
index 7e7438bcc0f..1bad5dacefa 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
@@ -41,7 +41,7 @@ describe('Deployment action button', () => {
});
it('renders prop icon correctly', () => {
- expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
});
});
@@ -59,7 +59,7 @@ describe('Deployment action button', () => {
});
it('renders slot and icon prop correctly', () => {
- expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
expect(wrapper.text()).toContain(actionButtonMocks[DEPLOYING].toString());
});
});
@@ -75,8 +75,8 @@ describe('Deployment action button', () => {
});
it('is disabled and shows the loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.find(GlButton).props('disabled')).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlButton).props('disabled')).toBe(true);
});
});
@@ -90,8 +90,8 @@ describe('Deployment action button', () => {
});
});
it('is disabled and does not show the loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(GlButton).props('disabled')).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlButton).props('disabled')).toBe(true);
});
});
@@ -106,8 +106,8 @@ describe('Deployment action button', () => {
});
});
it('is disabled and does not show the loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(GlButton).props('disabled')).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlButton).props('disabled')).toBe(true);
});
});
@@ -118,8 +118,8 @@ describe('Deployment action button', () => {
});
});
it('is not disabled nor does it show the loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(GlButton).props('disabled')).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlButton).props('disabled')).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
index a8912405fa8..58dadb2c679 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import { visitUrl } from '~/lib/utils/url_utility';
import {
@@ -11,6 +11,7 @@ import {
REDEPLOYING,
STOPPING,
} from '~/vue_merge_request_widget/components/deployment/constants';
+import eventHub from '~/vue_merge_request_widget/event_hub';
import DeploymentActions from '~/vue_merge_request_widget/components/deployment/deployment_actions.vue';
import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
import {
@@ -167,7 +168,7 @@ describe('DeploymentAction component', () => {
});
it('should not throw an error', () => {
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
});
describe('response includes redirect_url', () => {
@@ -192,6 +193,7 @@ describe('DeploymentAction component', () => {
describe('it should call the executeAction method', () => {
beforeEach(async () => {
jest.spyOn(wrapper.vm, 'executeAction').mockImplementation();
+ jest.spyOn(eventHub, '$emit');
await waitForPromises();
@@ -206,11 +208,16 @@ describe('DeploymentAction component', () => {
actionButtonMocks[configConst],
);
});
+
+ it('emits the FetchDeployments event', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('FetchDeployments');
+ });
});
describe('when executeInlineAction errors', () => {
beforeEach(async () => {
executeActionSpy.mockRejectedValueOnce();
+ jest.spyOn(eventHub, '$emit');
await waitForPromises();
@@ -218,12 +225,15 @@ describe('DeploymentAction component', () => {
finderFn().trigger('click');
});
- it('should call createFlash with error message', () => {
- expect(createFlash).toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
+ it('should call createAlert with error message', () => {
+ expect(createAlert).toHaveBeenCalledWith({
message: actionButtonMocks[configConst].errorMessage,
});
});
+
+ it('emits the FetchDeployments event', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('FetchDeployments');
+ });
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
index c27cbd8b781..f310f7669a9 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
@@ -37,7 +37,7 @@ describe('Deployment component', () => {
});
it('always renders DeploymentInfo', () => {
- expect(wrapper.find(DeploymentInfo).exists()).toBe(true);
+ expect(wrapper.findComponent(DeploymentInfo).exists()).toBe(true);
});
describe('status message and buttons', () => {
@@ -111,7 +111,7 @@ describe('Deployment component', () => {
});
it(`renders the text: ${text}`, () => {
- expect(wrapper.find(DeploymentInfo).text()).toContain(text);
+ expect(wrapper.findComponent(DeploymentInfo).text()).toContain(text);
});
if (actionButtons.length > 0) {
@@ -137,9 +137,11 @@ describe('Deployment component', () => {
if (actionButtons.includes(DeploymentViewButton)) {
it('renders the View button with expected text', () => {
if (status === SUCCESS) {
- expect(wrapper.find(DeploymentViewButton).text()).toContain('View app');
+ expect(wrapper.findComponent(DeploymentViewButton).text()).toContain('View app');
} else {
- expect(wrapper.find(DeploymentViewButton).text()).toContain('View latest app');
+ expect(wrapper.findComponent(DeploymentViewButton).text()).toContain(
+ 'View latest app',
+ );
}
});
}
@@ -150,7 +152,7 @@ describe('Deployment component', () => {
describe('hasExternalUrls', () => {
describe('when deployment has both external_url_formatted and external_url', () => {
it('should render the View Button', () => {
- expect(wrapper.find(DeploymentViewButton).exists()).toBe(true);
+ expect(wrapper.findComponent(DeploymentViewButton).exists()).toBe(true);
});
});
@@ -165,7 +167,7 @@ describe('Deployment component', () => {
});
it('should not render the View Button', () => {
- expect(wrapper.find(DeploymentViewButton).exists()).toBe(false);
+ expect(wrapper.findComponent(DeploymentViewButton).exists()).toBe(false);
});
});
@@ -180,7 +182,7 @@ describe('Deployment component', () => {
});
it('should not render the View Button', () => {
- expect(wrapper.find(DeploymentViewButton).exists()).toBe(false);
+ expect(wrapper.findComponent(DeploymentViewButton).exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js
index eb6e3711e2e..8994fa522d0 100644
--- a/spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js
@@ -2,6 +2,7 @@ import { GlDropdown, GlLink } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import DeploymentViewButton from '~/vue_merge_request_widget/components/deployment/deployment_view_button.vue';
import ReviewAppLink from '~/vue_merge_request_widget/components/review_app_link.vue';
+import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
import { deploymentMockData } from './deployment_mock_data';
const appButtonText = {
@@ -36,6 +37,7 @@ describe('Deployment View App button', () => {
const findMrWigdetDeploymentDropdownIcon = () =>
wrapper.findByTestId('mr-wigdet-deployment-dropdown-icon');
const findDeployUrlMenuItems = () => wrapper.findAllComponents(GlLink);
+ const findCopyButton = () => wrapper.findComponent(ModalCopyButton);
describe('text', () => {
it('renders text as passed', () => {
@@ -44,39 +46,93 @@ describe('Deployment View App button', () => {
});
describe('without changes', () => {
+ let deployment;
+
beforeEach(() => {
- createComponent({
- propsData: {
- deployment: { ...deploymentMockData, changes: null },
- appButtonText,
- },
+ deployment = { ...deploymentMockData, changes: null };
+ });
+
+ describe('with safe url', () => {
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ deployment,
+ appButtonText,
+ },
+ });
+ });
+
+ it('renders the link to the review app without dropdown', () => {
+ expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
+ expect(findReviewAppLink().attributes('href')).toBe(deployment.external_url);
});
});
- it('renders the link to the review app without dropdown', () => {
- expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
+ describe('without safe URL', () => {
+ beforeEach(() => {
+ deployment = { ...deployment, external_url: 'postgres://example' };
+ createComponent({
+ propsData: {
+ deployment,
+ appButtonText,
+ },
+ });
+ });
+
+ it('renders the link as a copy button', () => {
+ expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
+ expect(findCopyButton().props('text')).toBe(deployment.external_url);
+ });
});
});
describe('with a single change', () => {
+ let deployment;
+ let change;
+
beforeEach(() => {
- createComponent({
- propsData: {
- deployment: { ...deploymentMockData, changes: [deploymentMockData.changes[0]] },
- appButtonText,
- },
- });
+ [change] = deploymentMockData.changes;
+ deployment = { ...deploymentMockData, changes: [change] };
});
- it('renders the link to the review app without dropdown', () => {
- expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
- expect(findMrWigdetDeploymentDropdownIcon().exists()).toBe(false);
+ describe('with safe URL', () => {
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ deployment,
+ appButtonText,
+ },
+ });
+ });
+
+ it('renders the link to the review app without dropdown', () => {
+ expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
+ expect(findMrWigdetDeploymentDropdownIcon().exists()).toBe(false);
+ });
+
+ it('renders the link to the review app linked to to the first change', () => {
+ const expectedUrl = deploymentMockData.changes[0].external_url;
+
+ expect(findReviewAppLink().attributes('href')).toBe(expectedUrl);
+ });
});
- it('renders the link to the review app linked to to the first change', () => {
- const expectedUrl = deploymentMockData.changes[0].external_url;
+ describe('with unsafe URL', () => {
+ beforeEach(() => {
+ change = { ...change, external_url: 'postgres://example' };
+ deployment = { ...deployment, changes: [change] };
+ createComponent({
+ propsData: {
+ deployment,
+ appButtonText,
+ },
+ });
+ });
- expect(findReviewAppLink().attributes('href')).toBe(expectedUrl);
+ it('renders the link as a copy button', () => {
+ expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
+ expect(findCopyButton().props('text')).toBe(change.external_url);
+ });
});
});
diff --git a/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
index 82743275739..05df66165dd 100644
--- a/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
@@ -42,7 +42,7 @@ describe('Test report extension', () => {
const findFullReportLink = () => wrapper.findByTestId('full-report-link');
const findCopyFailedSpecsBtn = () => wrapper.findByTestId('copy-failed-specs-btn');
const findAllExtensionListItems = () => wrapper.findAllByTestId('extension-list-item');
- const findModal = () => wrapper.find(TestCaseDetails);
+ const findModal = () => wrapper.findComponent(TestCaseDetails);
const createComponent = () => {
wrapper = mountExtended(extensionsContainer, {
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
index 295b9df30b9..d038660e6d3 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
@@ -24,7 +24,7 @@ describe('MRWidgetHowToMerge', () => {
mountComponent();
});
- const findModal = () => wrapper.find(GlModal);
+ const findModal = () => wrapper.findComponent(GlModal);
const findInstructionsFields = () =>
wrapper.findAll('[ data-testid="how-to-merge-instructions"]');
const findTipLink = () => wrapper.find("[data-testid='docs-tip']");
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
index cc894f94f80..6622749da92 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
@@ -368,12 +368,13 @@ describe('MrWidgetOptions', () => {
describe('bindEventHubListeners', () => {
it.each`
- event | method | methodArgs
- ${'MRWidgetUpdateRequested'} | ${'checkStatus'} | ${(x) => [x]}
- ${'MRWidgetRebaseSuccess'} | ${'checkStatus'} | ${(x) => [x, true]}
- ${'FetchActionsContent'} | ${'fetchActionsContent'} | ${() => []}
- ${'EnablePolling'} | ${'resumePolling'} | ${() => []}
- ${'DisablePolling'} | ${'stopPolling'} | ${() => []}
+ event | method | methodArgs
+ ${'MRWidgetUpdateRequested'} | ${'checkStatus'} | ${(x) => [x]}
+ ${'MRWidgetRebaseSuccess'} | ${'checkStatus'} | ${(x) => [x, true]}
+ ${'FetchActionsContent'} | ${'fetchActionsContent'} | ${() => []}
+ ${'EnablePolling'} | ${'resumePolling'} | ${() => []}
+ ${'DisablePolling'} | ${'stopPolling'} | ${() => []}
+ ${'FetchDeployments'} | ${'fetchPreMergeDeployments'} | ${() => []}
`('should bind to $event', ({ event, method, methodArgs }) => {
jest.spyOn(wrapper.vm, method).mockImplementation();
@@ -771,34 +772,40 @@ describe('MrWidgetOptions', () => {
});
describe('security widget', () => {
- describe.each`
- context | hasPipeline | shouldRender
- ${'there is a pipeline'} | ${true} | ${true}
- ${'no pipeline'} | ${false} | ${false}
- `('given $context', ({ hasPipeline, shouldRender }) => {
- beforeEach(() => {
- const mrData = {
- ...mockData,
- ...(hasPipeline ? {} : { pipeline: null }),
- };
+ const setup = async (hasPipeline) => {
+ const mrData = {
+ ...mockData,
+ ...(hasPipeline ? {} : { pipeline: null }),
+ };
- // Override top-level mocked requests, which always use a fresh copy of
- // mockData, which always includes the full pipeline object.
- mock.onGet(mockData.merge_request_widget_path).reply(() => [200, mrData]);
- mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, mrData]);
-
- return createComponent(mrData, {
- apolloProvider: createMockApollo([
- [
- securityReportMergeRequestDownloadPathsQuery,
- async () => ({ data: securityReportMergeRequestDownloadPathsQueryResponse }),
- ],
- ]),
- });
+ // Override top-level mocked requests, which always use a fresh copy of
+ // mockData, which always includes the full pipeline object.
+ mock.onGet(mockData.merge_request_widget_path).reply(() => [200, mrData]);
+ mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, mrData]);
+
+ return createComponent(mrData, {
+ apolloProvider: createMockApollo([
+ [
+ securityReportMergeRequestDownloadPathsQuery,
+ async () => ({ data: securityReportMergeRequestDownloadPathsQueryResponse }),
+ ],
+ ]),
});
+ };
+
+ describe('with a pipeline', () => {
+ it('renders the security widget', async () => {
+ await setup(true);
+
+ expect(findSecurityMrWidget().exists()).toBe(true);
+ });
+ });
+
+ describe('with no pipeline', () => {
+ it('does not render the security widget', async () => {
+ await setup(false);
- it(shouldRender ? 'renders' : 'does not render', () => {
- expect(findSecurityMrWidget().exists()).toBe(shouldRender);
+ expect(findSecurityMrWidget().exists()).toBe(false);
});
});
});
@@ -881,7 +888,10 @@ describe('MrWidgetOptions', () => {
await nextTick();
expect(
- wrapper.find('[data-testid="widget-extension-top-level"]').find(GlDropdown).exists(),
+ wrapper
+ .find('[data-testid="widget-extension-top-level"]')
+ .findComponent(GlDropdown)
+ .exists(),
).toBe(false);
await nextTick();
@@ -891,19 +901,19 @@ describe('MrWidgetOptions', () => {
expect(collapsedSection.text()).toContain('Hello world');
// Renders icon in the row
- expect(collapsedSection.find(GlIcon).exists()).toBe(true);
- expect(collapsedSection.find(GlIcon).props('name')).toBe('status-failed');
+ expect(collapsedSection.findComponent(GlIcon).exists()).toBe(true);
+ expect(collapsedSection.findComponent(GlIcon).props('name')).toBe('status-failed');
// Renders badge in the row
- expect(collapsedSection.find(GlBadge).exists()).toBe(true);
- expect(collapsedSection.find(GlBadge).text()).toBe('Closed');
+ expect(collapsedSection.findComponent(GlBadge).exists()).toBe(true);
+ expect(collapsedSection.findComponent(GlBadge).text()).toBe('Closed');
// Renders a link in the row
- expect(collapsedSection.find(GlLink).exists()).toBe(true);
- expect(collapsedSection.find(GlLink).text()).toBe('GitLab.com');
+ expect(collapsedSection.findComponent(GlLink).exists()).toBe(true);
+ expect(collapsedSection.findComponent(GlLink).text()).toBe('GitLab.com');
- expect(collapsedSection.find(GlButton).exists()).toBe(true);
- expect(collapsedSection.find(GlButton).text()).toBe('Full report');
+ expect(collapsedSection.findComponent(GlButton).exists()).toBe(true);
+ expect(collapsedSection.findComponent(GlButton).text()).toBe('Full report');
});
it('extension polling is not called if enablePolling flag is not passed', () => {
@@ -994,7 +1004,7 @@ describe('MrWidgetOptions', () => {
await createComponent();
- expect(pollRequest).toHaveBeenCalledTimes(4);
+ expect(pollRequest).toHaveBeenCalledTimes(2);
});
});
@@ -1032,7 +1042,7 @@ describe('MrWidgetOptions', () => {
registerExtension(pollingErrorExtension);
await createComponent();
- expect(pollRequest).toHaveBeenCalledTimes(4);
+ expect(pollRequest).toHaveBeenCalledTimes(2);
});
it('captures sentry error and displays error when poll has failed', async () => {
@@ -1134,7 +1144,7 @@ describe('MrWidgetOptions', () => {
${'WidgetCodeQuality'} | ${'i_testing_code_quality_widget_total'}
${'WidgetTerraform'} | ${'i_testing_terraform_widget_total'}
${'WidgetIssues'} | ${'i_testing_issues_widget_total'}
- ${'WidgetTestReport'} | ${'i_testing_summary_widget_total'}
+ ${'WidgetTestSummary'} | ${'i_testing_summary_widget_total'}
`(
"sends non-standard events for the '$widgetName' widget",
async ({ widgetName, nonStandardEvent }) => {
diff --git a/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js b/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
index 0246a8d4b0f..88d9d0b4cff 100644
--- a/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
@@ -16,12 +16,13 @@ describe('getStateKey', () => {
commitsCount: 2,
hasConflicts: false,
draft: false,
+ detailedMergeStatus: null,
};
const bound = getStateKey.bind(context);
expect(bound()).toEqual(null);
- context.canBeMerged = true;
+ context.detailedMergeStatus = 'MERGEABLE';
expect(bound()).toEqual('readyToMerge');
@@ -36,21 +37,15 @@ describe('getStateKey', () => {
expect(bound()).toEqual('shaMismatch');
context.canMerge = false;
- context.isPipelineBlocked = true;
-
- expect(bound()).toEqual('pipelineBlocked');
-
- context.hasMergeableDiscussionsState = true;
- context.autoMergeEnabled = false;
+ context.detailedMergeStatus = 'DISCUSSIONS_NOT_RESOLVED';
expect(bound()).toEqual('unresolvedDiscussions');
- context.draft = true;
+ context.detailedMergeStatus = 'DRAFT_STATUS';
expect(bound()).toEqual('draft');
- context.onlyAllowMergeIfPipelineSucceeds = true;
- context.isPipelineFailed = true;
+ context.detailedMergeStatus = 'CI_MUST_PASS';
expect(bound()).toEqual('pipelineFailed');
@@ -62,7 +57,7 @@ describe('getStateKey', () => {
expect(bound()).toEqual('conflicts');
- context.mergeStatus = 'unchecked';
+ context.detailedMergeStatus = 'CHECKING';
expect(bound()).toEqual('checking');
diff --git a/spec/frontend/vue_shared/components/ci_badge_link_spec.js b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
index 27b6718fb8e..07cbfe1e79b 100644
--- a/spec/frontend/vue_shared/components/ci_badge_link_spec.js
+++ b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
@@ -1,7 +1,7 @@
+import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
-import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn(),
@@ -86,18 +86,14 @@ describe('CI Badge Link Component', () => {
wrapper.destroy();
});
- it.each(Object.keys(statuses))('should render badge for status: %s', async (status) => {
+ it.each(Object.keys(statuses))('should render badge for status: %s', (status) => {
createComponent({ status: statuses[status] });
- expect(wrapper.attributes('href')).toBe();
+ expect(wrapper.attributes('href')).toBe(statuses[status].details_path);
expect(wrapper.text()).toBe(statuses[status].text);
expect(wrapper.classes()).toContain('ci-status');
expect(wrapper.classes()).toContain(`ci-${statuses[status].group}`);
expect(findIcon().exists()).toBe(true);
-
- await wrapper.trigger('click');
-
- expect(visitUrl).toHaveBeenCalledWith(statuses[status].details_path);
});
it('should not render label', () => {
@@ -109,7 +105,7 @@ describe('CI Badge Link Component', () => {
it('should emit ciStatusBadgeClick event', async () => {
createComponent({ status: statuses.success });
- await wrapper.trigger('click');
+ await wrapper.findComponent(GlLink).vm.$emit('click');
expect(wrapper.emitted('ciStatusBadgeClick')).toEqual([[]]);
});
diff --git a/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js b/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
index 441e21ee905..5b0772f6e34 100644
--- a/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/color_select_dropdown/color_select_root_spec.js
@@ -3,7 +3,7 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
import DropdownContents from '~/vue_shared/components/color_select_dropdown/dropdown_contents.vue';
import DropdownValue from '~/vue_shared/components/color_select_dropdown/dropdown_value.vue';
@@ -146,7 +146,7 @@ describe('LabelsSelectRoot', () => {
});
it('creates flash with error message', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
captureError: true,
message: 'Error fetching epic color.',
});
@@ -186,7 +186,7 @@ describe('LabelsSelectRoot', () => {
findDropdownContents().vm.$emit('setColor', color);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
captureError: true,
error: expect.anything(),
message: 'An error occurred while updating color.',
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
index 10eacff630d..7a8f94b3746 100644
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_lib_spec.js
@@ -121,7 +121,7 @@ describe('date time picker lib', () => {
const utcResult = '2019-09-08T01:01:01Z';
const localResult = '2019-09-08T08:01:01Z';
- test.each`
+ it.each`
val | locatTimezone | utc | result
${value} | ${'UTC'} | ${undefined} | ${utcResult}
${value} | ${'UTC'} | ${false} | ${utcResult}
@@ -167,7 +167,7 @@ describe('date time picker lib', () => {
const utcResult = '2019-09-08 08:01:01';
const localResult = '2019-09-08 01:01:01';
- test.each`
+ it.each`
val | locatTimezone | utc | result
${value} | ${'UTC'} | ${undefined} | ${utcResult}
${value} | ${'UTC'} | ${false} | ${utcResult}
diff --git a/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js b/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js
index 68684004b82..99c973bdd26 100644
--- a/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/diff_stats_dropdown_spec.js
@@ -106,11 +106,11 @@ describe('Diff Stats Dropdown', () => {
expectedAddedDeletedExpanded,
expectedAddedDeletedCollapsed,
}) => {
- beforeAll(() => {
+ beforeEach(() => {
createComponent({ changed, added, deleted });
});
- afterAll(() => {
+ afterEach(() => {
wrapper.destroy();
});
diff --git a/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js b/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js
index 69964b2687d..6e0717c29d7 100644
--- a/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/diff_viewer/diff_viewer_spec.js
@@ -1,8 +1,6 @@
-import Vue, { nextTick } from 'vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
-import diffViewer from '~/vue_shared/components/diff_viewer/diff_viewer.vue';
+import DiffViewer from '~/vue_shared/components/diff_viewer/diff_viewer.vue';
describe('DiffViewer', () => {
const requiredProps = {
@@ -14,37 +12,28 @@ describe('DiffViewer', () => {
oldPath: RED_BOX_IMAGE_URL,
oldSha: 'DEF',
};
- let vm;
-
- function createComponent(props) {
- const DiffViewer = Vue.extend(diffViewer);
+ let wrapper;
- vm = mountComponent(DiffViewer, props);
+ function createComponent(propsData) {
+ wrapper = mount(DiffViewer, { propsData });
}
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('renders image diff', async () => {
+ it('renders image diff', () => {
window.gon = {
relative_url_root: '',
};
createComponent({ ...requiredProps, projectPath: '' });
- await nextTick();
-
- expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(
- `//-/raw/DEF/${RED_BOX_IMAGE_URL}`,
- );
-
- expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(
- `//-/raw/ABC/${GREEN_BOX_IMAGE_URL}`,
- );
+ expect(wrapper.find('.deleted img').attributes('src')).toBe(`//-/raw/DEF/${RED_BOX_IMAGE_URL}`);
+ expect(wrapper.find('.added img').attributes('src')).toBe(`//-/raw/ABC/${GREEN_BOX_IMAGE_URL}`);
});
- it('renders fallback download diff display', async () => {
+ it('renders fallback download diff display', () => {
createComponent({
...requiredProps,
diffViewerMode: 'added',
@@ -52,18 +41,10 @@ describe('DiffViewer', () => {
oldPath: 'testold.abc',
});
- await nextTick();
-
- expect(vm.$el.querySelector('.deleted .file-info').textContent.trim()).toContain('testold.abc');
-
- expect(vm.$el.querySelector('.deleted .btn.btn-default').textContent.trim()).toContain(
- 'Download',
- );
-
- expect(vm.$el.querySelector('.added .file-info').textContent.trim()).toContain('test.abc');
- expect(vm.$el.querySelector('.added .btn.btn-default').textContent.trim()).toContain(
- 'Download',
- );
+ expect(wrapper.find('.deleted .file-info').text()).toContain('testold.abc');
+ expect(wrapper.find('.deleted .btn.btn-default').text()).toContain('Download');
+ expect(wrapper.find('.added .file-info').text()).toContain('test.abc');
+ expect(wrapper.find('.added .btn.btn-default').text()).toContain('Download');
});
describe('renamed file', () => {
@@ -85,7 +66,7 @@ describe('DiffViewer', () => {
oldPath: 'testold.abc',
});
- expect(vm.$el.textContent).toContain('File renamed with no changes.');
+ expect(wrapper.text()).toContain('File renamed with no changes.');
});
});
@@ -99,6 +80,6 @@ describe('DiffViewer', () => {
bMode: '321',
});
- expect(vm.$el.textContent).toContain('File mode changed from 123 to 321');
+ expect(wrapper.text()).toContain('File mode changed from 123 to 321');
});
});
diff --git a/spec/frontend/vue_shared/components/file_finder/item_spec.js b/spec/frontend/vue_shared/components/file_finder/item_spec.js
index b69c33055c1..f0998b1b5c6 100644
--- a/spec/frontend/vue_shared/components/file_finder/item_spec.js
+++ b/spec/frontend/vue_shared/components/file_finder/item_spec.js
@@ -1,127 +1,119 @@
-import Vue, { nextTick } from 'vue';
-import createComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import { file } from 'jest/ide/helpers';
import ItemComponent from '~/vue_shared/components/file_finder/item.vue';
describe('File finder item spec', () => {
- const Component = Vue.extend(ItemComponent);
- let vm;
- let localFile;
-
- beforeEach(() => {
- localFile = {
- ...file(),
- name: 'test file',
- path: 'test/file',
- };
-
- vm = createComponent(Component, {
- file: localFile,
- focused: true,
- searchText: '',
- index: 0,
+ let wrapper;
+
+ const createComponent = ({ file: customFileFields = {}, ...otherProps } = {}) => {
+ wrapper = mount(ItemComponent, {
+ propsData: {
+ file: {
+ ...file(),
+ name: 'test file',
+ path: 'test/file',
+ ...customFileFields,
+ },
+ focused: true,
+ searchText: '',
+ index: 0,
+ ...otherProps,
+ },
});
- });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders file name & path', () => {
- expect(vm.$el.textContent).toContain('test file');
- expect(vm.$el.textContent).toContain('test/file');
+ createComponent();
+
+ expect(wrapper.text()).toContain('test file');
+ expect(wrapper.text()).toContain('test/file');
});
describe('focused', () => {
it('adds is-focused class', () => {
- expect(vm.$el.classList).toContain('is-focused');
+ createComponent();
+
+ expect(wrapper.classes()).toContain('is-focused');
});
it('does not have is-focused class when not focused', async () => {
- vm.focused = false;
+ createComponent({ focused: false });
- await nextTick();
- expect(vm.$el.classList).not.toContain('is-focused');
+ expect(wrapper.classes()).not.toContain('is-focused');
});
});
describe('changed file icon', () => {
it('does not render when not a changed or temp file', () => {
- expect(vm.$el.querySelector('.diff-changed-stats')).toBe(null);
+ createComponent();
+
+ expect(wrapper.find('.diff-changed-stats').exists()).toBe(false);
});
it('renders when a changed file', async () => {
- vm.file.changed = true;
+ createComponent({ file: { changed: true } });
- await nextTick();
- expect(vm.$el.querySelector('.diff-changed-stats')).not.toBe(null);
+ expect(wrapper.find('.diff-changed-stats').exists()).toBe(true);
});
it('renders when a temp file', async () => {
- vm.file.tempFile = true;
+ createComponent({ file: { tempFile: true } });
- await nextTick();
- expect(vm.$el.querySelector('.diff-changed-stats')).not.toBe(null);
+ expect(wrapper.find('.diff-changed-stats').exists()).toBe(true);
});
});
- it('emits event when clicked', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
+ it('emits event when clicked', async () => {
+ createComponent();
- vm.$el.click();
+ await wrapper.find('*').trigger('click');
- expect(vm.$emit).toHaveBeenCalledWith('click', vm.file);
+ expect(wrapper.emitted('click')[0]).toStrictEqual([wrapper.props('file')]);
});
describe('path', () => {
- let el;
-
- beforeEach(async () => {
- vm.searchText = 'file';
-
- el = vm.$el.querySelector('.diff-changed-file-path');
-
- nextTick();
- });
+ const findChangedFilePath = () => wrapper.find('.diff-changed-file-path');
it('highlights text', () => {
- expect(el.querySelectorAll('.highlighted').length).toBe(4);
+ createComponent({ searchText: 'file' });
+
+ expect(findChangedFilePath().findAll('.highlighted')).toHaveLength(4);
});
it('adds ellipsis to long text', async () => {
- vm.file.path = new Array(70)
+ const path = new Array(70)
.fill()
.map((_, i) => `${i}-`)
.join('');
- await nextTick();
- expect(el.textContent).toBe(`...${vm.file.path.substr(vm.file.path.length - 60)}`);
+ createComponent({ searchText: 'file', file: { path } });
+
+ expect(findChangedFilePath().text()).toBe(`...${path.substring(path.length - 60)}`);
});
});
describe('name', () => {
- let el;
-
- beforeEach(async () => {
- vm.searchText = 'file';
-
- el = vm.$el.querySelector('.diff-changed-file-name');
-
- await nextTick();
- });
+ const findChangedFileName = () => wrapper.find('.diff-changed-file-name');
it('highlights text', () => {
- expect(el.querySelectorAll('.highlighted').length).toBe(4);
+ createComponent({ searchText: 'file' });
+
+ expect(findChangedFileName().findAll('.highlighted')).toHaveLength(4);
});
it('does not add ellipsis to long text', async () => {
- vm.file.name = new Array(70)
+ const name = new Array(70)
.fill()
.map((_, i) => `${i}-`)
.join('');
- await nextTick();
- expect(el.textContent).not.toBe(`...${vm.file.name.substr(vm.file.name.length - 60)}`);
+ createComponent({ searchText: 'file', file: { name } });
+
+ expect(findChangedFileName().text()).not.toBe(`...${name.substring(name.length - 60)}`);
});
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
index 4140ec09b4e..66ef473f368 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import { mockBranches } from 'jest/vue_shared/components/filtered_search_bar/mock_data';
import Api from '~/api';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import httpStatusCodes from '~/lib/utils/http_status';
import * as actions from '~/vue_shared/components/filtered_search_bar/store/modules/filters/actions';
import * as types from '~/vue_shared/components/filtered_search_bar/store/modules/filters/mutation_types';
@@ -159,7 +159,7 @@ describe('Filters actions', () => {
},
],
[],
- ).then(() => expect(createFlash).toHaveBeenCalled());
+ ).then(() => expect(createAlert).toHaveBeenCalled());
});
});
});
@@ -233,7 +233,7 @@ describe('Filters actions', () => {
[],
).then(() => {
expect(mock.history.get[0].url).toBe('/api/v1/groups/fake_group_endpoint/members');
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -252,7 +252,7 @@ describe('Filters actions', () => {
[],
).then(() => {
expect(mock.history.get[0].url).toBe('/api/v1/projects/fake_project_endpoint/users');
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
});
@@ -298,7 +298,7 @@ describe('Filters actions', () => {
},
],
[],
- ).then(() => expect(createFlash).toHaveBeenCalled());
+ ).then(() => expect(createAlert).toHaveBeenCalled());
});
});
});
@@ -376,7 +376,7 @@ describe('Filters actions', () => {
[],
).then(() => {
expect(mock.history.get[0].url).toBe('/api/v1/groups/fake_group_endpoint/members');
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -395,7 +395,7 @@ describe('Filters actions', () => {
[],
).then(() => {
expect(mock.history.get[0].url).toBe('/api/v1/projects/fake_project_endpoint/users');
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
});
@@ -441,7 +441,7 @@ describe('Filters actions', () => {
},
],
[],
- ).then(() => expect(createFlash).toHaveBeenCalled());
+ ).then(() => expect(createAlert).toHaveBeenCalled());
});
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index 302dfabffb2..5371b9af475 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -8,7 +8,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
@@ -140,13 +140,13 @@ describe('AuthorToken', () => {
});
});
- it('calls `createFlash` with flash error message when request fails', () => {
+ it('calls `createAlert` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchAuthors').mockRejectedValue({});
getBaseToken().vm.$emit('fetch-suggestions', 'root');
return waitForPromises().then(() => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching users.',
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
index 1de35daa3a5..05b42011fe1 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
@@ -9,7 +9,7 @@ import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue';
@@ -87,13 +87,13 @@ describe('BranchToken', () => {
});
});
- it('calls `createFlash` with flash error message when request fails', () => {
+ it('calls `createAlert` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchBranches').mockRejectedValue({});
wrapper.vm.fetchBranches('foo');
return waitForPromises().then(() => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching branches.',
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
index c9879987931..5b744521979 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
@@ -8,7 +8,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
@@ -94,7 +94,7 @@ describe('CrmContactToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', 'foo');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchGroupCrmContactsQueryHandler).toHaveBeenCalledWith({
fullPath: 'group',
isProject: false,
@@ -108,7 +108,7 @@ describe('CrmContactToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', '5');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchGroupCrmContactsQueryHandler).toHaveBeenCalledWith({
fullPath: 'group',
isProject: false,
@@ -134,7 +134,7 @@ describe('CrmContactToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', 'foo');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchProjectCrmContactsQueryHandler).toHaveBeenCalledWith({
fullPath: 'project',
isProject: true,
@@ -148,7 +148,7 @@ describe('CrmContactToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', '5');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchProjectCrmContactsQueryHandler).toHaveBeenCalledWith({
fullPath: 'project',
isProject: true,
@@ -159,7 +159,7 @@ describe('CrmContactToken', () => {
});
});
- it('calls `createFlash` with flash error message when request fails', async () => {
+ it('calls `createAlert` with flash error message when request fails', async () => {
mountComponent();
jest.spyOn(wrapper.vm.$apollo, 'query').mockRejectedValue({});
@@ -167,7 +167,7 @@ describe('CrmContactToken', () => {
getBaseToken().vm.$emit('fetch-suggestions');
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching CRM contacts.',
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
index 16333b052e6..3a3e96032e8 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
@@ -8,7 +8,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
@@ -93,7 +93,7 @@ describe('CrmOrganizationToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', 'foo');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchGroupCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
fullPath: 'group',
isProject: false,
@@ -107,7 +107,7 @@ describe('CrmOrganizationToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', '5');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchGroupCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
fullPath: 'group',
isProject: false,
@@ -133,7 +133,7 @@ describe('CrmOrganizationToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', 'foo');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchProjectCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
fullPath: 'project',
isProject: true,
@@ -147,7 +147,7 @@ describe('CrmOrganizationToken', () => {
getBaseToken().vm.$emit('fetch-suggestions', '5');
await waitForPromises();
- expect(createFlash).not.toHaveBeenCalled();
+ expect(createAlert).not.toHaveBeenCalled();
expect(searchProjectCrmOrganizationsQueryHandler).toHaveBeenCalledWith({
fullPath: 'project',
isProject: true,
@@ -158,7 +158,7 @@ describe('CrmOrganizationToken', () => {
});
});
- it('calls `createFlash` with flash error message when request fails', async () => {
+ it('calls `createAlert` with flash error message when request fails', async () => {
mountComponent();
jest.spyOn(wrapper.vm.$apollo, 'query').mockRejectedValue({});
@@ -166,7 +166,7 @@ describe('CrmOrganizationToken', () => {
getBaseToken().vm.$emit('fetch-suggestions');
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching CRM organizations.',
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
index bf4a6eb7635..e8436d2db17 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
@@ -8,7 +8,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import {
@@ -93,13 +93,13 @@ describe('EmojiToken', () => {
});
});
- it('calls `createFlash` with flash error message when request fails', () => {
+ it('calls `createAlert` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchEmojis').mockRejectedValue({});
wrapper.vm.fetchEmojis('foo');
return waitForPromises().then(() => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching emojis.',
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index 01e281884ed..8ca12afacec 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -11,7 +11,7 @@ import {
mockRegularLabel,
mockLabels,
} from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
@@ -116,13 +116,13 @@ describe('LabelToken', () => {
});
});
- it('calls `createFlash` with flash error message when request fails', () => {
+ it('calls `createAlert` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels').mockRejectedValue({});
wrapper.vm.fetchLabels('foo');
return waitForPromises().then(() => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching labels.',
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index f71ba51fc5b..589697fe542 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -8,7 +8,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { sortMilestonesByDueDate } from '~/milestones/utils';
@@ -112,13 +112,13 @@ describe('MilestoneToken', () => {
});
});
- it('calls `createFlash` with flash error message when request fails', () => {
+ it('calls `createAlert` with flash error message when request fails', () => {
jest.spyOn(wrapper.vm.config, 'fetchMilestones').mockRejectedValue({});
wrapper.vm.fetchMilestones('foo');
return waitForPromises().then(() => {
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching milestones.',
});
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js
index 4bbbaab9b7a..0e5fa0f66d4 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/release_token_spec.js
@@ -2,7 +2,7 @@ import { GlFilteredSearchToken, GlFilteredSearchTokenSegment } from '@gitlab/ui'
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import ReleaseToken from '~/vue_shared/components/filtered_search_bar/tokens/release_token.vue';
import { mockReleaseToken } from '../mock_data';
@@ -73,7 +73,7 @@ describe('ReleaseToken', () => {
});
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
message: 'There was a problem fetching releases.',
});
});
diff --git a/spec/frontend/vue_shared/components/gitlab_version_check_spec.js b/spec/frontend/vue_shared/components/gitlab_version_check_spec.js
index 6699ae5fb69..38f28837cc1 100644
--- a/spec/frontend/vue_shared/components/gitlab_version_check_spec.js
+++ b/spec/frontend/vue_shared/components/gitlab_version_check_spec.js
@@ -1,7 +1,9 @@
import { GlBadge } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { mockTracking } from 'helpers/tracking_helper';
+import { helpPagePath } from '~/helpers/help_page_helper';
import axios from '~/lib/utils/axios_utils';
import GitlabVersionCheck from '~/vue_shared/components/gitlab_version_check.vue';
@@ -9,6 +11,8 @@ describe('GitlabVersionCheck', () => {
let wrapper;
let mock;
+ const UPGRADE_DOCS_URL = helpPagePath('update/index');
+
const defaultResponse = {
code: 200,
res: { severity: 'success' },
@@ -23,7 +27,7 @@ describe('GitlabVersionCheck', () => {
mock = new MockAdapter(axios);
mock.onGet().replyOnce(response.code, response.res);
- wrapper = shallowMount(GitlabVersionCheck);
+ wrapper = shallowMountExtended(GitlabVersionCheck);
};
const dummyGon = {
@@ -38,6 +42,7 @@ describe('GitlabVersionCheck', () => {
window.gon = originalGon;
});
+ const findGlBadgeClickWrapper = () => wrapper.findByTestId('badge-click-wrapper');
const findGlBadge = () => wrapper.findComponent(GlBadge);
describe.each`
@@ -77,7 +82,8 @@ describe('GitlabVersionCheck', () => {
await waitForPromises(); // Ensure we wrap up the axios call
});
- it(`does${renders ? '' : ' not'} render GlBadge`, () => {
+ it(`does${renders ? '' : ' not'} render Badge Click Wrapper and GlBadge`, () => {
+ expect(findGlBadgeClickWrapper().exists()).toBe(renders);
expect(findGlBadge().exists()).toBe(renders);
});
});
@@ -90,8 +96,11 @@ describe('GitlabVersionCheck', () => {
${{ code: 200, res: { severity: 'danger' } }} | ${{ title: 'Update ASAP', variant: 'danger' }}
`('badge ui', ({ mockResponse, expectedUI }) => {
describe(`when response is ${mockResponse.res.severity}`, () => {
+ let trackingSpy;
+
beforeEach(async () => {
createComponent(mockResponse);
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
await waitForPromises(); // Ensure we wrap up the axios call
});
@@ -102,6 +111,24 @@ describe('GitlabVersionCheck', () => {
it(`variant is ${expectedUI.variant}`, () => {
expect(findGlBadge().attributes('variant')).toBe(expectedUI.variant);
});
+
+ it(`tracks rendered_version_badge with label ${expectedUI.title}`, () => {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'rendered_version_badge', {
+ label: expectedUI.title,
+ });
+ });
+
+ it(`link is ${UPGRADE_DOCS_URL}`, () => {
+ expect(findGlBadge().attributes('href')).toBe(UPGRADE_DOCS_URL);
+ });
+
+ it(`tracks click_version_badge with label ${expectedUI.title} when badge is clicked`, async () => {
+ await findGlBadgeClickWrapper().trigger('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_version_badge', {
+ label: expectedUI.title,
+ });
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/gl_countdown_spec.js b/spec/frontend/vue_shared/components/gl_countdown_spec.js
index 0d1d42082ab..af53d256236 100644
--- a/spec/frontend/vue_shared/components/gl_countdown_spec.js
+++ b/spec/frontend/vue_shared/components/gl_countdown_spec.js
@@ -1,10 +1,9 @@
import Vue, { nextTick } from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
import GlCountdown from '~/vue_shared/components/gl_countdown.vue';
describe('GlCountdown', () => {
- const Component = Vue.extend(GlCountdown);
- let vm;
+ let wrapper;
let now = '2000-01-01T00:00:00Z';
beforeEach(() => {
@@ -12,21 +11,20 @@ describe('GlCountdown', () => {
});
afterEach(() => {
- vm.$destroy();
- jest.clearAllTimers();
+ wrapper.destroy();
});
describe('when there is time remaining', () => {
beforeEach(async () => {
- vm = mountComponent(Component, {
- endDateString: '2000-01-01T01:02:03Z',
+ wrapper = mount(GlCountdown, {
+ propsData: {
+ endDateString: '2000-01-01T01:02:03Z',
+ },
});
-
- await nextTick();
});
it('displays remaining time', () => {
- expect(vm.$el.textContent).toContain('01:02:03');
+ expect(wrapper.text()).toContain('01:02:03');
});
it('updates remaining time', async () => {
@@ -34,21 +32,21 @@ describe('GlCountdown', () => {
jest.advanceTimersByTime(1000);
await nextTick();
- expect(vm.$el.textContent).toContain('01:02:02');
+ expect(wrapper.text()).toContain('01:02:02');
});
});
describe('when there is no time remaining', () => {
beforeEach(async () => {
- vm = mountComponent(Component, {
- endDateString: '1900-01-01T00:00:00Z',
+ wrapper = mount(GlCountdown, {
+ propsData: {
+ endDateString: '1900-01-01T00:00:00Z',
+ },
});
-
- await nextTick();
});
it('displays 00:00:00', () => {
- expect(vm.$el.textContent).toContain('00:00:00');
+ expect(wrapper.text()).toContain('00:00:00');
});
});
@@ -62,8 +60,10 @@ describe('GlCountdown', () => {
});
it('throws a validation error', () => {
- vm = mountComponent(Component, {
- endDateString: 'this is invalid',
+ wrapper = mount(GlCountdown, {
+ propsData: {
+ endDateString: 'this is invalid',
+ },
});
expect(Vue.config.warnHandler).toHaveBeenCalledTimes(1);
diff --git a/spec/frontend/vue_shared/components/group_select/utils_spec.js b/spec/frontend/vue_shared/components/group_select/utils_spec.js
new file mode 100644
index 00000000000..5188e1aabf1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/group_select/utils_spec.js
@@ -0,0 +1,24 @@
+import { groupsPath } from '~/vue_shared/components/group_select/utils';
+
+describe('group_select utils', () => {
+ describe('groupsPath', () => {
+ it.each`
+ groupsFilter | parentGroupID | expectedPath
+ ${undefined} | ${undefined} | ${'/api/:version/groups.json'}
+ ${undefined} | ${1} | ${'/api/:version/groups.json'}
+ ${'descendant_groups'} | ${1} | ${'/api/:version/groups/1/descendant_groups'}
+ ${'subgroups'} | ${1} | ${'/api/:version/groups/1/subgroups'}
+ `(
+ 'returns $expectedPath with groupsFilter = $groupsFilter and parentGroupID = $parentGroupID',
+ ({ groupsFilter, parentGroupID, expectedPath }) => {
+ expect(groupsPath(groupsFilter, parentGroupID)).toBe(expectedPath);
+ },
+ );
+ });
+
+ it('throws if groupsFilter is passed but parentGroupID is undefined', () => {
+ expect(() => {
+ groupsPath('descendant_groups');
+ }).toThrow('Cannot use groupsFilter without a parentGroupID');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index 9831908f806..ed417097e1e 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -54,6 +54,8 @@ describe('Markdown field header component', () => {
'Add a bullet list',
'Add a numbered list',
'Add a checklist',
+ 'Indent line (⌘])',
+ 'Outdent line (⌘[)',
'Add a collapsible section',
'Add a table',
'Go full screen',
@@ -140,7 +142,7 @@ describe('Markdown field header component', () => {
const tableButton = findToolbarButtonByProp('icon', 'table');
expect(tableButton.props('tag')).toEqual(
- '| header | header |\n| ------ | ------ |\n| cell | cell |\n| cell | cell |',
+ '| header | header |\n| ------ | ------ |\n| | |\n| | |',
);
});
diff --git a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
new file mode 100644
index 00000000000..f7e93f45148
--- /dev/null
+++ b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
@@ -0,0 +1,289 @@
+import { GlSegmentedControl } from '@gitlab/ui';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { nextTick } from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { EDITING_MODE_MARKDOWN_FIELD, EDITING_MODE_CONTENT_EDITOR } from '~/vue_shared/constants';
+import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
+import ContentEditor from '~/content_editor/components/content_editor.vue';
+import BubbleMenu from '~/content_editor/components/bubble_menus/bubble_menu.vue';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import { stubComponent } from 'helpers/stub_component';
+
+jest.mock('~/emoji');
+
+describe('vue_shared/component/markdown/markdown_editor', () => {
+ let wrapper;
+ const value = 'test markdown';
+ const renderMarkdownPath = '/api/markdown';
+ const markdownDocsPath = '/help/markdown';
+ const quickActionsDocsPath = '/help/quickactions';
+ const enableAutocomplete = true;
+ const enablePreview = false;
+ const formFieldId = 'markdown_field';
+ const formFieldName = 'form[markdown_field]';
+ const formFieldPlaceholder = 'Write some markdown';
+ const formFieldAriaLabel = 'Edit your content';
+ let mock;
+
+ const buildWrapper = ({ propsData = {}, attachTo } = {}) => {
+ wrapper = mountExtended(MarkdownEditor, {
+ attachTo,
+ propsData: {
+ value,
+ renderMarkdownPath,
+ markdownDocsPath,
+ quickActionsDocsPath,
+ enableAutocomplete,
+ enablePreview,
+ formFieldId,
+ formFieldName,
+ formFieldPlaceholder,
+ formFieldAriaLabel,
+ ...propsData,
+ },
+ stubs: {
+ BubbleMenu: stubComponent(BubbleMenu),
+ },
+ });
+ };
+ const findSegmentedControl = () => wrapper.findComponent(GlSegmentedControl);
+ const findMarkdownField = () => wrapper.findComponent(MarkdownField);
+ const findTextarea = () => wrapper.find('textarea');
+ const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
+ const findContentEditor = () => wrapper.findComponent(ContentEditor);
+
+ beforeEach(() => {
+ window.uploads_path = 'uploads';
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ it('displays markdown field by default', () => {
+ buildWrapper({ propsData: { supportsQuickActions: true } });
+
+ expect(findMarkdownField().props()).toEqual(
+ expect.objectContaining({
+ markdownPreviewPath: renderMarkdownPath,
+ quickActionsDocsPath,
+ canAttachFile: true,
+ enableAutocomplete,
+ textareaValue: value,
+ markdownDocsPath,
+ uploadsPath: window.uploads_path,
+ enablePreview,
+ }),
+ );
+ });
+
+ it('renders markdown field textarea', () => {
+ buildWrapper();
+
+ expect(findTextarea().attributes()).toEqual(
+ expect.objectContaining({
+ id: formFieldId,
+ name: formFieldName,
+ placeholder: formFieldPlaceholder,
+ 'aria-label': formFieldAriaLabel,
+ }),
+ );
+
+ expect(findTextarea().element.value).toBe(value);
+ });
+
+ it('renders switch segmented control', () => {
+ buildWrapper();
+
+ expect(findSegmentedControl().props()).toEqual({
+ checked: EDITING_MODE_MARKDOWN_FIELD,
+ options: [
+ {
+ text: expect.any(String),
+ value: EDITING_MODE_MARKDOWN_FIELD,
+ },
+ {
+ text: expect.any(String),
+ value: EDITING_MODE_CONTENT_EDITOR,
+ },
+ ],
+ });
+ });
+
+ describe.each`
+ editingMode
+ ${EDITING_MODE_CONTENT_EDITOR}
+ ${EDITING_MODE_MARKDOWN_FIELD}
+ `('when segmented control emits change event with $editingMode value', ({ editingMode }) => {
+ it(`emits ${editingMode} event`, () => {
+ buildWrapper();
+
+ findSegmentedControl().vm.$emit('change', editingMode);
+
+ expect(wrapper.emitted(editingMode)).toHaveLength(1);
+ });
+ });
+
+ describe(`when editingMode is ${EDITING_MODE_MARKDOWN_FIELD}`, () => {
+ it('emits input event when markdown field textarea changes', async () => {
+ buildWrapper();
+ const newValue = 'new value';
+
+ await findTextarea().setValue(newValue);
+
+ expect(wrapper.emitted('input')).toEqual([[newValue]]);
+ });
+
+ describe('when initOnAutofocus is true', () => {
+ beforeEach(async () => {
+ buildWrapper({ attachTo: document.body, propsData: { initOnAutofocus: true } });
+
+ await nextTick();
+ });
+
+ it('sets the markdown field as the active element in the document', () => {
+ expect(document.activeElement).toBe(findTextarea().element);
+ });
+ });
+
+ it('bubbles up keydown event', async () => {
+ buildWrapper();
+
+ await findTextarea().trigger('keydown');
+
+ expect(wrapper.emitted('keydown')).toHaveLength(1);
+ });
+
+ describe(`when segmented control triggers input event with ${EDITING_MODE_CONTENT_EDITOR} value`, () => {
+ beforeEach(() => {
+ buildWrapper();
+ findSegmentedControl().vm.$emit('input', EDITING_MODE_CONTENT_EDITOR);
+ findSegmentedControl().vm.$emit('change', EDITING_MODE_CONTENT_EDITOR);
+ });
+
+ it('displays the content editor', () => {
+ expect(findContentEditor().props()).toEqual(
+ expect.objectContaining({
+ renderMarkdown: expect.any(Function),
+ uploadsPath: window.uploads_path,
+ markdown: value,
+ autofocus: 'end',
+ }),
+ );
+ });
+
+ it('adds hidden field with current markdown', () => {
+ const hiddenField = wrapper.find(`#${formFieldId}`);
+
+ expect(hiddenField.attributes()).toEqual(
+ expect.objectContaining({
+ id: formFieldId,
+ name: formFieldName,
+ }),
+ );
+ expect(hiddenField.element.value).toBe(value);
+ });
+
+ it('hides the markdown field', () => {
+ expect(findMarkdownField().exists()).toBe(false);
+ });
+
+ it('updates localStorage value', () => {
+ expect(findLocalStorageSync().props().value).toBe(EDITING_MODE_CONTENT_EDITOR);
+ });
+ });
+ });
+
+ describe(`when editingMode is ${EDITING_MODE_CONTENT_EDITOR}`, () => {
+ beforeEach(() => {
+ buildWrapper();
+ findSegmentedControl().vm.$emit('input', EDITING_MODE_CONTENT_EDITOR);
+ });
+
+ describe('when initOnAutofocus is true', () => {
+ beforeEach(() => {
+ buildWrapper({ propsData: { initOnAutofocus: true } });
+ findLocalStorageSync().vm.$emit('input', EDITING_MODE_CONTENT_EDITOR);
+ });
+
+ it('sets the content editor autofocus property to end', () => {
+ expect(findContentEditor().props().autofocus).toBe('end');
+ });
+ });
+
+ it('emits input event when content editor emits change event', async () => {
+ const newValue = 'new value';
+
+ await findContentEditor().vm.$emit('change', { markdown: newValue });
+
+ expect(wrapper.emitted('input')).toEqual([[newValue]]);
+ });
+
+ it('bubbles up keydown event', () => {
+ const event = new Event('keydown');
+
+ findContentEditor().vm.$emit('keydown', event);
+
+ expect(wrapper.emitted('keydown')).toEqual([[event]]);
+ });
+
+ describe(`when segmented control triggers input event with ${EDITING_MODE_MARKDOWN_FIELD} value`, () => {
+ beforeEach(() => {
+ findSegmentedControl().vm.$emit('input', EDITING_MODE_MARKDOWN_FIELD);
+ });
+
+ it('hides the content editor', () => {
+ expect(findContentEditor().exists()).toBe(false);
+ });
+
+ it('shows the markdown field', () => {
+ expect(findMarkdownField().exists()).toBe(true);
+ });
+
+ it('updates localStorage value', () => {
+ expect(findLocalStorageSync().props().value).toBe(EDITING_MODE_MARKDOWN_FIELD);
+ });
+
+ it('sets the textarea as the activeElement in the document', async () => {
+ // The component should be rebuilt to attach it to the document body
+ buildWrapper({ attachTo: document.body });
+ await findSegmentedControl().vm.$emit('input', EDITING_MODE_CONTENT_EDITOR);
+
+ expect(findContentEditor().exists()).toBe(true);
+
+ await findSegmentedControl().vm.$emit('input', EDITING_MODE_MARKDOWN_FIELD);
+ await findSegmentedControl().vm.$emit('change', EDITING_MODE_MARKDOWN_FIELD);
+
+ expect(document.activeElement).toBe(findTextarea().element);
+ });
+ });
+
+ describe('when content editor emits loading event', () => {
+ beforeEach(() => {
+ findContentEditor().vm.$emit('loading');
+ });
+
+ it('disables switch editing mode control', () => {
+ // This is the only way that I found to check the segmented control is disabled
+ expect(findSegmentedControl().find('input[disabled]').exists()).toBe(true);
+ });
+
+ describe.each`
+ event
+ ${'loadingSuccess'}
+ ${'loadingError'}
+ `('when content editor emits $event event', ({ event }) => {
+ beforeEach(() => {
+ findContentEditor().vm.$emit(event);
+ });
+ it('enables the switch editing mode control', () => {
+ expect(findSegmentedControl().find('input[disabled]').exists()).toBe(false);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js b/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
index d792bd46ccd..9c91dc9b5fc 100644
--- a/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/metric_images_table_spec.js
@@ -139,8 +139,7 @@ describe('Metrics upload item', () => {
closeModal();
await waitForPromises();
-
- expect(findModal().attributes('visible')).toBeFalsy();
+ expect(findModal().attributes('visible')).toBeUndefined();
});
it('should delete the image when selected', async () => {
@@ -189,8 +188,7 @@ describe('Metrics upload item', () => {
closeEditModal();
await waitForPromises();
-
- expect(findEditModal().attributes('visible')).toBeFalsy();
+ expect(findEditModal().attributes('visible')).toBeUndefined();
});
it('should delete the image when selected', async () => {
diff --git a/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js b/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
index 518cf354675..537367940e0 100644
--- a/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/store/actions_spec.js
@@ -4,7 +4,7 @@ import actionsFactory from '~/vue_shared/components/metric_images/store/actions'
import * as types from '~/vue_shared/components/metric_images/store/mutation_types';
import createStore from '~/vue_shared/components/metric_images/store';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { fileList, initialData } from '../mock_data';
@@ -35,7 +35,7 @@ describe('Metrics tab store actions', () => {
});
afterEach(() => {
- createFlash.mockClear();
+ createAlert.mockClear();
});
describe('fetching metric images', () => {
@@ -61,7 +61,7 @@ describe('Metrics tab store actions', () => {
[{ type: types.REQUEST_METRIC_IMAGES }, { type: types.RECEIVE_METRIC_IMAGES_ERROR }],
[],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -98,7 +98,7 @@ describe('Metrics tab store actions', () => {
[{ type: types.REQUEST_METRIC_UPLOAD }, { type: types.RECEIVE_METRIC_UPLOAD_ERROR }],
[],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
@@ -129,7 +129,7 @@ describe('Metrics tab store actions', () => {
[{ type: types.REQUEST_METRIC_UPLOAD }, { type: types.RECEIVE_METRIC_UPLOAD_ERROR }],
[],
);
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/vue_shared/components/modal_copy_button_spec.js b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
index b57efc88d57..61e4e774420 100644
--- a/spec/frontend/vue_shared/components/modal_copy_button_spec.js
+++ b/spec/frontend/vue_shared/components/modal_copy_button_spec.js
@@ -17,9 +17,16 @@ describe('modal copy button', () => {
title: 'Copy this value',
id: 'test-id',
},
+ slots: {
+ default: 'test',
+ },
});
});
+ it('should show the default slot', () => {
+ expect(wrapper.text()).toBe('test');
+ });
+
describe('clipboard', () => {
it('should fire a `success` event on click', async () => {
const root = createWrapper(wrapper.vm.$root);
diff --git a/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js b/spec/frontend/vue_shared/components/namespace_select/namespace_select_deprecated_spec.js
index 2c14d65186b..d930ef63dad 100644
--- a/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js
+++ b/spec/frontend/vue_shared/components/namespace_select/namespace_select_deprecated_spec.js
@@ -11,14 +11,14 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NamespaceSelect, {
i18n,
EMPTY_NAMESPACE_ID,
-} from '~/vue_shared/components/namespace_select/namespace_select.vue';
+} from '~/vue_shared/components/namespace_select/namespace_select_deprecated.vue';
import { userNamespaces, groupNamespaces } from './mock_data';
const FLAT_NAMESPACES = [...userNamespaces, ...groupNamespaces];
const EMPTY_NAMESPACE_TITLE = 'Empty namespace TEST';
const EMPTY_NAMESPACE_ITEM = { id: EMPTY_NAMESPACE_ID, humanName: EMPTY_NAMESPACE_TITLE };
-describe('Namespace Select', () => {
+describe('NamespaceSelectDeprecated', () => {
let wrapper;
const createComponent = (props = {}) =>
@@ -207,9 +207,9 @@ describe('Namespace Select', () => {
expect(wrapper.emitted('load-more-groups')).toEqual([[]]);
});
- describe('when `isLoadingMoreGroups` prop is `true`', () => {
+ describe('when `isLoading` prop is `true`', () => {
it('renders a loading icon', () => {
- wrapper = createComponent({ hasNextPageOfGroups: true, isLoadingMoreGroups: true });
+ wrapper = createComponent({ hasNextPageOfGroups: true, isLoading: true });
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
@@ -223,4 +223,14 @@ describe('Namespace Select', () => {
expect(wrapper.findComponent(GlSearchBoxByType).props('isLoading')).toBe(true);
});
});
+
+ describe('when dropdown is opened', () => {
+ it('emits `show` event', () => {
+ wrapper = createComponent();
+
+ findDropdown().vm.$emit('show');
+
+ expect(wrapper.emitted('show')).toEqual([[]]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
index bf6c8e8c704..3bac96069ec 100644
--- a/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
+++ b/spec/frontend/vue_shared/components/notes/__snapshots__/placeholder_note_spec.js.snap
@@ -2,13 +2,12 @@
exports[`Issue placeholder note component matches snapshot 1`] = `
<timeline-entry-item-stub
- class="note note-wrapper being-posted fade-in-half"
+ class="note note-wrapper note-comment being-posted fade-in-half"
>
<div
- class="timeline-icon"
+ class="timeline-avatar gl-float-left"
>
<gl-avatar-link-stub
- class="gl-mr-3"
href="/root"
>
<gl-avatar-stub
@@ -16,7 +15,7 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
entityid="0"
entityname="root"
shape="circle"
- size="[object Object]"
+ size="32"
src="mock_path"
/>
</gl-avatar-link-stub>
@@ -50,16 +49,20 @@ exports[`Issue placeholder note component matches snapshot 1`] = `
</div>
<div
- class="note-body"
+ class="timeline-discussion-body"
>
<div
- class="note-text md"
+ class="note-body"
>
- <p>
- Foo
- </p>
-
+ <div
+ class="note-text md"
+ >
+ <p>
+ Foo
+ </p>
+
+ </div>
</div>
</div>
</div>
diff --git a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
index b86c8946e96..8f9f1bb336f 100644
--- a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import { GlAvatar } from '@gitlab/ui';
import Vue from 'vue';
import Vuex from 'vuex';
import IssuePlaceholderNote from '~/vue_shared/components/notes/placeholder_note.vue';
@@ -53,17 +52,4 @@ describe('Issue placeholder note component', () => {
expect(findNote().classes()).toContain('discussion');
});
-
- describe('avatar size', () => {
- it.each`
- size | line | isOverviewTab
- ${{ default: 24, md: 32 }} | ${null} | ${false}
- ${24} | ${{ line_code: '123' }} | ${false}
- ${{ default: 24, md: 32 }} | ${{ line_code: '123' }} | ${true}
- `('renders avatar $size for $line and $isOverviewTab', ({ size, line, isOverviewTab }) => {
- createComponent(false, { line, isOverviewTab });
-
- expect(wrapper.findComponent(GlAvatar).props('size')).toEqual(size);
- });
- });
});
diff --git a/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
index b3be2f8a775..112cdaf74c6 100644
--- a/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
+++ b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
@@ -2,6 +2,7 @@ import { GlPagination, GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import PaginationBar from '~/vue_shared/components/pagination_bar/pagination_bar.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
describe('Pagination bar', () => {
const DEFAULT_PROPS = {
@@ -20,6 +21,7 @@ describe('Pagination bar', () => {
...DEFAULT_PROPS,
...propsData,
},
+ stubs: { LocalStorageSync: true },
});
};
@@ -90,4 +92,28 @@ describe('Pagination bar', () => {
'Showing 21 - 40 of 1000+',
);
});
+
+ describe('local storage sync', () => {
+ it('does not perform local storage sync when no storage key is provided', () => {
+ createComponent();
+
+ expect(wrapper.findComponent(LocalStorageSync).exists()).toBe(false);
+ });
+
+ it('passes current page size to local storage sync when storage key is provided', () => {
+ const STORAGE_KEY = 'fakeStorageKey';
+ createComponent({ storageKey: STORAGE_KEY });
+
+ expect(wrapper.getComponent(LocalStorageSync).props('storageKey')).toBe(STORAGE_KEY);
+ });
+
+ it('emits set-page event when local storage sync provides new value', () => {
+ const SAVED_SIZE = 50;
+ createComponent({ storageKey: 'some storage key' });
+
+ wrapper.getComponent(LocalStorageSync).vm.$emit('input', SAVED_SIZE);
+
+ expect(wrapper.emitted('set-page-size')).toEqual([[SAVED_SIZE]]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/panel_resizer_spec.js b/spec/frontend/vue_shared/components/panel_resizer_spec.js
index d8b903e5bfd..0e261124cbf 100644
--- a/spec/frontend/vue_shared/components/panel_resizer_spec.js
+++ b/spec/frontend/vue_shared/components/panel_resizer_spec.js
@@ -1,12 +1,10 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import panelResizer from '~/vue_shared/components/panel_resizer.vue';
+import { mount } from '@vue/test-utils';
+import PanelResizer from '~/vue_shared/components/panel_resizer.vue';
describe('Panel Resizer component', () => {
- let vm;
- let PanelResizer;
+ let wrapper;
- const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
+ const triggerEvent = (eventName, el = wrapper.element, clientX = 0) => {
const event = document.createEvent('MouseEvents');
event.initMouseEvent(
eventName,
@@ -29,57 +27,64 @@ describe('Panel Resizer component', () => {
el.dispatchEvent(event);
};
- beforeEach(() => {
- PanelResizer = Vue.extend(panelResizer);
- });
-
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('should render a div element with the correct classes and styles', () => {
- vm = mountComponent(PanelResizer, {
- startSize: 100,
- side: 'left',
+ wrapper = mount(PanelResizer, {
+ propsData: {
+ startSize: 100,
+ side: 'left',
+ },
});
- expect(vm.$el.tagName).toEqual('DIV');
- expect(vm.$el.getAttribute('class')).toBe(
- 'position-absolute position-top-0 position-bottom-0 drag-handle position-left-0',
- );
+ expect(wrapper.element.tagName).toEqual('DIV');
+ expect(wrapper.classes().sort()).toStrictEqual([
+ 'drag-handle',
+ 'position-absolute',
+ 'position-bottom-0',
+ 'position-left-0',
+ 'position-top-0',
+ ]);
- expect(vm.$el.getAttribute('style')).toBe('cursor: ew-resize;');
+ expect(wrapper.element.getAttribute('style')).toBe('cursor: ew-resize;');
});
it('should render a div element with the correct classes for a right side panel', () => {
- vm = mountComponent(PanelResizer, {
- startSize: 100,
- side: 'right',
+ wrapper = mount(PanelResizer, {
+ propsData: {
+ startSize: 100,
+ side: 'right',
+ },
});
- expect(vm.$el.tagName).toEqual('DIV');
- expect(vm.$el.getAttribute('class')).toBe(
- 'position-absolute position-top-0 position-bottom-0 drag-handle position-right-0',
- );
+ expect(wrapper.element.tagName).toEqual('DIV');
+ expect(wrapper.classes().sort()).toStrictEqual([
+ 'drag-handle',
+ 'position-absolute',
+ 'position-bottom-0',
+ 'position-right-0',
+ 'position-top-0',
+ ]);
});
it('drag the resizer', () => {
- vm = mountComponent(PanelResizer, {
- startSize: 100,
- side: 'left',
+ wrapper = mount(PanelResizer, {
+ propsData: {
+ startSize: 100,
+ side: 'left',
+ },
});
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- triggerEvent('mousedown', vm.$el);
+ triggerEvent('mousedown');
triggerEvent('mousemove', document);
triggerEvent('mouseup', document);
- expect(vm.$emit.mock.calls).toEqual([
- ['resize-start', 100],
- ['update:size', 100],
- ['resize-end', 100],
- ]);
-
- expect(vm.size).toBe(100);
+ expect(wrapper.emitted()).toEqual({
+ 'resize-start': [[100]],
+ 'update:size': [[100]],
+ 'resize-end': [[100]],
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap b/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap
index 2abae33bc19..66cf2354bc7 100644
--- a/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap
+++ b/spec/frontend/vue_shared/components/registry/__snapshots__/history_item_spec.js.snap
@@ -2,7 +2,7 @@
exports[`History Item renders the correct markup 1`] = `
<li
- class="timeline-entry system-note note-wrapper gl-mb-6!"
+ class="timeline-entry system-note note-wrapper"
>
<div
class="timeline-entry-inner"
@@ -22,11 +22,13 @@ exports[`History Item renders the correct markup 1`] = `
<div
class="note-header"
>
- <span>
+ <div
+ class="note-header-info"
+ >
<div
data-testid="default-slot"
/>
- </span>
+ </div>
</div>
<div
diff --git a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
index c5672bc28cc..09b0b3d43ad 100644
--- a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
+++ b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
@@ -6,7 +6,7 @@ import {
expectedDownloadDropdownPropsWithTitle,
securityReportMergeRequestDownloadPathsQueryResponse,
} from 'jest/vue_shared/security_reports/mock_data';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import Component from '~/vue_shared/security_reports/components/artifact_downloads/merge_request_artifact_download.vue';
import SecurityReportDownloadDropdown from '~/vue_shared/security_reports/components/security_report_download_dropdown.vue';
import {
@@ -93,8 +93,8 @@ describe('Merge request artifact Download', () => {
});
});
- it('calls createFlash correctly', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ it('calls createAlert correctly', () => {
+ expect(createAlert).toHaveBeenCalledWith({
message: Component.i18n.apiError,
captureError: true,
error: expect.any(Error),
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
index 4c7ac6e9a6f..30c1a4b7d2f 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
@@ -67,9 +67,9 @@ describe('LabelsSelectRoot', () => {
// We're utilizing `onDropdownClose` event emitted from the component to always include `touchedLabels`
// while the first param of the method is the labels list which were added/removed.
- expect(wrapper.emitted('updateSelectedLabels')).toBeTruthy();
+ expect(wrapper.emitted('updateSelectedLabels')).toHaveLength(1);
expect(wrapper.emitted('updateSelectedLabels')[0]).toEqual([touchedLabels]);
- expect(wrapper.emitted('onDropdownClose')).toBeTruthy();
+ expect(wrapper.emitted('onDropdownClose')).toHaveLength(1);
expect(wrapper.emitted('onDropdownClose')[0]).toEqual([touchedLabels]);
});
@@ -88,7 +88,7 @@ describe('LabelsSelectRoot', () => {
},
);
- expect(wrapper.emitted('updateSelectedLabels')).toBeTruthy();
+ expect(wrapper.emitted('updateSelectedLabels')).toHaveLength(1);
expect(wrapper.emitted('updateSelectedLabels')[0]).toEqual([
[
{
@@ -97,7 +97,7 @@ describe('LabelsSelectRoot', () => {
},
],
]);
- expect(wrapper.emitted('onDropdownClose')).toBeTruthy();
+ expect(wrapper.emitted('onDropdownClose')).toHaveLength(1);
expect(wrapper.emitted('onDropdownClose')[0]).toEqual([[]]);
});
});
@@ -106,8 +106,7 @@ describe('LabelsSelectRoot', () => {
it('emits `toggleCollapse` event on component', () => {
createComponent();
wrapper.vm.handleCollapsedValueClick();
-
- expect(wrapper.emitted().toggleCollapse).toBeTruthy();
+ expect(wrapper.emitted().toggleCollapse).toHaveLength(1);
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
index 2bc513e87bf..edd044bd754 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as actions from '~/vue_shared/components/sidebar/labels_select_vue/store/actions';
import * as types from '~/vue_shared/components/sidebar/labels_select_vue/store/mutation_types';
@@ -102,7 +102,7 @@ describe('LabelsSelect Actions', () => {
it('shows flash error', () => {
actions.receiveLabelsFailure({ commit: () => {} });
- expect(createFlash).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
+ expect(createAlert).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
});
});
@@ -186,7 +186,7 @@ describe('LabelsSelect Actions', () => {
it('shows flash error', () => {
actions.receiveCreateLabelFailure({ commit: () => {} });
- expect(createFlash).toHaveBeenCalledWith({ message: 'Error creating label.' });
+ expect(createAlert).toHaveBeenCalledWith({ message: 'Error creating label.' });
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
index 1819e750324..2b2508b5e11 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -189,10 +189,20 @@ describe('LabelsSelect Mutations', () => {
});
labelGroupIds.forEach((l) => {
- expect(state.labels[l.id - 1].touched).toBeFalsy();
+ expect(state.labels[l.id - 1].touched).toBeUndefined();
expect(state.labels[l.id - 1].set).toBe(false);
});
});
+ it('allows selection of multiple scoped labels', () => {
+ const state = { labels: cloneDeep(labels), allowMultipleScopedLabels: true };
+
+ mutations[types.UPDATE_SELECTED_LABELS](state, { labels: [{ id: labels[4].id }] });
+ mutations[types.UPDATE_SELECTED_LABELS](state, { labels: [{ id: labels[5].id }] });
+
+ expect(state.labels[4].set).toBe(true);
+ expect(state.labels[5].set).toBe(true);
+ expect(state.labels[6].set).toBe(true);
+ });
});
describe(`${types.UPDATE_LABELS_SET_STATE}`, () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
index 9c29f304c71..237f174e048 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view_spec.js
@@ -4,7 +4,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { workspaceLabelsQueries } from '~/sidebar/constants';
import DropdownContentsCreateView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_create_view.vue';
import createLabelMutation from '~/vue_shared/components/sidebar/labels_select_widget/graphql/create_label.mutation.graphql';
@@ -202,7 +202,7 @@ describe('DropdownContentsCreateView', () => {
});
});
- it('calls createFlash is mutation has a user-recoverable error', async () => {
+ it('calls createAlert is mutation has a user-recoverable error', async () => {
createComponent({ mutationHandler: createLabelUserRecoverableErrorHandler });
fillLabelAttributes();
await nextTick();
@@ -210,10 +210,10 @@ describe('DropdownContentsCreateView', () => {
findCreateButton().vm.$emit('click');
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
- it('calls createFlash is mutation was rejected', async () => {
+ it('calls createAlert is mutation was rejected', async () => {
createComponent({ mutationHandler: createLabelErrorHandler });
fillLabelAttributes();
await nextTick();
@@ -221,7 +221,7 @@ describe('DropdownContentsCreateView', () => {
findCreateButton().vm.$emit('click');
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('displays error in alert if label title is already taken', async () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
index 7f6770e0bea..5d8ad5ddee5 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view_spec.js
@@ -9,7 +9,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_widget/constants';
import DropdownContentsLabelsView from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents_labels_view.vue';
@@ -143,13 +143,13 @@ describe('DropdownContentsLabelsView', () => {
expect(findNoResultsMessage().isVisible()).toBe(true);
});
- it('calls `createFlash` when fetching labels failed', async () => {
+ it('calls `createAlert` when fetching labels failed', async () => {
createComponent({ queryHandler: jest.fn().mockRejectedValue('Houston, we have a problem!') });
await makeObserverAppear();
jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
await waitForPromises();
- expect(createFlash).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalled();
});
it('emits an `input` event on label click', async () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
index cad401e0013..b58c44645d6 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
@@ -3,7 +3,7 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { IssuableType } from '~/issues/constants';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
import DropdownContents from '~/vue_shared/components/sidebar/labels_select_widget/dropdown_contents.vue';
@@ -151,7 +151,7 @@ describe('LabelsSelectRoot', () => {
it('creates flash with error message when query is rejected', async () => {
createComponent({ queryHandler: errorQueryHandler });
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
+ expect(createAlert).toHaveBeenCalledWith({ message: 'Error fetching labels.' });
});
});
@@ -197,7 +197,7 @@ describe('LabelsSelectRoot', () => {
findDropdownContents().vm.$emit('setLabels', [label]);
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({
+ expect(createAlert).toHaveBeenCalledWith({
captureError: true,
error: expect.anything(),
message: 'An error occurred while updating labels.',
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js
index fd3ff9ce892..f661bd6747a 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js
@@ -1,10 +1,5 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ChunkLine from '~/vue_shared/components/source_viewer/components/chunk_line.vue';
-import {
- BIDI_CHARS,
- BIDI_CHARS_CLASS_LIST,
- BIDI_CHAR_TOOLTIP,
-} from '~/vue_shared/components/source_viewer/constants';
const DEFAULT_PROPS = {
number: 2,
@@ -31,7 +26,6 @@ describe('Chunk Line component', () => {
const findLineLink = () => wrapper.find('.file-line-num');
const findBlameLink = () => wrapper.find('.file-line-blame');
const findContent = () => wrapper.findByTestId('content');
- const findWrappedBidiChars = () => wrapper.findAllByTestId('bidi-wrapper');
beforeEach(() => {
createComponent();
@@ -40,22 +34,6 @@ describe('Chunk Line component', () => {
afterEach(() => wrapper.destroy());
describe('rendering', () => {
- it('wraps BiDi characters', () => {
- const content = `// some content ${BIDI_CHARS.toString()} with BiDi chars`;
- createComponent({ content });
- const wrappedBidiChars = findWrappedBidiChars();
-
- expect(wrappedBidiChars.length).toBe(BIDI_CHARS.length);
-
- wrappedBidiChars.wrappers.forEach((_, i) => {
- expect(wrappedBidiChars.at(i).text()).toBe(BIDI_CHARS[i]);
- expect(wrappedBidiChars.at(i).attributes()).toMatchObject({
- class: BIDI_CHARS_CLASS_LIST,
- title: BIDI_CHAR_TOOLTIP,
- });
- });
- });
-
it('renders a blame link', () => {
expect(findBlameLink().attributes()).toMatchObject({
href: `${DEFAULT_PROPS.blamePath}#L${DEFAULT_PROPS.number}`,
diff --git a/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
new file mode 100644
index 00000000000..4a995e2fde1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/highlight_util_spec.js
@@ -0,0 +1,44 @@
+import hljs from 'highlight.js/lib/core';
+import languageLoader from '~/content_editor/services/highlight_js_language_loader';
+import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
+import { highlight } from '~/vue_shared/components/source_viewer/workers/highlight_utils';
+
+jest.mock('highlight.js/lib/core', () => ({
+ highlight: jest.fn().mockReturnValue({}),
+ registerLanguage: jest.fn(),
+}));
+
+jest.mock('~/content_editor/services/highlight_js_language_loader', () => ({
+ javascript: jest.fn().mockReturnValue({ default: jest.fn() }),
+}));
+
+jest.mock('~/vue_shared/components/source_viewer/plugins/index', () => ({
+ registerPlugins: jest.fn(),
+}));
+
+const fileType = 'text';
+const content = 'function test() { return true };';
+const language = 'javascript';
+
+describe('Highlight utility', () => {
+ beforeEach(() => highlight(fileType, content, language));
+
+ it('loads the language', () => {
+ expect(languageLoader.javascript).toHaveBeenCalled();
+ });
+
+ it('registers the plugins', () => {
+ expect(registerPlugins).toHaveBeenCalled();
+ });
+
+ it('registers the language', () => {
+ expect(hljs.registerLanguage).toHaveBeenCalledWith(
+ language,
+ languageLoader[language]().default,
+ );
+ });
+
+ it('highlights the content', () => {
+ expect(hljs.highlight).toHaveBeenCalledWith(content, { language });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js
index 83fdc5d669d..57045ca54ae 100644
--- a/spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/index_spec.js
@@ -1,14 +1,18 @@
-import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
-import { HLJS_ON_AFTER_HIGHLIGHT } from '~/vue_shared/components/source_viewer/constants';
-import wrapComments from '~/vue_shared/components/source_viewer/plugins/wrap_comments';
+import {
+ registerPlugins,
+ HLJS_ON_AFTER_HIGHLIGHT,
+} from '~/vue_shared/components/source_viewer/plugins/index';
+import wrapChildNodes from '~/vue_shared/components/source_viewer/plugins/wrap_child_nodes';
+import wrapBidiChars from '~/vue_shared/components/source_viewer/plugins/wrap_bidi_chars';
-jest.mock('~/vue_shared/components/source_viewer/plugins/wrap_comments');
+jest.mock('~/vue_shared/components/source_viewer/plugins/wrap_child_nodes');
const hljsMock = { addPlugin: jest.fn() };
describe('Highlight.js plugin registration', () => {
beforeEach(() => registerPlugins(hljsMock));
it('registers our plugins', () => {
- expect(hljsMock.addPlugin).toHaveBeenCalledWith({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapComments });
+ expect(hljsMock.addPlugin).toHaveBeenCalledWith({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapBidiChars });
+ expect(hljsMock.addPlugin).toHaveBeenCalledWith({ [HLJS_ON_AFTER_HIGHLIGHT]: wrapChildNodes });
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
index 8079d5ad99a..e4ce07ec668 100644
--- a/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
@@ -15,7 +15,7 @@ describe('createLink', () => {
it('escapes the user-controlled content', () => {
const unescapedXSS = '<script>XSS</script>';
const escapedPackageName = '&lt;script&gt;XSS&lt;/script&gt;';
- const escapedHref = '&amp;lt;script&amp;gt;XSS&amp;lt;/script&amp;gt;';
+ const escapedHref = '&lt;script&gt;XSS&lt;/script&gt;';
const href = `http://test.com/${unescapedXSS}`;
const innerText = `testing${unescapedXSS}`;
const result = `<a href="http://test.com/${escapedHref}" rel="nofollow noreferrer noopener">testing${escapedPackageName}</a>`;
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_bidi_chars_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_bidi_chars_spec.js
new file mode 100644
index 00000000000..f40f8b22627
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_bidi_chars_spec.js
@@ -0,0 +1,17 @@
+import wrapBidiChars from '~/vue_shared/components/source_viewer/plugins/wrap_bidi_chars';
+import {
+ BIDI_CHARS,
+ BIDI_CHARS_CLASS_LIST,
+ BIDI_CHAR_TOOLTIP,
+} from '~/vue_shared/components/source_viewer/constants';
+
+describe('Highlight.js plugin for wrapping BiDi characters', () => {
+ it.each(BIDI_CHARS)('wraps %s BiDi char', (bidiChar) => {
+ const inputValue = `// some content ${bidiChar} with BiDi chars`;
+ const outputValue = `// some content <span class="${BIDI_CHARS_CLASS_LIST}" title="${BIDI_CHAR_TOOLTIP}">${bidiChar}</span>`;
+ const hljsResultMock = { value: inputValue };
+
+ wrapBidiChars(hljsResultMock);
+ expect(hljsResultMock.value).toContain(outputValue);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js
new file mode 100644
index 00000000000..bc6df1a2565
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_child_nodes_spec.js
@@ -0,0 +1,22 @@
+import wrapChildNodes from '~/vue_shared/components/source_viewer/plugins/wrap_child_nodes';
+
+describe('Highlight.js plugin for wrapping _emitter nodes', () => {
+ it('mutates the input value by wrapping each node in a span tag', () => {
+ const hljsResultMock = {
+ _emitter: {
+ rootNode: {
+ children: [
+ { kind: 'string', children: ['Text 1'] },
+ { kind: 'string', children: ['Text 2', { kind: 'comment', children: ['Text 3'] }] },
+ 'Text4\nText5',
+ ],
+ },
+ },
+ };
+
+ const outputValue = `<span class="hljs-string">Text 1</span><span class="hljs-string"><span class="hljs-string">Text 2</span><span class="hljs-comment">Text 3</span></span><span class="">Text4</span>\n<span class="">Text5</span>`;
+
+ wrapChildNodes(hljsResultMock);
+ expect(hljsResultMock.value).toBe(outputValue);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js
deleted file mode 100644
index 5fd4182da29..00000000000
--- a/spec/frontend/vue_shared/components/source_viewer/plugins/wrap_comments_spec.js
+++ /dev/null
@@ -1,29 +0,0 @@
-import { HLJS_COMMENT_SELECTOR } from '~/vue_shared/components/source_viewer/constants';
-import wrapComments from '~/vue_shared/components/source_viewer/plugins/wrap_comments';
-
-describe('Highlight.js plugin for wrapping comments', () => {
- it('mutates the input value by wrapping each line in a span tag', () => {
- const inputValue = `<span class="${HLJS_COMMENT_SELECTOR}">/* Line 1 \n* Line 2 \n*/</span>`;
- const outputValue = `<span class="${HLJS_COMMENT_SELECTOR}">/* Line 1 \n<span class="${HLJS_COMMENT_SELECTOR}">* Line 2 </span>\n<span class="${HLJS_COMMENT_SELECTOR}">*/</span>`;
- const hljsResultMock = { value: inputValue };
-
- wrapComments(hljsResultMock);
- expect(hljsResultMock.value).toBe(outputValue);
- });
-
- it('does not mutate the input value if the hljs comment selector is not present', () => {
- const inputValue = '<span class="hljs-keyword">const</span>';
- const hljsResultMock = { value: inputValue };
-
- wrapComments(hljsResultMock);
- expect(hljsResultMock.value).toBe(inputValue);
- });
-
- it('does not mutate the input value if the hljs comment line includes a closing tag', () => {
- const inputValue = `<span class="${HLJS_COMMENT_SELECTOR}">/* Line 1 </span> \n* Line 2 \n*/`;
- const hljsResultMock = { value: inputValue };
-
- wrapComments(hljsResultMock);
- expect(hljsResultMock.value).toBe(inputValue);
- });
-});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index e020d9a557e..6d319b37b02 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -22,10 +22,10 @@ jest.mock('~/vue_shared/components/source_viewer/plugins/index');
Vue.use(VueRouter);
const router = new VueRouter();
-const generateContent = (content, totalLines = 1) => {
+const generateContent = (content, totalLines = 1, delimiter = '\n') => {
let generatedContent = '';
for (let i = 0; i < totalLines; i += 1) {
- generatedContent += `Line: ${i + 1} = ${content}\n`;
+ generatedContent += `Line: ${i + 1} = ${content}${delimiter}`;
}
return generatedContent;
};
@@ -38,7 +38,9 @@ describe('Source Viewer component', () => {
const mappedLanguage = ROUGE_TO_HLJS_LANGUAGE_MAP[language];
const chunk1 = generateContent('// Some source code 1', 70);
const chunk2 = generateContent('// Some source code 2', 70);
- const content = chunk1 + chunk2;
+ const chunk3 = generateContent('// Some source code 3', 70, '\r\n');
+ const chunk3Result = generateContent('// Some source code 3', 70, '\n');
+ const content = chunk1 + chunk2 + chunk3;
const path = 'some/path.js';
const blamePath = 'some/blame/path.js';
const fileType = 'javascript';
@@ -152,6 +154,19 @@ describe('Source Viewer component', () => {
startingFrom: 70,
});
});
+
+ it('renders the third chunk', async () => {
+ const thirdChunk = findChunks().at(2);
+
+ expect(thirdChunk.props('content')).toContain(chunk3Result.trim());
+
+ expect(chunk3Result).toEqual(chunk3.replace(/\r?\n/g, '\n'));
+
+ expect(thirdChunk.props()).toMatchObject({
+ totalLines: 70,
+ startingFrom: 140,
+ });
+ });
});
it('emits showBlobInteractionZones on the eventHub when chunk appears', () => {
diff --git a/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js b/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js
index c6f01efa71a..79b1f17afa0 100644
--- a/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js
+++ b/spec/frontend/vue_shared/components/stacked_progress_bar_spec.js
@@ -1,121 +1,109 @@
-import Vue from 'vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
-import stackedProgressBarComponent from '~/vue_shared/components/stacked_progress_bar.vue';
-
-const createComponent = (config) => {
- const Component = Vue.extend(stackedProgressBarComponent);
- const defaultConfig = {
- successLabel: 'Synced',
- failureLabel: 'Failed',
- neutralLabel: 'Out of sync',
- successCount: 25,
- failureCount: 10,
- totalCount: 5000,
- ...config,
- };
-
- return mountComponent(Component, defaultConfig);
-};
+import { mount } from '@vue/test-utils';
+import StackedProgressBarComponent from '~/vue_shared/components/stacked_progress_bar.vue';
describe('StackedProgressBarComponent', () => {
- let vm;
-
- beforeEach(() => {
- vm = createComponent();
- });
+ let wrapper;
+
+ const createComponent = (config) => {
+ const defaultConfig = {
+ successLabel: 'Synced',
+ failureLabel: 'Failed',
+ neutralLabel: 'Out of sync',
+ successCount: 25,
+ failureCount: 10,
+ totalCount: 5000,
+ ...config,
+ };
+
+ wrapper = mount(StackedProgressBarComponent, { propsData: defaultConfig });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- const findSuccessBarText = (wrapper) =>
- wrapper.$el.querySelector('.status-green').innerText.trim();
- const findNeutralBarText = (wrapper) =>
- wrapper.$el.querySelector('.status-neutral').innerText.trim();
- const findFailureBarText = (wrapper) => wrapper.$el.querySelector('.status-red').innerText.trim();
- const findUnavailableBarText = (wrapper) =>
- wrapper.$el.querySelector('.status-unavailable').innerText.trim();
-
- describe('computed', () => {
- describe('neutralCount', () => {
- it('returns neutralCount based on totalCount, successCount and failureCount', () => {
- expect(vm.neutralCount).toBe(4965); // 5000 - 25 - 10
- });
- });
- });
+ const findSuccessBar = () => wrapper.find('.status-green');
+ const findNeutralBar = () => wrapper.find('.status-neutral');
+ const findFailureBar = () => wrapper.find('.status-red');
+ const findUnavailableBar = () => wrapper.find('.status-unavailable');
describe('template', () => {
it('renders container element', () => {
- expect(vm.$el.classList.contains('stacked-progress-bar')).toBeTruthy();
+ createComponent();
+
+ expect(wrapper.classes()).toContain('stacked-progress-bar');
});
it('renders empty state when count is unavailable', () => {
- const vmX = createComponent({ totalCount: 0, successCount: 0, failureCount: 0 });
+ createComponent({ totalCount: 0, successCount: 0, failureCount: 0 });
- expect(findUnavailableBarText(vmX)).not.toBeUndefined();
+ expect(findUnavailableBar()).not.toBeUndefined();
});
it('renders bar elements when count is available', () => {
- expect(findSuccessBarText(vm)).not.toBeUndefined();
- expect(findNeutralBarText(vm)).not.toBeUndefined();
- expect(findFailureBarText(vm)).not.toBeUndefined();
+ createComponent();
+
+ expect(findSuccessBar().exists()).toBe(true);
+ expect(findNeutralBar().exists()).toBe(true);
+ expect(findFailureBar().exists()).toBe(true);
});
describe('getPercent', () => {
it('returns correct percentages from provided count based on `totalCount`', () => {
- vm = createComponent({ totalCount: 100, successCount: 25, failureCount: 10 });
+ createComponent({ totalCount: 100, successCount: 25, failureCount: 10 });
- expect(findSuccessBarText(vm)).toBe('25%');
- expect(findNeutralBarText(vm)).toBe('65%');
- expect(findFailureBarText(vm)).toBe('10%');
+ expect(findSuccessBar().text()).toBe('25%');
+ expect(findNeutralBar().text()).toBe('65%');
+ expect(findFailureBar().text()).toBe('10%');
});
it('returns percentage with decimal place when decimal is greater than 1', () => {
- vm = createComponent({ successCount: 67 });
+ createComponent({ successCount: 67 });
- expect(findSuccessBarText(vm)).toBe('1.3%');
+ expect(findSuccessBar().text()).toBe('1.3%');
});
it('returns percentage as `< 1%` from provided count based on `totalCount` when evaluated value is less than 1', () => {
- vm = createComponent({ successCount: 10 });
+ createComponent({ successCount: 10 });
- expect(findSuccessBarText(vm)).toBe('< 1%');
+ expect(findSuccessBar().text()).toBe('< 1%');
});
it('returns not available if totalCount is falsy', () => {
- vm = createComponent({ totalCount: 0 });
+ createComponent({ totalCount: 0 });
- expect(findUnavailableBarText(vm)).toBe('Not available');
+ expect(findUnavailableBar().text()).toBe('Not available');
});
it('returns 99.9% when numbers are extreme decimals', () => {
- vm = createComponent({ totalCount: 1000000 });
+ createComponent({ totalCount: 1000000 });
- expect(findNeutralBarText(vm)).toBe('99.9%');
+ expect(findNeutralBar().text()).toBe('99.9%');
});
});
- describe('barStyle', () => {
- it('returns style string based on percentage provided', () => {
- expect(vm.barStyle(50)).toBe('width: 50%;');
+ describe('bar style', () => {
+ it('renders width based on percentage provided', () => {
+ createComponent({ totalCount: 100, successCount: 25 });
+
+ expect(findSuccessBar().element.style.width).toBe('25%');
});
});
- describe('getTooltip', () => {
+ describe('tooltip', () => {
describe('when hideTooltips is false', () => {
it('returns label string based on label and count provided', () => {
- expect(vm.getTooltip('Synced', 10)).toBe('Synced: 10');
+ createComponent({ successCount: 10, successLabel: 'Synced', hideTooltips: false });
+
+ expect(findSuccessBar().attributes('title')).toBe('Synced: 10');
});
});
describe('when hideTooltips is true', () => {
- beforeEach(() => {
- vm = createComponent({ hideTooltips: true });
- });
-
it('returns an empty string', () => {
- expect(vm.getTooltip('Synced', 10)).toBe('');
+ createComponent({ successCount: 10, successLabel: 'Synced', hideTooltips: true });
+
+ expect(findSuccessBar().attributes('title')).toBe('');
});
});
});
diff --git a/spec/frontend/vue_shared/components/timezone_dropdown/helpers.js b/spec/frontend/vue_shared/components/timezone_dropdown/helpers.js
new file mode 100644
index 00000000000..dee4c92add4
--- /dev/null
+++ b/spec/frontend/vue_shared/components/timezone_dropdown/helpers.js
@@ -0,0 +1,6 @@
+import timezoneDataFixture from 'test_fixtures/timezones/short.json';
+
+export { timezoneDataFixture };
+
+export const findTzByName = (identifier = '') =>
+ timezoneDataFixture.find(({ name }) => name.toLowerCase() === identifier.toLowerCase());
diff --git a/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js b/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
index 567d18f8b92..e5f56c63031 100644
--- a/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
@@ -1,27 +1,20 @@
import { GlDropdownItem, GlDropdown } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-import Vuex from 'vuex';
-import createStore from '~/deploy_freeze/store';
-import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown.vue';
-import { findTzByName, formatTz, timezoneDataFixture } from '../helpers';
-
-Vue.use(Vuex);
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown/timezone_dropdown.vue';
+import { formatTimezone } from '~/lib/utils/datetime_utility';
+import { findTzByName, timezoneDataFixture } from './helpers';
describe('Deploy freeze timezone dropdown', () => {
let wrapper;
let store;
const createComponent = (searchTerm, selectedTimezone) => {
- store = createStore({
- projectId: '8',
- timezoneData: timezoneDataFixture,
- });
- wrapper = shallowMount(TimezoneDropdown, {
+ wrapper = shallowMountExtended(TimezoneDropdown, {
store,
propsData: {
value: selectedTimezone,
timezoneData: timezoneDataFixture,
+ name: 'user[timezone]',
},
});
@@ -32,6 +25,8 @@ describe('Deploy freeze timezone dropdown', () => {
const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
+ const findEmptyResultsItem = () => wrapper.findByTestId('noMatchingResults');
+ const findHiddenInput = () => wrapper.find('input');
afterEach(() => {
wrapper.destroy();
@@ -66,11 +61,11 @@ describe('Deploy freeze timezone dropdown', () => {
it('renders only the time zone searched for', () => {
const selectedTz = findTzByName('Alaska');
expect(findAllDropdownItems()).toHaveLength(1);
- expect(findDropdownItemByIndex(0).text()).toBe(formatTz(selectedTz));
+ expect(findDropdownItemByIndex(0).text()).toBe(formatTimezone(selectedTz));
});
it('should not display empty results message', () => {
- expect(wrapper.find('[data-testid="noMatchingResults"]').exists()).toBe(false);
+ expect(findEmptyResultsItem().exists()).toBe(false);
});
describe('Custom events', () => {
@@ -81,7 +76,7 @@ describe('Deploy freeze timezone dropdown', () => {
expect(wrapper.emitted('input')).toEqual([
[
{
- formattedTimezone: formatTz(selectedTz),
+ formattedTimezone: formatTimezone(selectedTz),
identifier: selectedTz.identifier,
},
],
@@ -90,13 +85,27 @@ describe('Deploy freeze timezone dropdown', () => {
});
});
- describe('Selected time zone', () => {
+ describe('Selected time zone not found', () => {
+ beforeEach(() => {
+ createComponent('', 'Berlin');
+ });
+
+ it('renders empty selections', () => {
+ expect(wrapper.findComponent(GlDropdown).props().text).toBe('Select timezone');
+ });
+
+ it('preserves initial value in the associated input', () => {
+ expect(findHiddenInput().attributes('value')).toBe('Berlin');
+ });
+ });
+
+ describe('Selected time zone found', () => {
beforeEach(() => {
- createComponent('', 'Alaska');
+ createComponent('', 'Europe/Berlin');
});
it('renders selected time zone as dropdown label', () => {
- expect(wrapper.findComponent(GlDropdown).vm.text).toBe('Alaska');
+ expect(wrapper.findComponent(GlDropdown).props().text).toBe('[UTC + 2] Berlin');
});
});
});
diff --git a/spec/frontend/vue_shared/components/url_sync_spec.js b/spec/frontend/vue_shared/components/url_sync_spec.js
index aefe6a5c3e8..acda1a64a75 100644
--- a/spec/frontend/vue_shared/components/url_sync_spec.js
+++ b/spec/frontend/vue_shared/components/url_sync_spec.js
@@ -1,10 +1,11 @@
import { shallowMount } from '@vue/test-utils';
import { historyPushState } from '~/lib/utils/common_utils';
-import { mergeUrlParams } from '~/lib/utils/url_utility';
-import UrlSyncComponent from '~/vue_shared/components/url_sync.vue';
+import { mergeUrlParams, setUrlParams } from '~/lib/utils/url_utility';
+import UrlSyncComponent, { URL_SET_PARAMS_STRATEGY } from '~/vue_shared/components/url_sync.vue';
jest.mock('~/lib/utils/url_utility', () => ({
- mergeUrlParams: jest.fn((query, url) => `urlParams: ${query} ${url}`),
+ mergeUrlParams: jest.fn((query, url) => `urlParams: ${JSON.stringify(query)} ${url}`),
+ setUrlParams: jest.fn((query, url) => `urlParams: ${JSON.stringify(query)} ${url}`),
}));
jest.mock('~/lib/utils/common_utils', () => ({
@@ -17,9 +18,14 @@ describe('url sync component', () => {
const findButton = () => wrapper.find('button');
- const createComponent = ({ query = mockQuery, scopedSlots, slots } = {}) => {
+ const createComponent = ({
+ query = mockQuery,
+ scopedSlots,
+ slots,
+ urlParamsUpdateStrategy,
+ } = {}) => {
wrapper = shallowMount(UrlSyncComponent, {
- propsData: { query },
+ propsData: { query, ...(urlParamsUpdateStrategy && { urlParamsUpdateStrategy }) },
scopedSlots,
slots,
});
@@ -29,21 +35,39 @@ describe('url sync component', () => {
wrapper.destroy();
});
- const expectUrlSync = (query, times, mergeUrlParamsReturnValue) => {
- expect(mergeUrlParams).toHaveBeenCalledTimes(times);
- expect(mergeUrlParams).toHaveBeenCalledWith(query, window.location.href, {
- spreadArrays: true,
- });
+ const expectUrlSyncFactory = (
+ query,
+ times,
+ urlParamsUpdateStrategy,
+ urlOptions,
+ urlReturnValue,
+ ) => {
+ expect(urlParamsUpdateStrategy).toHaveBeenCalledTimes(times);
+ expect(urlParamsUpdateStrategy).toHaveBeenCalledWith(query, window.location.href, urlOptions);
expect(historyPushState).toHaveBeenCalledTimes(times);
- expect(historyPushState).toHaveBeenCalledWith(mergeUrlParamsReturnValue);
+ expect(historyPushState).toHaveBeenCalledWith(urlReturnValue);
+ };
+
+ const expectUrlSyncWithMergeUrlParams = (query, times, mergeUrlParamsReturnValue) => {
+ expectUrlSyncFactory(
+ query,
+ times,
+ mergeUrlParams,
+ { spreadArrays: true },
+ mergeUrlParamsReturnValue,
+ );
+ };
+
+ const expectUrlSyncWithSetUrlParams = (query, times, setUrlParamsReturnValue) => {
+ expectUrlSyncFactory(query, times, setUrlParams, true, setUrlParamsReturnValue);
};
describe('with query as a props', () => {
it('immediately syncs the query to the URL', () => {
createComponent();
- expectUrlSync(mockQuery, 1, mergeUrlParams.mock.results[0].value);
+ expectUrlSyncWithMergeUrlParams(mockQuery, 1, mergeUrlParams.mock.results[0].value);
});
describe('when the query is modified', () => {
@@ -54,11 +78,21 @@ describe('url sync component', () => {
// using setProps to test the watcher
await wrapper.setProps({ query: newQuery });
- expectUrlSync(mockQuery, 2, mergeUrlParams.mock.results[1].value);
+ expectUrlSyncWithMergeUrlParams(mockQuery, 2, mergeUrlParams.mock.results[1].value);
});
});
});
+ describe('with url-params-update-strategy equals to URL_SET_PARAMS_STRATEGY', () => {
+ it('uses setUrlParams to generate URL', () => {
+ createComponent({
+ urlParamsUpdateStrategy: URL_SET_PARAMS_STRATEGY,
+ });
+
+ expectUrlSyncWithSetUrlParams(mockQuery, 1, setUrlParams.mock.results[0].value);
+ });
+ });
+
describe('with scoped slot', () => {
const scopedSlots = {
default: `
@@ -77,7 +111,7 @@ describe('url sync component', () => {
findButton().trigger('click');
- expectUrlSync({ bar: 'baz' }, 1, mergeUrlParams.mock.results[0].value);
+ expectUrlSyncWithMergeUrlParams({ bar: 'baz' }, 1, mergeUrlParams.mock.results[0].value);
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js
deleted file mode 100644
index f87737ca86a..00000000000
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js
+++ /dev/null
@@ -1,134 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlAvatar, GlTooltip } from '@gitlab/ui';
-import defaultAvatarUrl from 'images/no_avatar.png';
-import { placeholderImage } from '~/lazy_loader';
-import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image_new.vue';
-
-jest.mock('images/no_avatar.png', () => 'default-avatar-url');
-
-const PROVIDED_PROPS = {
- size: 32,
- imgSrc: 'myavatarurl.com',
- imgAlt: 'mydisplayname',
- cssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
-};
-
-describe('User Avatar Image Component', () => {
- let wrapper;
-
- const findAvatar = () => wrapper.findComponent(GlAvatar);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('Initialization', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- },
- });
- });
-
- it('should render `GlAvatar` and provide correct properties to it', () => {
- expect(findAvatar().attributes('data-src')).toBe(
- `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
- );
- expect(findAvatar().props()).toMatchObject({
- src: `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
- alt: PROVIDED_PROPS.imgAlt,
- size: PROVIDED_PROPS.size,
- });
- });
-
- it('should add correct CSS classes', () => {
- const classes = wrapper.findComponent(GlAvatar).classes();
- expect(classes).toContain(PROVIDED_PROPS.cssClasses);
- expect(classes).not.toContain('lazy');
- });
- });
-
- describe('Initialization when lazy', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- lazy: true,
- },
- });
- });
-
- it('should add lazy attributes', () => {
- expect(findAvatar().classes()).toContain('lazy');
- expect(findAvatar().attributes()).toMatchObject({
- src: placeholderImage,
- 'data-src': `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
- });
- });
-
- it('should use maximum number when size is provided as an object', () => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- size: { default: 16, md: 64, lg: 24 },
- lazy: true,
- },
- });
-
- expect(findAvatar().attributes('data-src')).toBe(`${PROVIDED_PROPS.imgSrc}?width=${64}`);
- });
- });
-
- describe('Initialization without src', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- imgSrc: null,
- },
- });
- });
-
- it('should have default avatar image', () => {
- expect(findAvatar().props('src')).toBe(`${defaultAvatarUrl}?width=${PROVIDED_PROPS.size}`);
- });
- });
-
- describe('Dynamic tooltip content', () => {
- const slots = {
- default: ['Action!'],
- };
-
- describe('when `tooltipText` is provided and no default slot', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: { ...PROVIDED_PROPS },
- });
- });
-
- it('renders the tooltip with `tooltipText` as content', () => {
- expect(wrapper.findComponent(GlTooltip).text()).toBe(PROVIDED_PROPS.tooltipText);
- });
- });
-
- describe('when `tooltipText` and default slot is provided', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: { ...PROVIDED_PROPS },
- slots,
- });
- });
-
- it('does not render `tooltipText` inside the tooltip', () => {
- expect(wrapper.findComponent(GlTooltip).text()).not.toBe(PROVIDED_PROPS.tooltipText);
- });
-
- it('renders the content provided via default slot', () => {
- expect(wrapper.findComponent(GlTooltip).text()).toContain(slots.default[0]);
- });
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js
deleted file mode 100644
index 2c1be6ec47e..00000000000
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_old_spec.js
+++ /dev/null
@@ -1,127 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlTooltip } from '@gitlab/ui';
-import defaultAvatarUrl from 'images/no_avatar.png';
-import { placeholderImage } from '~/lazy_loader';
-import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image_old.vue';
-
-jest.mock('images/no_avatar.png', () => 'default-avatar-url');
-
-const PROVIDED_PROPS = {
- size: 32,
- imgSrc: 'myavatarurl.com',
- imgAlt: 'mydisplayname',
- cssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
-};
-
-const DEFAULT_PROPS = {
- size: 20,
-};
-
-describe('User Avatar Image Component', () => {
- let wrapper;
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('Initialization', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- },
- });
- });
-
- it('should have <img> as a child element', () => {
- const imageElement = wrapper.find('img');
-
- expect(imageElement.exists()).toBe(true);
- expect(imageElement.attributes('src')).toBe(
- `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
- );
- expect(imageElement.attributes('data-src')).toBe(
- `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
- );
- expect(imageElement.attributes('alt')).toBe(PROVIDED_PROPS.imgAlt);
- });
-
- it('should properly render img css', () => {
- const classes = wrapper.find('img').classes();
- expect(classes).toEqual(['avatar', 's32', PROVIDED_PROPS.cssClasses]);
- expect(classes).not.toContain('lazy');
- });
- });
-
- describe('Initialization when lazy', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- lazy: true,
- },
- });
- });
-
- it('should add lazy attributes', () => {
- const imageElement = wrapper.find('img');
-
- expect(imageElement.classes()).toContain('lazy');
- expect(imageElement.attributes('src')).toBe(placeholderImage);
- expect(imageElement.attributes('data-src')).toBe(
- `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
- );
- });
- });
-
- describe('Initialization without src', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage);
- });
-
- it('should have default avatar image', () => {
- const imageElement = wrapper.find('img');
-
- expect(imageElement.attributes('src')).toBe(
- `${defaultAvatarUrl}?width=${DEFAULT_PROPS.size}`,
- );
- });
- });
-
- describe('Dynamic tooltip content', () => {
- const slots = {
- default: ['Action!'],
- };
-
- describe('when `tooltipText` is provided and no default slot', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: { ...PROVIDED_PROPS },
- });
- });
-
- it('renders the tooltip with `tooltipText` as content', () => {
- expect(wrapper.findComponent(GlTooltip).text()).toBe(PROVIDED_PROPS.tooltipText);
- });
- });
-
- describe('when `tooltipText` and default slot is provided', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: { ...PROVIDED_PROPS },
- slots,
- });
- });
-
- it('does not render `tooltipText` inside the tooltip', () => {
- expect(wrapper.findComponent(GlTooltip).text()).not.toBe(PROVIDED_PROPS.tooltipText);
- });
-
- it('renders the content provided via default slot', () => {
- expect(wrapper.findComponent(GlTooltip).text()).toContain(slots.default[0]);
- });
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
index 6ad2ef226c2..d63b13981ac 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
@@ -1,7 +1,10 @@
import { shallowMount } from '@vue/test-utils';
+import { GlAvatar, GlTooltip } from '@gitlab/ui';
+import defaultAvatarUrl from 'images/no_avatar.png';
+import { placeholderImage } from '~/lazy_loader';
import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
-import UserAvatarImageNew from '~/vue_shared/components/user_avatar/user_avatar_image_new.vue';
-import UserAvatarImageOld from '~/vue_shared/components/user_avatar/user_avatar_image_old.vue';
+
+jest.mock('images/no_avatar.png', () => 'default-avatar-url');
const PROVIDED_PROPS = {
size: 32,
@@ -15,37 +18,117 @@ const PROVIDED_PROPS = {
describe('User Avatar Image Component', () => {
let wrapper;
- const createWrapper = (props = {}, { glAvatarForAllUserAvatars } = {}) => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- ...props,
- },
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars,
- },
- },
- });
- };
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
afterEach(() => {
wrapper.destroy();
});
- describe.each([
- [false, true, true],
- [true, false, true],
- [true, true, true],
- [false, false, false],
- ])(
- 'when glAvatarForAllUserAvatars=%s and enforceGlAvatar=%s',
- (glAvatarForAllUserAvatars, enforceGlAvatar, isUsingNewVersion) => {
- it(`will render ${isUsingNewVersion ? 'new' : 'old'} version`, () => {
- createWrapper({ enforceGlAvatar }, { glAvatarForAllUserAvatars });
- expect(wrapper.findComponent(UserAvatarImageNew).exists()).toBe(isUsingNewVersion);
- expect(wrapper.findComponent(UserAvatarImageOld).exists()).toBe(!isUsingNewVersion);
- });
- },
- );
+ describe('Initialization', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ },
+ });
+ });
+
+ it('should render `GlAvatar` and provide correct properties to it', () => {
+ expect(findAvatar().attributes('data-src')).toBe(
+ `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ );
+ expect(findAvatar().props()).toMatchObject({
+ src: `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ alt: PROVIDED_PROPS.imgAlt,
+ size: PROVIDED_PROPS.size,
+ });
+ });
+
+ it('should add correct CSS classes', () => {
+ const classes = wrapper.findComponent(GlAvatar).classes();
+ expect(classes).toContain(PROVIDED_PROPS.cssClasses);
+ expect(classes).not.toContain('lazy');
+ });
+ });
+
+ describe('Initialization when lazy', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ lazy: true,
+ },
+ });
+ });
+
+ it('should add lazy attributes', () => {
+ expect(findAvatar().classes()).toContain('lazy');
+ expect(findAvatar().attributes()).toMatchObject({
+ src: placeholderImage,
+ 'data-src': `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
+ });
+ });
+
+ it('should use maximum number when size is provided as an object', () => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ size: { default: 16, md: 64, lg: 24 },
+ lazy: true,
+ },
+ });
+
+ expect(findAvatar().attributes('data-src')).toBe(`${PROVIDED_PROPS.imgSrc}?width=${64}`);
+ });
+ });
+
+ describe('Initialization without src', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ imgSrc: null,
+ },
+ });
+ });
+
+ it('should have default avatar image', () => {
+ expect(findAvatar().props('src')).toBe(`${defaultAvatarUrl}?width=${PROVIDED_PROPS.size}`);
+ });
+ });
+
+ describe('Dynamic tooltip content', () => {
+ const slots = {
+ default: ['Action!'],
+ };
+
+ describe('when `tooltipText` is provided and no default slot', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: { ...PROVIDED_PROPS },
+ });
+ });
+
+ it('renders the tooltip with `tooltipText` as content', () => {
+ expect(wrapper.findComponent(GlTooltip).text()).toBe(PROVIDED_PROPS.tooltipText);
+ });
+ });
+
+ describe('when `tooltipText` and default slot is provided', () => {
+ beforeEach(() => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: { ...PROVIDED_PROPS },
+ slots,
+ });
+ });
+
+ it('does not render `tooltipText` inside the tooltip', () => {
+ expect(wrapper.findComponent(GlTooltip).text()).not.toBe(PROVIDED_PROPS.tooltipText);
+ });
+
+ it('renders the content provided via default slot', () => {
+ expect(wrapper.findComponent(GlTooltip).text()).toContain(slots.default[0]);
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js
deleted file mode 100644
index f485a14cfea..00000000000
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import { GlAvatarLink } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
-import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link_new.vue';
-
-describe('User Avatar Link Component', () => {
- let wrapper;
-
- const findUserName = () => wrapper.findByTestId('user-avatar-link-username');
-
- const defaultProps = {
- linkHref: `${TEST_HOST}/myavatarurl.com`,
- imgSize: 32,
- imgSrc: `${TEST_HOST}/myavatarurl.com`,
- imgAlt: 'mydisplayname',
- imgCssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
- username: 'username',
- };
-
- const createWrapper = (props, slots) => {
- wrapper = shallowMountExtended(UserAvatarLink, {
- propsData: {
- ...defaultProps,
- ...props,
- ...slots,
- },
- });
- };
-
- beforeEach(() => {
- createWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('should render GlLink with correct props', () => {
- const link = wrapper.findComponent(GlAvatarLink);
- expect(link.exists()).toBe(true);
- expect(link.attributes('href')).toBe(defaultProps.linkHref);
- });
-
- it('should render UserAvatarImage and provide correct props to it', () => {
- expect(wrapper.findComponent(UserAvatarImage).exists()).toBe(true);
- expect(wrapper.findComponent(UserAvatarImage).props()).toEqual({
- cssClasses: defaultProps.imgCssClasses,
- imgAlt: defaultProps.imgAlt,
- imgSrc: defaultProps.imgSrc,
- lazy: false,
- size: defaultProps.imgSize,
- tooltipPlacement: defaultProps.tooltipPlacement,
- tooltipText: '',
- enforceGlAvatar: false,
- });
- });
-
- describe('when username provided', () => {
- beforeEach(() => {
- createWrapper({ username: defaultProps.username });
- });
-
- it('should render provided username', () => {
- expect(findUserName().text()).toBe(defaultProps.username);
- });
-
- it('should provide the tooltip data for the username', () => {
- expect(findUserName().attributes()).toEqual(
- expect.objectContaining({
- title: defaultProps.tooltipText,
- 'tooltip-placement': defaultProps.tooltipPlacement,
- }),
- );
- });
- });
-
- describe('when username is NOT provided', () => {
- beforeEach(() => {
- createWrapper({ username: '' });
- });
-
- it('should NOT render username', () => {
- expect(findUserName().exists()).toBe(false);
- });
- });
-
- describe('avatar-badge slot', () => {
- const badge = '<span>User badge</span>';
-
- beforeEach(() => {
- createWrapper(defaultProps, {
- 'avatar-badge': badge,
- });
- });
-
- it('should render provided `avatar-badge` slot content', () => {
- expect(wrapper.html()).toContain(badge);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js
deleted file mode 100644
index cf7a1025dba..00000000000
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js
+++ /dev/null
@@ -1,103 +0,0 @@
-import { GlLink } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { TEST_HOST } from 'spec/test_constants';
-import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
-import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link_old.vue';
-
-describe('User Avatar Link Component', () => {
- let wrapper;
-
- const findUserName = () => wrapper.find('[data-testid="user-avatar-link-username"]');
-
- const defaultProps = {
- linkHref: `${TEST_HOST}/myavatarurl.com`,
- imgSize: 32,
- imgSrc: `${TEST_HOST}/myavatarurl.com`,
- imgAlt: 'mydisplayname',
- imgCssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
- username: 'username',
- };
-
- const createWrapper = (props, slots) => {
- wrapper = shallowMountExtended(UserAvatarLink, {
- propsData: {
- ...defaultProps,
- ...props,
- ...slots,
- },
- });
- };
-
- beforeEach(() => {
- createWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('should render GlLink with correct props', () => {
- const link = wrapper.findComponent(GlLink);
- expect(link.exists()).toBe(true);
- expect(link.attributes('href')).toBe(defaultProps.linkHref);
- });
-
- it('should render UserAvatarImage and povide correct props to it', () => {
- expect(wrapper.findComponent(UserAvatarImage).exists()).toBe(true);
- expect(wrapper.findComponent(UserAvatarImage).props()).toEqual({
- cssClasses: defaultProps.imgCssClasses,
- imgAlt: defaultProps.imgAlt,
- imgSrc: defaultProps.imgSrc,
- lazy: false,
- size: defaultProps.imgSize,
- tooltipPlacement: defaultProps.tooltipPlacement,
- tooltipText: '',
- enforceGlAvatar: false,
- });
- });
-
- describe('when username provided', () => {
- beforeEach(() => {
- createWrapper({ username: defaultProps.username });
- });
-
- it('should render provided username', () => {
- expect(findUserName().text()).toBe(defaultProps.username);
- });
-
- it('should provide the tooltip data for the username', () => {
- expect(findUserName().attributes()).toEqual(
- expect.objectContaining({
- title: defaultProps.tooltipText,
- 'tooltip-placement': defaultProps.tooltipPlacement,
- }),
- );
- });
- });
-
- describe('when username is NOT provided', () => {
- beforeEach(() => {
- createWrapper({ username: '' });
- });
-
- it('should NOT render username', () => {
- expect(findUserName().exists()).toBe(false);
- });
- });
-
- describe('avatar-badge slot', () => {
- const badge = '<span>User badge</span>';
-
- beforeEach(() => {
- createWrapper(defaultProps, {
- 'avatar-badge': badge,
- });
- });
-
- it('should render provided `avatar-badge` slot content', () => {
- expect(wrapper.html()).toContain(badge);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index fd3f59008ec..df7ce449678 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -1,51 +1,102 @@
-import { shallowMount } from '@vue/test-utils';
+import { GlAvatarLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
-import UserAvatarLinkNew from '~/vue_shared/components/user_avatar/user_avatar_link_new.vue';
-import UserAvatarLinkOld from '~/vue_shared/components/user_avatar/user_avatar_link_old.vue';
-
-const PROVIDED_PROPS = {
- size: 32,
- imgSrc: 'myavatarurl.com',
- imgAlt: 'mydisplayname',
- cssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
-};
describe('User Avatar Link Component', () => {
let wrapper;
- const createWrapper = (props = {}, { glAvatarForAllUserAvatars } = {}) => {
- wrapper = shallowMount(UserAvatarLink, {
+ const findUserName = () => wrapper.findByTestId('user-avatar-link-username');
+
+ const defaultProps = {
+ linkHref: `${TEST_HOST}/myavatarurl.com`,
+ imgSize: 32,
+ imgSrc: `${TEST_HOST}/myavatarurl.com`,
+ imgAlt: 'mydisplayname',
+ imgCssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+ username: 'username',
+ };
+
+ const createWrapper = (props, slots) => {
+ wrapper = shallowMountExtended(UserAvatarLink, {
propsData: {
- ...PROVIDED_PROPS,
+ ...defaultProps,
...props,
- },
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars,
- },
+ ...slots,
},
});
};
+ beforeEach(() => {
+ createWrapper();
+ });
+
afterEach(() => {
wrapper.destroy();
});
- describe.each([
- [false, true, true],
- [true, false, true],
- [true, true, true],
- [false, false, false],
- ])(
- 'when glAvatarForAllUserAvatars=%s and enforceGlAvatar=%s',
- (glAvatarForAllUserAvatars, enforceGlAvatar, isUsingNewVersion) => {
- it(`will render ${isUsingNewVersion ? 'new' : 'old'} version`, () => {
- createWrapper({ enforceGlAvatar }, { glAvatarForAllUserAvatars });
- expect(wrapper.findComponent(UserAvatarLinkNew).exists()).toBe(isUsingNewVersion);
- expect(wrapper.findComponent(UserAvatarLinkOld).exists()).toBe(!isUsingNewVersion);
+ it('should render GlLink with correct props', () => {
+ const link = wrapper.findComponent(GlAvatarLink);
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe(defaultProps.linkHref);
+ });
+
+ it('should render UserAvatarImage and provide correct props to it', () => {
+ expect(wrapper.findComponent(UserAvatarImage).exists()).toBe(true);
+ expect(wrapper.findComponent(UserAvatarImage).props()).toEqual({
+ cssClasses: defaultProps.imgCssClasses,
+ imgAlt: defaultProps.imgAlt,
+ imgSrc: defaultProps.imgSrc,
+ lazy: false,
+ size: defaultProps.imgSize,
+ tooltipPlacement: defaultProps.tooltipPlacement,
+ tooltipText: '',
+ });
+ });
+
+ describe('when username provided', () => {
+ beforeEach(() => {
+ createWrapper({ username: defaultProps.username });
+ });
+
+ it('should render provided username', () => {
+ expect(findUserName().text()).toBe(defaultProps.username);
+ });
+
+ it('should provide the tooltip data for the username', () => {
+ expect(findUserName().attributes()).toEqual(
+ expect.objectContaining({
+ title: defaultProps.tooltipText,
+ 'tooltip-placement': defaultProps.tooltipPlacement,
+ }),
+ );
+ });
+ });
+
+ describe('when username is NOT provided', () => {
+ beforeEach(() => {
+ createWrapper({ username: '' });
+ });
+
+ it('should NOT render username', () => {
+ expect(findUserName().exists()).toBe(false);
+ });
+ });
+
+ describe('avatar-badge slot', () => {
+ const badge = '<span>User badge</span>';
+
+ beforeEach(() => {
+ createWrapper(defaultProps, {
+ 'avatar-badge': badge,
});
- },
- );
+ });
+
+ it('should render provided `avatar-badge` slot content', () => {
+ expect(wrapper.html()).toContain(badge);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
index b9accbf0373..1ad6d043399 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
@@ -153,29 +153,4 @@ describe('UserAvatarList', () => {
});
});
});
-
- describe('additional styling for the image', () => {
- it('should not add CSS class when feature flag `glAvatarForAllUserAvatars` is disabled', () => {
- factory({
- propsData: { items: createList(1) },
- });
-
- const link = wrapper.findComponent(UserAvatarLink);
- expect(link.props('imgCssClasses')).not.toBe('gl-mr-3');
- });
-
- it('should add CSS class when feature flag `glAvatarForAllUserAvatars` is enabled', () => {
- factory({
- propsData: { items: createList(1) },
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: true,
- },
- },
- });
-
- const link = wrapper.findComponent(UserAvatarLink);
- expect(link.props('imgCssClasses')).toBe('gl-mr-3');
- });
- });
});
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 6d48000beb0..f6316af6ad8 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -8,10 +8,12 @@ import {
I18N_USER_BLOCKED,
I18N_USER_LEARN,
I18N_USER_FOLLOW,
+ I18N_ERROR_FOLLOW,
I18N_USER_UNFOLLOW,
+ I18N_ERROR_UNFOLLOW,
} from '~/vue_shared/components/user_popover/constants';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { followUser, unfollowUser } from '~/api/user_api';
import { mockTracking } from 'helpers/tracking_helper';
@@ -239,6 +241,18 @@ describe('User Popover Component', () => {
expect(wrapper.html()).toContain('<gl-emoji data-name="basketball_player"');
});
+ it('should show only emoji', () => {
+ const user = {
+ ...DEFAULT_PROPS.user,
+ status: { emoji: 'basketball_player' },
+ };
+
+ createWrapper({ user });
+
+ expect(findUserStatus().exists()).toBe(true);
+ expect(wrapper.html()).toContain('<gl-emoji data-name="basketball_player"');
+ });
+
it('hides the div when status is null', () => {
const user = { ...DEFAULT_PROPS.user, status: null };
@@ -367,27 +381,49 @@ describe('User Popover Component', () => {
itTracksToggleFollowButtonClick('follow_from_user_popover');
describe('when an error occurs', () => {
- beforeEach(() => {
- followUser.mockRejectedValue({});
+ describe('api sends error message', () => {
+ const mockedMessage = sprintf(I18N_ERROR_UNFOLLOW, { limit: 300 });
+ const apiResponse = { response: { data: { message: mockedMessage } } };
- findToggleFollowButton().trigger('click');
- });
+ beforeEach(() => {
+ followUser.mockRejectedValue(apiResponse);
+ findToggleFollowButton().trigger('click');
+ });
- it('shows an error message', async () => {
- await axios.waitForAll();
+ it('shows an error message from the API response', async () => {
+ await axios.waitForAll();
- expect(createFlash).toHaveBeenCalledWith({
- message: 'An error occurred while trying to follow this user, please try again.',
- error: {},
- captureError: true,
+ expect(createAlert).toHaveBeenCalledWith({
+ message: mockedMessage,
+ error: apiResponse,
+ captureError: true,
+ });
});
});
- it('emits no events', async () => {
- await axios.waitForAll();
+ describe('api did not send error message', () => {
+ beforeEach(() => {
+ followUser.mockRejectedValue({});
- expect(wrapper.emitted().follow).toBeUndefined();
- expect(wrapper.emitted().unfollow).toBeUndefined();
+ findToggleFollowButton().trigger('click');
+ });
+
+ it('shows an error message', async () => {
+ await axios.waitForAll();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: I18N_ERROR_FOLLOW,
+ error: {},
+ captureError: true,
+ });
+ });
+
+ it('emits no events', async () => {
+ await axios.waitForAll();
+
+ expect(wrapper.emitted().follow).toBeUndefined();
+ expect(wrapper.emitted().unfollow).toBeUndefined();
+ });
});
});
});
@@ -425,8 +461,8 @@ describe('User Popover Component', () => {
});
it('shows an error message', () => {
- expect(createFlash).toHaveBeenCalledWith({
- message: 'An error occurred while trying to unfollow this user, please try again.',
+ expect(createAlert).toHaveBeenCalledWith({
+ message: I18N_ERROR_UNFOLLOW,
error: {},
captureError: true,
});
diff --git a/spec/frontend/vue_shared/directives/safe_html_spec.js b/spec/frontend/vue_shared/directives/safe_html_spec.js
new file mode 100644
index 00000000000..ba1de8e4596
--- /dev/null
+++ b/spec/frontend/vue_shared/directives/safe_html_spec.js
@@ -0,0 +1,116 @@
+import { shallowMount } from '@vue/test-utils';
+import safeHtml from '~/vue_shared/directives/safe_html';
+import { defaultConfig } from '~/lib/dompurify';
+/* eslint-disable no-script-url */
+const invalidProtocolUrls = [
+ 'javascript:alert(1)',
+ 'jAvascript:alert(1)',
+ 'data:text/html,<script>alert(1);</script>',
+ ' javascript:',
+ 'javascript :',
+];
+/* eslint-enable no-script-url */
+const validProtocolUrls = ['slack://open', 'x-devonthink-item://90909', 'x-devonthink-item:90909'];
+
+describe('safe html directive', () => {
+ let wrapper;
+
+ const createComponent = ({ template, html, config } = {}) => {
+ const defaultTemplate = `<div v-safe-html="rawHtml"></div>`;
+ const defaultHtml = 'hello <script>alert(1)</script>world';
+
+ const component = {
+ directives: {
+ safeHtml,
+ },
+ data() {
+ return {
+ rawHtml: html || defaultHtml,
+ config: config || {},
+ };
+ },
+ template: template || defaultTemplate,
+ };
+
+ wrapper = shallowMount(component);
+ };
+
+ describe('default', () => {
+ it('should remove the script tag', () => {
+ createComponent();
+
+ expect(wrapper.html()).toEqual('<div>hello world</div>');
+ });
+
+ it('should remove javascript hrefs', () => {
+ createComponent({ html: '<a href="javascript:prompt(1)">click here</a>' });
+
+ expect(wrapper.html()).toEqual('<div><a>click here</a></div>');
+ });
+
+ it('should remove any existing children', () => {
+ createComponent({
+ template: `<div v-safe-html="rawHtml">foo <i>bar</i></div>`,
+ });
+
+ expect(wrapper.html()).toEqual('<div>hello world</div>');
+ });
+
+ describe('with non-http links', () => {
+ it.each(validProtocolUrls)('should allow %s', (url) => {
+ createComponent({
+ html: `<a href="${url}">internal link</a>`,
+ });
+ expect(wrapper.html()).toContain(`<a href="${url}">internal link</a>`);
+ });
+
+ it.each(invalidProtocolUrls)('should not allow %s', (url) => {
+ createComponent({
+ html: `<a href="${url}">internal link</a>`,
+ });
+ expect(wrapper.html()).toContain(`<a>internal link</a>`);
+ });
+ });
+
+ describe('handles data attributes correctly', () => {
+ const allowedDataAttrs = ['data-safe', 'data-random'];
+
+ it.each(defaultConfig.FORBID_ATTR)('removes dangerous `%s` attribute', (attr) => {
+ const html = `<a ${attr}="true"></a>`;
+ createComponent({ html });
+
+ expect(wrapper.html()).not.toContain(html);
+ });
+
+ it.each(allowedDataAttrs)('does not remove allowed `%s` attribute', (attr) => {
+ const html = `<a ${attr}="true"></a>`;
+ createComponent({ html });
+
+ expect(wrapper.html()).toContain(html);
+ });
+ });
+ });
+
+ describe('advanced config', () => {
+ const template = '<div v-safe-html:[config]="rawHtml"></div>';
+ it('should only allow <b> tags', () => {
+ createComponent({
+ template,
+ html: '<a href="javascript:prompt(1)"><b>click here</b></a>',
+ config: { ALLOWED_TAGS: ['b'] },
+ });
+
+ expect(wrapper.html()).toEqual('<div><b>click here</b></div>');
+ });
+
+ it('should strip all html tags', () => {
+ createComponent({
+ template,
+ html: '<a href="javascript:prompt(1)"><u>click here</u></a>',
+ config: { ALLOWED_TAGS: [] },
+ });
+
+ expect(wrapper.html()).toEqual('<div>click here</div>');
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/__snapshots__/board_blocked_icon_spec.js.snap b/spec/frontend/vue_shared/issuable/__snapshots__/issuable_blocked_icon_spec.js.snap
index 34e4f996ff0..dd011b9d84e 100644
--- a/spec/frontend/boards/components/__snapshots__/board_blocked_icon_spec.js.snap
+++ b/spec/frontend/vue_shared/issuable/__snapshots__/issuable_blocked_icon_spec.js.snap
@@ -1,23 +1,23 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`BoardBlockedIcon on mouseenter on blocked icon with more than three blocking issues matches the snapshot 1`] = `
-"<div class=\\"gl-display-inline\\"><svg data-testid=\\"issue-blocked-icon\\" role=\\"img\\" aria-hidden=\\"true\\" class=\\"issue-blocked-icon gl-mr-2 gl-cursor-pointer gl-text-red-500 gl-icon s16\\" id=\\"blocked-icon-uniqueId\\">
+exports[`IssuableBlockedIcon on mouseenter on blocked icon with more than three blocking issues matches the snapshot 1`] = `
+"<div class=\\"gl-display-inline\\"><svg data-testid=\\"issuable-blocked-icon\\" role=\\"img\\" aria-hidden=\\"true\\" class=\\"issuable-blocked-icon gl-mr-2 gl-cursor-pointer gl-text-red-500 gl-icon s16\\" id=\\"blocked-icon-uniqueId\\">
<use href=\\"#issue-block\\"></use>
</svg>
<div class=\\"gl-popover\\">
- <ul class=\\"gl-list-style-none gl-p-0\\">
+ <ul class=\\"gl-list-style-none gl-p-0 gl-mb-0\\">
<li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/6\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#6</a>
- <p data-testid=\\"issuable-title\\" class=\\"gl-mb-3 gl-display-block!\\">
+ <p data-testid=\\"issuable-title\\" class=\\"gl-display-block! gl-mb-3\\">
blocking issue title 1
</p>
</li>
<li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/5\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#5</a>
- <p data-testid=\\"issuable-title\\" class=\\"gl-mb-3 gl-display-block!\\">
+ <p data-testid=\\"issuable-title\\" class=\\"gl-display-block! gl-mb-3\\">
blocking issue title 2 + blocking issue title 2 + blocking issue title 2 + bloc…
</p>
</li>
<li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/4\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#4</a>
- <p data-testid=\\"issuable-title\\" class=\\"gl-mb-3 gl-display-block!\\">
+ <p data-testid=\\"issuable-title\\" class=\\"gl-display-block! gl-mb-0\\">
blocking issue title 3
</p>
</li>
diff --git a/spec/frontend/boards/components/board_blocked_icon_spec.js b/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js
index ffdc0a7cecc..d59cbce6633 100644
--- a/spec/frontend/boards/components/board_blocked_icon_spec.js
+++ b/spec/frontend/vue_shared/issuable/issuable_blocked_icon_spec.js
@@ -5,8 +5,9 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import BoardBlockedIcon from '~/boards/components/board_blocked_icon.vue';
-import { blockingIssuablesQueries, issuableTypes } from '~/boards/constants';
+import IssuableBlockedIcon from '~/vue_shared/components/issuable_blocked_icon/issuable_blocked_icon.vue';
+import { blockingIssuablesQueries } from '~/vue_shared/components/issuable_blocked_icon/constants';
+import { issuableTypes } from '~/boards/constants';
import { truncate } from '~/lib/utils/text_utility';
import {
mockIssue,
@@ -21,9 +22,9 @@ import {
mockBlockedIssue2,
mockBlockedEpic1,
mockBlockingEpicIssuablesResponse1,
-} from '../mock_data';
+} from '../../boards/mock_data';
-describe('BoardBlockedIcon', () => {
+describe('IssuableBlockedIcon', () => {
let wrapper;
let mockApollo;
@@ -64,7 +65,7 @@ describe('BoardBlockedIcon', () => {
Vue.use(VueApollo);
wrapper = extendedWrapper(
- mount(BoardBlockedIcon, {
+ mount(IssuableBlockedIcon, {
apolloProvider: mockApollo,
propsData: {
item: {
@@ -88,7 +89,7 @@ describe('BoardBlockedIcon', () => {
issuableType = issuableTypes.issue,
} = {}) => {
wrapper = extendedWrapper(
- shallowMount(BoardBlockedIcon, {
+ shallowMount(IssuableBlockedIcon, {
propsData: {
item: {
...mockIssuable,
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
index 39a76a51191..6b20f0c77a3 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
@@ -138,7 +138,7 @@ describe('IssuableBody', () => {
wrapper.vm.handleTaskListUpdateSuccess(updatedIssuable);
- expect(wrapper.emitted('task-list-update-success')).toBeTruthy();
+ expect(wrapper.emitted('task-list-update-success')).toHaveLength(1);
expect(wrapper.emitted('task-list-update-success')[0]).toEqual([updatedIssuable]);
});
});
@@ -147,7 +147,7 @@ describe('IssuableBody', () => {
it('emits `task-list-update-failure` event on component', () => {
wrapper.vm.handleTaskListUpdateFailure();
- expect(wrapper.emitted('task-list-update-failure')).toBeTruthy();
+ expect(wrapper.emitted('task-list-update-failure')).toHaveLength(1);
});
});
});
@@ -202,7 +202,7 @@ describe('IssuableBody', () => {
issuableTitle.vm.$emit('edit-issuable');
- expect(wrapper.emitted('edit-issuable')).toBeTruthy();
+ expect(wrapper.emitted('edit-issuable')).toHaveLength(1);
});
it.each(['keydown-title', 'keydown-description'])(
@@ -227,7 +227,7 @@ describe('IssuableBody', () => {
issuableEditForm.vm.$emit(eventName, eventObj, issuableMeta);
- expect(wrapper.emitted(eventName)).toBeTruthy();
+ expect(wrapper.emitted(eventName)).toHaveLength(1);
expect(wrapper.emitted(eventName)[0]).toMatchObject([eventObj, issuableMeta]);
},
);
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index a9651cf8bac..43ff68e30b5 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -14,7 +14,7 @@ import {
sastDiffSuccessMock,
secretDetectionDiffSuccessMock,
} from 'jest/vue_shared/security_reports/mock_data';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import HelpIcon from '~/vue_shared/security_reports/components/help_icon.vue';
import SecurityReportDownloadDropdown from '~/vue_shared/security_reports/components/security_report_download_dropdown.vue';
@@ -135,8 +135,8 @@ describe('Security reports app', () => {
});
});
- it('calls createFlash correctly', () => {
- expect(createFlash).toHaveBeenCalledWith({
+ it('calls createAlert correctly', () => {
+ expect(createAlert).toHaveBeenCalledWith({
message: SecurityReportsApp.i18n.apiError,
captureError: true,
error: expect.any(Error),
diff --git a/spec/frontend/webhooks/components/form_url_app_spec.js b/spec/frontend/webhooks/components/form_url_app_spec.js
new file mode 100644
index 00000000000..16e0a3f549e
--- /dev/null
+++ b/spec/frontend/webhooks/components/form_url_app_spec.js
@@ -0,0 +1,142 @@
+import { nextTick } from 'vue';
+import { GlFormRadio, GlFormRadioGroup, GlLink } from '@gitlab/ui';
+
+import FormUrlApp from '~/webhooks/components/form_url_app.vue';
+import FormUrlMaskItem from '~/webhooks/components/form_url_mask_item.vue';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+describe('FormUrlApp', () => {
+ let wrapper;
+
+ const createComponent = ({ props } = {}) => {
+ wrapper = shallowMountExtended(FormUrlApp, {
+ propsData: { ...props },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findAllRadioButtons = () => wrapper.findAllComponents(GlFormRadio);
+ const findRadioGroup = () => wrapper.findComponent(GlFormRadioGroup);
+ const findUrlMaskDisable = () => findAllRadioButtons().at(0);
+ const findUrlMaskEnable = () => findAllRadioButtons().at(1);
+ const findAllUrlMaskItems = () => wrapper.findAllComponents(FormUrlMaskItem);
+ const findAddItem = () => wrapper.findComponent(GlLink);
+ const findFormUrl = () => wrapper.findByTestId('form-url');
+ const findFormUrlPreview = () => wrapper.findByTestId('form-url-preview');
+ const findUrlMaskSection = () => wrapper.findByTestId('url-mask-section');
+
+ describe('template', () => {
+ it('renders radio buttons for URL masking', () => {
+ createComponent();
+
+ expect(findAllRadioButtons()).toHaveLength(2);
+ expect(findUrlMaskDisable().text()).toBe(FormUrlApp.i18n.radioFullUrlText);
+ expect(findUrlMaskEnable().text()).toBe(FormUrlApp.i18n.radioMaskUrlText);
+ });
+
+ it('does not render mask section', () => {
+ createComponent();
+
+ expect(findUrlMaskSection().exists()).toBe(false);
+ });
+
+ describe('on radio select', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ findRadioGroup().vm.$emit('input', true);
+ await nextTick();
+ });
+
+ it('renders mask section', () => {
+ expect(findUrlMaskSection().exists()).toBe(true);
+ });
+
+ it('renders an empty mask item by default', () => {
+ expect(findAllUrlMaskItems()).toHaveLength(1);
+
+ const firstItem = findAllUrlMaskItems().at(0);
+ expect(firstItem.props('itemKey')).toBeNull();
+ expect(firstItem.props('itemValue')).toBeNull();
+ });
+ });
+
+ describe('with mask items', () => {
+ const mockItem1 = { key: 'key1', value: 'value1' };
+ const mockItem2 = { key: 'key2', value: 'value2' };
+
+ beforeEach(() => {
+ createComponent({
+ props: { initialUrlVariables: [mockItem1, mockItem2] },
+ });
+ });
+
+ it('renders masked URL preview', async () => {
+ const mockUrl = 'https://test.host/value1?secret=value2';
+
+ findFormUrl().vm.$emit('input', mockUrl);
+ await nextTick();
+
+ expect(findFormUrlPreview().attributes('value')).toBe(
+ 'https://test.host/{key1}?secret={key2}',
+ );
+ });
+
+ it('renders mask items correctly', () => {
+ expect(findAllUrlMaskItems()).toHaveLength(2);
+
+ const firstItem = findAllUrlMaskItems().at(0);
+ expect(firstItem.props('itemKey')).toBe(mockItem1.key);
+ expect(firstItem.props('itemValue')).toBe(mockItem1.value);
+
+ const secondItem = findAllUrlMaskItems().at(1);
+ expect(secondItem.props('itemKey')).toBe(mockItem2.key);
+ expect(secondItem.props('itemValue')).toBe(mockItem2.value);
+ });
+
+ describe('on mask item input', () => {
+ const mockInput = { index: 0, key: 'display', value: 'secret' };
+
+ it('updates mask item', async () => {
+ const firstItem = findAllUrlMaskItems().at(0);
+ firstItem.vm.$emit('input', mockInput);
+ await nextTick();
+
+ expect(firstItem.props('itemKey')).toBe(mockInput.key);
+ expect(firstItem.props('itemValue')).toBe(mockInput.value);
+ });
+ });
+
+ describe('when add item is clicked', () => {
+ it('adds mask item', async () => {
+ findAddItem().vm.$emit('click');
+ await nextTick();
+
+ expect(findAllUrlMaskItems()).toHaveLength(3);
+
+ const lastItem = findAllUrlMaskItems().at(-1);
+ expect(lastItem.props('itemKey')).toBeNull();
+ expect(lastItem.props('itemValue')).toBeNull();
+ });
+ });
+
+ describe('when remove item is clicked', () => {
+ it('removes the correct mask item', async () => {
+ const firstItem = findAllUrlMaskItems().at(0);
+ firstItem.vm.$emit('remove');
+ await nextTick();
+
+ expect(findAllUrlMaskItems()).toHaveLength(1);
+
+ const newFirstItem = findAllUrlMaskItems().at(0);
+ expect(newFirstItem.props('itemKey')).toBe(mockItem2.key);
+ expect(newFirstItem.props('itemValue')).toBe(mockItem2.value);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/webhooks/components/form_url_mask_item_spec.js b/spec/frontend/webhooks/components/form_url_mask_item_spec.js
new file mode 100644
index 00000000000..ab028ef2997
--- /dev/null
+++ b/spec/frontend/webhooks/components/form_url_mask_item_spec.js
@@ -0,0 +1,100 @@
+import { nextTick } from 'vue';
+import { GlButton, GlFormInput } from '@gitlab/ui';
+
+import FormUrlMaskItem from '~/webhooks/components/form_url_mask_item.vue';
+
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+describe('FormUrlMaskItem', () => {
+ let wrapper;
+
+ const defaultProps = {
+ index: 0,
+ };
+ const mockKey = 'key';
+ const mockValue = 'value';
+ const mockInput = 'input';
+
+ const createComponent = ({ props } = {}) => {
+ wrapper = shallowMountExtended(FormUrlMaskItem, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findMaskItemKey = () => wrapper.findByTestId('mask-item-key');
+ const findMaskItemValue = () => wrapper.findByTestId('mask-item-value');
+ const findRemoveButton = () => wrapper.findComponent(GlButton);
+
+ describe('template', () => {
+ it('renders input for key and value', () => {
+ createComponent();
+
+ const keyInput = findMaskItemKey();
+ expect(keyInput.attributes('label')).toBe(FormUrlMaskItem.i18n.keyLabel);
+ expect(keyInput.findComponent(GlFormInput).attributes('name')).toBe(
+ 'hook[url_variables][][key]',
+ );
+
+ const valueInput = findMaskItemValue();
+ expect(valueInput.attributes('label')).toBe(FormUrlMaskItem.i18n.valueLabel);
+ expect(valueInput.findComponent(GlFormInput).attributes('name')).toBe(
+ 'hook[url_variables][][value]',
+ );
+ });
+
+ describe('on key input', () => {
+ beforeEach(async () => {
+ createComponent({ props: { itemKey: mockKey, itemValue: mockValue } });
+
+ findMaskItemKey().findComponent(GlFormInput).vm.$emit('input', mockInput);
+ await nextTick();
+ });
+
+ it('emits input event', () => {
+ expect(wrapper.emitted('input')).toEqual([
+ [{ index: defaultProps.index, key: mockInput, value: mockValue }],
+ ]);
+ });
+ });
+
+ describe('on value input', () => {
+ beforeEach(async () => {
+ createComponent({ props: { itemKey: mockKey, itemValue: mockValue } });
+
+ findMaskItemValue().findComponent(GlFormInput).vm.$emit('input', mockInput);
+ await nextTick();
+ });
+
+ it('emits input event', () => {
+ expect(wrapper.emitted('input')).toEqual([
+ [{ index: defaultProps.index, key: mockKey, value: mockInput }],
+ ]);
+ });
+ });
+
+ it('renders remove button', () => {
+ createComponent();
+
+ expect(findRemoveButton().props('icon')).toBe('remove');
+ });
+
+ describe('when remove button is clicked', () => {
+ const mockIndex = 5;
+
+ beforeEach(async () => {
+ createComponent({ props: { index: mockIndex } });
+
+ findRemoveButton().vm.$emit('click');
+ await nextTick();
+ });
+
+ it('emits remove event', () => {
+ expect(wrapper.emitted('remove')).toEqual([[mockIndex]]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index de5a814d3e7..da95b51c0b1 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -54,7 +54,7 @@ describe('App', () => {
});
};
- const findInfiniteScroll = () => wrapper.find(GlInfiniteScroll);
+ const findInfiniteScroll = () => wrapper.findComponent(GlInfiniteScroll);
const setup = async () => {
document.body.dataset.page = 'test-page';
@@ -80,7 +80,7 @@ describe('App', () => {
setup();
});
- const getDrawer = () => wrapper.find(GlDrawer);
+ const getDrawer = () => wrapper.findComponent(GlDrawer);
const getBackdrop = () => wrapper.find('.whats-new-modal-backdrop');
it('contains a drawer', () => {
@@ -173,7 +173,7 @@ describe('App', () => {
value();
- expect(getDrawerBodyHeight).toHaveBeenCalledWith(wrapper.find(GlDrawer).element);
+ expect(getDrawerBodyHeight).toHaveBeenCalledWith(wrapper.findComponent(GlDrawer).element);
expect(actions.setDrawerBodyHeight).toHaveBeenCalledWith(
expect.any(Object),
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
index 28231fad108..1b204b6fd60 100644
--- a/spec/frontend/work_items/components/work_item_assignees_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -157,6 +157,14 @@ describe('WorkItemAssignees component', () => {
expect(findTokenSelector().props('viewOnly')).toBe(true);
});
+ it('has a label', () => {
+ createComponent();
+
+ expect(findTokenSelector().props('ariaLabelledby')).toEqual(
+ findAssigneesTitle().attributes('id'),
+ );
+ });
+
describe('when clicking outside the token selector', () => {
function arrange(args) {
createComponent(args);
diff --git a/spec/frontend/work_items/components/work_item_description_spec.js b/spec/frontend/work_items/components/work_item_description_spec.js
index d3165d8dc26..0691fe25e0d 100644
--- a/spec/frontend/work_items/components/work_item_description_spec.js
+++ b/spec/frontend/work_items/components/work_item_description_spec.js
@@ -4,6 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import EditedAt from '~/issues/show/components/edited.vue';
import { updateDraft } from '~/lib/utils/autosave';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
@@ -35,6 +36,7 @@ describe('WorkItemDescription', () => {
const findEditButton = () => wrapper.find('[data-testid="edit-description"]');
const findMarkdownField = () => wrapper.findComponent(MarkdownField);
+ const findEditedAt = () => wrapper.findComponent(EditedAt);
const editDescription = (newText) => wrapper.find('textarea').setValue(newText);
@@ -44,9 +46,9 @@ describe('WorkItemDescription', () => {
const createComponent = async ({
mutationHandler = mutationSuccessHandler,
canUpdate = true,
+ workItemResponse = workItemResponseFactory({ canUpdate }),
isEditing = false,
} = {}) => {
- const workItemResponse = workItemResponseFactory({ canUpdate });
const workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
const { id } = workItemQueryResponse.data.workItem;
@@ -100,6 +102,33 @@ describe('WorkItemDescription', () => {
});
describe('editing description', () => {
+ it('shows edited by text', async () => {
+ const lastEditedAt = '2022-09-21T06:18:42Z';
+ const lastEditedBy = {
+ name: 'Administrator',
+ webPath: '/root',
+ };
+
+ await createComponent({
+ workItemResponse: workItemResponseFactory({
+ lastEditedAt,
+ lastEditedBy,
+ }),
+ });
+
+ expect(findEditedAt().props()).toEqual({
+ updatedAt: lastEditedAt,
+ updatedByName: lastEditedBy.name,
+ updatedByPath: lastEditedBy.webPath,
+ });
+ });
+
+ it('does not show edited by text', async () => {
+ await createComponent();
+
+ expect(findEditedAt().exists()).toBe(false);
+ });
+
it('cancels when clicking cancel', async () => {
await createComponent({
isEditing: true,
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index b047e0dc8d7..aae61b11196 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -1,8 +1,14 @@
-import { GlAlert, GlBadge, GlLoadingIcon, GlSkeletonLoader, GlButton } from '@gitlab/ui';
+import {
+ GlAlert,
+ GlBadge,
+ GlLoadingIcon,
+ GlSkeletonLoader,
+ GlButton,
+ GlEmptyState,
+} from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-import workItemWeightSubscription from 'ee_component/work_items/graphql/work_item_weight.subscription.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
@@ -14,11 +20,13 @@ import WorkItemState from '~/work_items/components/work_item_state.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
+import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
import WorkItemInformation from '~/work_items/components/work_item_information.vue';
import { i18n } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemDatesSubscription from '~/work_items/graphql/work_item_dates.subscription.graphql';
import workItemTitleSubscription from '~/work_items/graphql/work_item_title.subscription.graphql';
+import workItemAssigneesSubscription from '~/work_items/graphql/work_item_assignees.subscription.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql';
import { temporaryConfig } from '~/graphql_shared/issuable_client';
@@ -28,7 +36,7 @@ import {
workItemDatesSubscriptionResponse,
workItemResponseFactory,
workItemTitleSubscriptionResponse,
- workItemWeightSubscriptionResponse,
+ workItemAssigneesSubscriptionResponse,
} from '../mock_data';
describe('WorkItemDetail component', () => {
@@ -46,9 +54,12 @@ describe('WorkItemDetail component', () => {
const successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
const datesSubscriptionHandler = jest.fn().mockResolvedValue(workItemDatesSubscriptionResponse);
const titleSubscriptionHandler = jest.fn().mockResolvedValue(workItemTitleSubscriptionResponse);
- const weightSubscriptionHandler = jest.fn().mockResolvedValue(workItemWeightSubscriptionResponse);
+ const assigneesSubscriptionHandler = jest
+ .fn()
+ .mockResolvedValue(workItemAssigneesSubscriptionResponse);
const findAlert = () => wrapper.findComponent(GlAlert);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findWorkItemActions = () => wrapper.findComponent(WorkItemActions);
@@ -58,6 +69,7 @@ describe('WorkItemDetail component', () => {
const findWorkItemDueDate = () => wrapper.findComponent(WorkItemDueDate);
const findWorkItemAssignees = () => wrapper.findComponent(WorkItemAssignees);
const findWorkItemLabels = () => wrapper.findComponent(WorkItemLabels);
+ const findWorkItemMilestone = () => wrapper.findComponent(WorkItemMilestone);
const findParent = () => wrapper.find('[data-testid="work-item-parent"]');
const findParentButton = () => findParent().findComponent(GlButton);
const findCloseButton = () => wrapper.find('[data-testid="work-item-close"]');
@@ -72,21 +84,18 @@ describe('WorkItemDetail component', () => {
handler = successHandler,
subscriptionHandler = titleSubscriptionHandler,
confidentialityMock = [updateWorkItemMutation, jest.fn()],
- workItemsMvc2Enabled = false,
- includeWidgets = false,
error = undefined,
+ includeWidgets = false,
+ workItemsMvc2Enabled = false,
} = {}) => {
const handlers = [
[workItemQuery, handler],
[workItemTitleSubscription, subscriptionHandler],
[workItemDatesSubscription, datesSubscriptionHandler],
+ [workItemAssigneesSubscription, assigneesSubscriptionHandler],
confidentialityMock,
];
- if (IS_EE) {
- handlers.push([workItemWeightSubscription, weightSubscriptionHandler]);
- }
-
wrapper = shallowMount(WorkItemDetail, {
apolloProvider: createMockApollo(
handlers,
@@ -107,6 +116,12 @@ describe('WorkItemDetail component', () => {
workItemsMvc2: workItemsMvc2Enabled,
},
hasIssueWeightsFeature: true,
+ hasIterationsFeature: true,
+ projectNamespace: 'namespace',
+ },
+ stubs: {
+ WorkItemWeight: true,
+ WorkItemIteration: true,
},
});
};
@@ -384,13 +399,14 @@ describe('WorkItemDetail component', () => {
});
});
- it('shows an error message when the work item query was unsuccessful', async () => {
+ it('shows empty state with an error message when the work item query was unsuccessful', async () => {
const errorHandler = jest.fn().mockRejectedValue('Oops');
createComponent({ handler: errorHandler });
await waitForPromises();
expect(errorHandler).toHaveBeenCalled();
- expect(findAlert().text()).toBe(i18n.fetchError);
+ expect(findEmptyState().props('description')).toBe(i18n.fetchError);
+ expect(findWorkItemTitle().exists()).toBe(false);
});
it('shows an error message when WorkItemTitle emits an `error` event', async () => {
@@ -413,6 +429,30 @@ describe('WorkItemDetail component', () => {
});
});
+ describe('assignees subscription', () => {
+ describe('when the assignees widget exists', () => {
+ it('calls the assignees subscription', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(assigneesSubscriptionHandler).toHaveBeenCalledWith({
+ issuableId: workItemQueryResponse.data.workItem.id,
+ });
+ });
+ });
+
+ describe('when the assignees widget does not exist', () => {
+ it('does not call the assignees subscription', async () => {
+ const response = workItemResponseFactory({ assigneesWidgetPresent: false });
+ const handler = jest.fn().mockResolvedValue(response);
+ createComponent({ handler });
+ await waitForPromises();
+
+ expect(assigneesSubscriptionHandler).not.toHaveBeenCalled();
+ });
+ });
+ });
+
describe('dates subscription', () => {
describe('when the due date widget exists', () => {
it('calls the dates subscription', async () => {
@@ -429,7 +469,7 @@ describe('WorkItemDetail component', () => {
it('does not call the dates subscription', async () => {
const response = workItemResponseFactory({ datesWidgetPresent: false });
const handler = jest.fn().mockResolvedValue(response);
- createComponent({ handler, workItemsMvc2Enabled: true });
+ createComponent({ handler });
await waitForPromises();
expect(datesSubscriptionHandler).not.toHaveBeenCalled();
@@ -440,9 +480,7 @@ describe('WorkItemDetail component', () => {
describe('assignees widget', () => {
it('renders assignees component when widget is returned from the API', async () => {
- createComponent({
- workItemsMvc2Enabled: true,
- });
+ createComponent();
await waitForPromises();
expect(findWorkItemAssignees().exists()).toBe(true);
@@ -450,7 +488,6 @@ describe('WorkItemDetail component', () => {
it('does not render assignees component when widget is not returned from the API', async () => {
createComponent({
- workItemsMvc2Enabled: true,
handler: jest
.fn()
.mockResolvedValue(workItemResponseFactory({ assigneesWidgetPresent: false })),
@@ -463,11 +500,13 @@ describe('WorkItemDetail component', () => {
describe('labels widget', () => {
it.each`
- description | includeWidgets | exists
- ${'renders when widget is returned from API'} | ${true} | ${true}
- ${'does not render when widget is not returned from API'} | ${false} | ${false}
- `('$description', async ({ includeWidgets, exists }) => {
- createComponent({ includeWidgets, workItemsMvc2Enabled: true });
+ description | labelsWidgetPresent | exists
+ ${'renders when widget is returned from API'} | ${true} | ${true}
+ ${'does not render when widget is not returned from API'} | ${false} | ${false}
+ `('$description', async ({ labelsWidgetPresent, exists }) => {
+ const response = workItemResponseFactory({ labelsWidgetPresent });
+ const handler = jest.fn().mockResolvedValue(response);
+ createComponent({ handler });
await waitForPromises();
expect(findWorkItemLabels().exists()).toBe(exists);
@@ -483,7 +522,7 @@ describe('WorkItemDetail component', () => {
it(`${datesWidgetPresent ? 'renders' : 'does not render'} due date component`, async () => {
const response = workItemResponseFactory({ datesWidgetPresent });
const handler = jest.fn().mockResolvedValue(response);
- createComponent({ handler, workItemsMvc2Enabled: true });
+ createComponent({ handler });
await waitForPromises();
expect(findWorkItemDueDate().exists()).toBe(exists);
@@ -491,7 +530,7 @@ describe('WorkItemDetail component', () => {
});
it('shows an error message when it emits an `error` event', async () => {
- createComponent({ workItemsMvc2Enabled: true });
+ createComponent();
await waitForPromises();
const updateError = 'Failed to update';
@@ -502,6 +541,19 @@ describe('WorkItemDetail component', () => {
});
});
+ describe('milestone widget', () => {
+ it.each`
+ description | includeWidgets | exists
+ ${'renders when widget is returned from API'} | ${true} | ${true}
+ ${'does not render when widget is not returned from API'} | ${false} | ${false}
+ `('$description', async ({ includeWidgets, exists }) => {
+ createComponent({ includeWidgets, workItemsMvc2Enabled: true });
+ await waitForPromises();
+
+ expect(findWorkItemMilestone().exists()).toBe(exists);
+ });
+ });
+
describe('work item information', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/work_items/components/work_item_due_date_spec.js b/spec/frontend/work_items/components/work_item_due_date_spec.js
index 1d76154a1f0..701406b9588 100644
--- a/spec/frontend/work_items/components/work_item_due_date_spec.js
+++ b/spec/frontend/work_items/components/work_item_due_date_spec.js
@@ -62,7 +62,7 @@ describe('WorkItemDueDate component', () => {
createComponent({ canUpdate: true, startDate });
});
- it(exists ? 'renders' : 'does not render', () => {
+ it(`${exists ? 'renders' : 'does not render'}`, () => {
expect(findStartDateButton().exists()).toBe(exists);
});
});
@@ -172,7 +172,7 @@ describe('WorkItemDueDate component', () => {
createComponent({ canUpdate: true, dueDate });
});
- it(exists ? 'renders' : 'does not render', () => {
+ it(`${exists ? 'renders' : 'does not render'}`, () => {
expect(findDueDateButton().exists()).toBe(exists);
});
});
diff --git a/spec/frontend/work_items/components/work_item_labels_spec.js b/spec/frontend/work_items/components/work_item_labels_spec.js
index 1d976897c15..e6ff7e8502d 100644
--- a/spec/frontend/work_items/components/work_item_labels_spec.js
+++ b/spec/frontend/work_items/components/work_item_labels_spec.js
@@ -7,10 +7,18 @@ import { mountExtended } from 'helpers/vue_test_utils_helper';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import labelSearchQuery from '~/vue_shared/components/sidebar/labels_select_widget/graphql/project_labels.query.graphql';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+import workItemLabelsSubscription from 'ee_else_ce/work_items/graphql/work_item_labels.subscription.graphql';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
-import { i18n } from '~/work_items/constants';
-import { temporaryConfig, resolvers } from '~/graphql_shared/issuable_client';
-import { projectLabelsResponse, mockLabels, workItemQueryResponse } from '../mock_data';
+import { i18n, I18N_WORK_ITEM_ERROR_FETCHING_LABELS } from '~/work_items/constants';
+import {
+ projectLabelsResponse,
+ mockLabels,
+ workItemQueryResponse,
+ workItemResponseFactory,
+ updateWorkItemMutationResponse,
+ workItemLabelsSubscriptionResponse,
+} from '../mock_data';
Vue.use(VueApollo);
@@ -21,32 +29,32 @@ describe('WorkItemLabels component', () => {
const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
-
const findEmptyState = () => wrapper.findByTestId('empty-state');
+ const findLabelsTitle = () => wrapper.findByTestId('labels-title');
+ const workItemQuerySuccess = jest.fn().mockResolvedValue(workItemQueryResponse);
const successSearchQueryHandler = jest.fn().mockResolvedValue(projectLabelsResponse);
+ const successUpdateWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemMutationResponse);
+ const subscriptionHandler = jest.fn().mockResolvedValue(workItemLabelsSubscriptionResponse);
const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
const createComponent = ({
- labels = mockLabels,
canUpdate = true,
+ workItemQueryHandler = workItemQuerySuccess,
searchQueryHandler = successSearchQueryHandler,
+ updateWorkItemMutationHandler = successUpdateWorkItemMutationHandler,
} = {}) => {
- const apolloProvider = createMockApollo([[labelSearchQuery, searchQueryHandler]], resolvers, {
- typePolicies: temporaryConfig.cacheConfig.typePolicies,
- });
-
- apolloProvider.clients.defaultClient.writeQuery({
- query: workItemQuery,
- variables: {
- id: workItemId,
- },
- data: workItemQueryResponse.data,
- });
+ const apolloProvider = createMockApollo([
+ [workItemQuery, workItemQueryHandler],
+ [labelSearchQuery, searchQueryHandler],
+ [updateWorkItemMutation, updateWorkItemMutationHandler],
+ [workItemLabelsSubscription, subscriptionHandler],
+ ]);
wrapper = mountExtended(WorkItemLabels, {
propsData: {
- labels,
workItemId,
canUpdate,
fullPath: 'test-project-path',
@@ -60,6 +68,12 @@ describe('WorkItemLabels component', () => {
wrapper.destroy();
});
+ it('has a label', () => {
+ createComponent();
+
+ expect(findTokenSelector().props('ariaLabelledby')).toEqual(findLabelsTitle().attributes('id'));
+ });
+
it('focuses token selector on token selector input event', async () => {
createComponent();
findTokenSelector().vm.$emit('input', [mockLabels[0]]);
@@ -151,7 +165,7 @@ describe('WorkItemLabels component', () => {
findTokenSelector().vm.$emit('focus');
await waitForPromises();
- expect(wrapper.emitted('error')).toEqual([[i18n.fetchError]]);
+ expect(wrapper.emitted('error')).toEqual([[I18N_WORK_ITEM_ERROR_FETCHING_LABELS]]);
});
it('should search for with correct key after text input', async () => {
@@ -163,7 +177,53 @@ describe('WorkItemLabels component', () => {
await waitForPromises();
expect(successSearchQueryHandler).toHaveBeenCalledWith(
- expect.objectContaining({ search: searchKey }),
+ expect.objectContaining({ searchTerm: searchKey }),
);
});
+
+ describe('when clicking outside the token selector', () => {
+ it('calls a mutation with correct variables', () => {
+ createComponent();
+
+ findTokenSelector().vm.$emit('input', [mockLabels[0]]);
+ findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
+
+ expect(successUpdateWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ labelsWidget: { addLabelIds: [mockLabels[0].id], removeLabelIds: [] },
+ id: 'gid://gitlab/WorkItem/1',
+ },
+ });
+ });
+
+ it('emits an error and resets labels if mutation was rejected', async () => {
+ const workItemQueryHandler = jest.fn().mockResolvedValue(workItemResponseFactory());
+
+ createComponent({ updateWorkItemMutationHandler: errorHandler, workItemQueryHandler });
+
+ await waitForPromises();
+
+ const initialLabels = findTokenSelector().props('selectedTokens');
+
+ findTokenSelector().vm.$emit('input', [mockLabels[0]]);
+ findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
+
+ await waitForPromises();
+
+ const updatedLabels = findTokenSelector().props('selectedTokens');
+
+ expect(wrapper.emitted('error')).toEqual([[i18n.updateError]]);
+ expect(updatedLabels).toEqual(initialLabels);
+ });
+
+ it('has a subscription', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ expect(subscriptionHandler).toHaveBeenCalledWith({
+ issuableId: workItemId,
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
index 434c1db8a2c..ab3ea623e3e 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -28,6 +28,7 @@ describe('WorkItemLinksForm', () => {
listResponse = availableWorkItemsResponse,
typesResponse = projectWorkItemTypesQueryResponse,
parentConfidential = false,
+ hasIterationsFeature = false,
} = {}) => {
wrapper = shallowMountExtended(WorkItemLinksForm, {
apolloProvider: createMockApollo([
@@ -39,6 +40,7 @@ describe('WorkItemLinksForm', () => {
propsData: { issuableGid: 'gid://gitlab/WorkItem/1', parentConfidential },
provide: {
projectPath: 'project/path',
+ hasIterationsFeature,
},
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
index 287ec022d3f..e3f3b74f296 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
@@ -10,8 +10,8 @@ describe('WorkItemLinksMenu', () => {
wrapper = shallowMountExtended(WorkItemLinksMenu);
};
- const findDropdown = () => wrapper.find(GlDropdown);
- const findRemoveDropdownItem = () => wrapper.find(GlDropdownItem);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findRemoveDropdownItem = () => wrapper.findComponent(GlDropdownItem);
beforeEach(async () => {
createComponent();
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
index 876aedff08b..6961996f912 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
@@ -5,7 +5,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
-import issueConfidentialQuery from '~/sidebar/queries/issue_confidential.query.graphql';
+import issueDetailsQuery from 'ee_else_ce/work_items/graphql/get_issue_details.query.graphql';
import WorkItemLinks from '~/work_items/components/work_item_links/work_item_links.vue';
import WorkItemLinkChild from '~/work_items/components/work_item_links/work_item_link_child.vue';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
@@ -21,16 +21,29 @@ import {
Vue.use(VueApollo);
-const issueConfidentialityResponse = (confidential = false) => ({
+const issueDetailsResponse = (confidential = false) => ({
data: {
workspace: {
- id: '1',
- __typename: 'Project',
+ id: 'gid://gitlab/Project/1',
issuable: {
- __typename: 'Issue',
id: 'gid://gitlab/Issue/4',
confidential,
+ iteration: {
+ id: 'gid://gitlab/Iteration/1124',
+ title: null,
+ startDate: '2022-06-22',
+ dueDate: '2022-07-19',
+ webUrl: 'http://127.0.0.1:3000/groups/gitlab-org/-/iterations/1124',
+ iterationCadence: {
+ id: 'gid://gitlab/Iterations::Cadence/1101',
+ title: 'Quod voluptates quidem ea eaque eligendi ex corporis.',
+ __typename: 'IterationCadence',
+ },
+ __typename: 'Iteration',
+ },
+ __typename: 'Issue',
},
+ __typename: 'Project',
},
},
});
@@ -55,14 +68,15 @@ describe('WorkItemLinks', () => {
data = {},
fetchHandler = jest.fn().mockResolvedValue(workItemHierarchyResponse),
mutationHandler = mutationChangeParentHandler,
- confidentialQueryHandler = jest.fn().mockResolvedValue(issueConfidentialityResponse()),
+ issueDetailsQueryHandler = jest.fn().mockResolvedValue(issueDetailsResponse()),
+ hasIterationsFeature = false,
} = {}) => {
mockApollo = createMockApollo(
[
[getWorkItemLinksQuery, fetchHandler],
[changeWorkItemParentMutation, mutationHandler],
[workItemQuery, childWorkItemQueryHandler],
- [issueConfidentialQuery, confidentialQueryHandler],
+ [issueDetailsQuery, issueDetailsQueryHandler],
],
{},
{ addTypename: true },
@@ -77,6 +91,7 @@ describe('WorkItemLinks', () => {
provide: {
projectPath: 'project/path',
iid: '1',
+ hasIterationsFeature,
},
propsData: { issuableId: 1 },
apolloProvider: mockApollo,
@@ -266,7 +281,7 @@ describe('WorkItemLinks', () => {
describe('when parent item is confidential', () => {
it('passes correct confidentiality status to form', async () => {
await createComponent({
- confidentialQueryHandler: jest.fn().mockResolvedValue(issueConfidentialityResponse(true)),
+ issueDetailsQueryHandler: jest.fn().mockResolvedValue(issueDetailsResponse(true)),
});
findToggleAddFormButton().vm.$emit('click');
await nextTick();
diff --git a/spec/frontend/work_items/components/work_item_milestone_spec.js b/spec/frontend/work_items/components/work_item_milestone_spec.js
new file mode 100644
index 00000000000..08cdf62ae52
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_milestone_spec.js
@@ -0,0 +1,247 @@
+import {
+ GlDropdown,
+ GlDropdownItem,
+ GlSearchBoxByType,
+ GlSkeletonLoader,
+ GlFormGroup,
+ GlDropdownText,
+} from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
+import { resolvers, temporaryConfig } from '~/graphql_shared/issuable_client';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import projectMilestonesQuery from '~/sidebar/queries/project_milestones.query.graphql';
+import {
+ projectMilestonesResponse,
+ projectMilestonesResponseWithNoMilestones,
+ mockMilestoneWidgetResponse,
+ workItemResponseFactory,
+ updateWorkItemMutationErrorResponse,
+} from 'jest/work_items/mock_data';
+import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+
+describe('WorkItemMilestone component', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+
+ const workItemId = 'gid://gitlab/WorkItem/1';
+ const workItemType = 'Task';
+ const fullPath = 'full-path';
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findNoMilestoneDropdownItem = () => wrapper.findByTestId('no-milestone');
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findFirstDropdownItem = () => findDropdownItems().at(0);
+ const findDropdownTexts = () => wrapper.findAllComponents(GlDropdownText);
+ const findDropdownItemAtIndex = (index) => findDropdownItems().at(index);
+ const findDisabledTextSpan = () => wrapper.findByTestId('disabled-text');
+ const findDropdownTextAtIndex = (index) => findDropdownTexts().at(index);
+ const findInputGroup = () => wrapper.findComponent(GlFormGroup);
+
+ const workItemQueryResponse = workItemResponseFactory({ canUpdate: true, canDelete: true });
+
+ const networkResolvedValue = new Error();
+
+ const successSearchQueryHandler = jest.fn().mockResolvedValue(projectMilestonesResponse);
+ const successSearchWithNoMatchingMilestones = jest
+ .fn()
+ .mockResolvedValue(projectMilestonesResponseWithNoMilestones);
+
+ const showDropdown = () => {
+ findDropdown().vm.$emit('shown');
+ };
+
+ const hideDropdown = () => {
+ findDropdown().vm.$emit('hide');
+ };
+
+ const createComponent = ({
+ canUpdate = true,
+ milestone = mockMilestoneWidgetResponse,
+ searchQueryHandler = successSearchQueryHandler,
+ } = {}) => {
+ const apolloProvider = createMockApollo(
+ [[projectMilestonesQuery, searchQueryHandler]],
+ resolvers,
+ {
+ typePolicies: temporaryConfig.cacheConfig.typePolicies,
+ },
+ );
+
+ apolloProvider.clients.defaultClient.writeQuery({
+ query: workItemQuery,
+ variables: {
+ id: workItemId,
+ },
+ data: workItemQueryResponse.data,
+ });
+
+ wrapper = shallowMountExtended(WorkItemMilestone, {
+ apolloProvider,
+ propsData: {
+ canUpdate,
+ workItemMilestone: milestone,
+ workItemId,
+ workItemType,
+ fullPath,
+ },
+ stubs: {
+ GlDropdown,
+ GlSearchBoxByType,
+ },
+ });
+ };
+
+ it('has "Milestone" label', () => {
+ createComponent();
+
+ expect(findInputGroup().exists()).toBe(true);
+ expect(findInputGroup().attributes('label')).toBe(WorkItemMilestone.i18n.MILESTONE);
+ });
+
+ describe('Default text with canUpdate false and milestone value', () => {
+ describe.each`
+ description | milestone | value
+ ${'when no milestone'} | ${null} | ${WorkItemMilestone.i18n.NONE}
+ ${'when milestone set'} | ${mockMilestoneWidgetResponse} | ${mockMilestoneWidgetResponse.title}
+ `('$description', ({ milestone, value }) => {
+ it(`has a value of "${value}"`, () => {
+ createComponent({ canUpdate: false, milestone });
+
+ expect(findDisabledTextSpan().text()).toBe(value);
+ expect(findDropdown().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Default text value when canUpdate true and no milestone set', () => {
+ it(`has a value of "Add to milestone"`, () => {
+ createComponent({ canUpdate: true, milestone: null });
+
+ expect(findDropdown().props('text')).toBe(WorkItemMilestone.i18n.MILESTONE_PLACEHOLDER);
+ });
+ });
+
+ describe('Dropdown search', () => {
+ it('has the search box', () => {
+ createComponent();
+
+ expect(findSearchBox().exists()).toBe(true);
+ });
+
+ it('shows no matching results when no items', () => {
+ createComponent({
+ searchQueryHandler: successSearchWithNoMatchingMilestones,
+ });
+
+ expect(findDropdownTextAtIndex(0).text()).toBe(WorkItemMilestone.i18n.NO_MATCHING_RESULTS);
+ expect(findDropdownItems()).toHaveLength(1);
+ expect(findDropdownTexts()).toHaveLength(1);
+ });
+ });
+
+ describe('Dropdown options', () => {
+ beforeEach(() => {
+ createComponent({ canUpdate: true });
+ });
+
+ it('shows the skeleton loader when the items are being fetched on click', async () => {
+ showDropdown();
+ await nextTick();
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+
+ it('shows the milestones in dropdown when the items have finished fetching', async () => {
+ showDropdown();
+ await waitForPromises();
+
+ expect(findSkeletonLoader().exists()).toBe(false);
+ expect(findNoMilestoneDropdownItem().exists()).toBe(true);
+ expect(findDropdownItems()).toHaveLength(
+ projectMilestonesResponse.data.workspace.attributes.nodes.length + 1,
+ );
+ });
+
+ it('changes the milestone to null when clicked on no milestone', async () => {
+ showDropdown();
+ findFirstDropdownItem().vm.$emit('click');
+
+ hideDropdown();
+ await nextTick();
+ expect(findDropdown().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(findDropdown().props('loading')).toBe(false);
+ expect(findDropdown().props('text')).toBe(WorkItemMilestone.i18n.MILESTONE_PLACEHOLDER);
+ });
+
+ it('changes the milestone to the selected milestone', async () => {
+ const milestoneIndex = 1;
+ /** the index is -1 since no matching results is also a dropdown item */
+ const milestoneAtIndex =
+ projectMilestonesResponse.data.workspace.attributes.nodes[milestoneIndex - 1];
+ showDropdown();
+
+ await waitForPromises();
+ findDropdownItemAtIndex(milestoneIndex).vm.$emit('click');
+
+ hideDropdown();
+ await waitForPromises();
+
+ expect(findDropdown().props('text')).toBe(milestoneAtIndex.title);
+ });
+ });
+
+ describe('Error handlers', () => {
+ it.each`
+ errorType | expectedErrorMessage | mockValue | resolveFunction
+ ${'graphql error'} | ${'Something went wrong while updating the task. Please try again.'} | ${updateWorkItemMutationErrorResponse} | ${'mockResolvedValue'}
+ ${'network error'} | ${'Something went wrong while updating the task. Please try again.'} | ${networkResolvedValue} | ${'mockRejectedValue'}
+ `(
+ 'emits an error when there is a $errorType',
+ async ({ mockValue, expectedErrorMessage, resolveFunction }) => {
+ createComponent({
+ mutationHandler: jest.fn()[resolveFunction](mockValue),
+ canUpdate: true,
+ });
+
+ showDropdown();
+ findFirstDropdownItem().vm.$emit('click');
+ hideDropdown();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[expectedErrorMessage]]);
+ },
+ );
+ });
+
+ describe('Tracking event', () => {
+ it('tracks updating the milestone', async () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ createComponent({ canUpdate: true });
+
+ showDropdown();
+ findFirstDropdownItem().vm.$emit('click');
+ hideDropdown();
+
+ await waitForPromises();
+
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_milestone', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_milestone',
+ property: 'type_Task',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_type_icon_spec.js b/spec/frontend/work_items/components/work_item_type_icon_spec.js
index 95ddfc3980e..182fb0f8cb6 100644
--- a/spec/frontend/work_items/components/work_item_type_icon_spec.js
+++ b/spec/frontend/work_items/components/work_item_type_icon_spec.js
@@ -51,7 +51,7 @@ describe('Work Item type component', () => {
});
it('renders the icon in gray color', () => {
- expect(findIcon().classes()).toContain('gl-text-gray-500');
+ expect(findIcon().classes()).toContain('gl-text-secondary');
});
it('shows tooltip on hover when props passed', () => {
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index e1bc8d2f6b7..ed90b11222a 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -17,6 +17,25 @@ export const mockAssignees = [
},
];
+export const mockLabels = [
+ {
+ __typename: 'Label',
+ id: 'gid://gitlab/Label/1',
+ title: 'Label 1',
+ description: '',
+ color: '#f00',
+ textColor: '#00f',
+ },
+ {
+ __typename: 'Label',
+ id: 'gid://gitlab/Label/2',
+ title: 'Label::2',
+ description: '',
+ color: '#b00',
+ textColor: '#00b',
+ },
+];
+
export const workItemQueryResponse = {
data: {
workItem: {
@@ -50,6 +69,8 @@ export const workItemQueryResponse = {
description: 'some **great** text',
descriptionHtml:
'<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
+ lastEditedAt: null,
+ lastEditedBy: null,
},
{
__typename: 'WorkItemWidgetAssignees',
@@ -163,9 +184,15 @@ export const workItemResponseFactory = ({
allowsMultipleAssignees = true,
assigneesWidgetPresent = true,
datesWidgetPresent = true,
+ labelsWidgetPresent = true,
weightWidgetPresent = true,
+ milestoneWidgetPresent = true,
+ iterationWidgetPresent = true,
confidential = false,
canInviteMembers = false,
+ allowsScopedLabels = false,
+ lastEditedAt = null,
+ lastEditedBy = null,
parent = mockParent.parent,
} = {}) => ({
data: {
@@ -200,6 +227,8 @@ export const workItemResponseFactory = ({
description: 'some **great** text',
descriptionHtml:
'<p data-sourcepos="1:1-1:19" dir="auto">some <strong>great</strong> text</p>',
+ lastEditedAt,
+ lastEditedBy,
},
assigneesWidgetPresent
? {
@@ -212,6 +241,16 @@ export const workItemResponseFactory = ({
},
}
: { type: 'MOCK TYPE' },
+ labelsWidgetPresent
+ ? {
+ __typename: 'WorkItemWidgetLabels',
+ type: 'LABELS',
+ allowsScopedLabels,
+ labels: {
+ nodes: mockLabels,
+ },
+ }
+ : { type: 'MOCK TYPE' },
datesWidgetPresent
? {
__typename: 'WorkItemWidgetStartAndDueDate',
@@ -227,6 +266,30 @@ export const workItemResponseFactory = ({
weight: 0,
}
: { type: 'MOCK TYPE' },
+ iterationWidgetPresent
+ ? {
+ __typename: 'WorkItemWidgetIteration',
+ type: 'ITERATION',
+ iteration: {
+ description: null,
+ id: 'gid://gitlab/Iteration/1215',
+ iid: '182',
+ title: 'Iteration default title',
+ startDate: '2022-09-22',
+ dueDate: '2022-09-30',
+ },
+ }
+ : { type: 'MOCK TYPE' },
+ milestoneWidgetPresent
+ ? {
+ __typename: 'WorkItemWidgetMilestone',
+ dueDate: null,
+ expired: false,
+ id: 'gid://gitlab/Milestone/30',
+ title: 'v4.0',
+ type: 'MILESTONE',
+ }
+ : { type: 'MOCK TYPE' },
{
__typename: 'WorkItemWidgetHierarchy',
type: 'HIERARCHY',
@@ -331,6 +394,11 @@ export const createWorkItemFromTaskMutationResponse = {
type: 'DESCRIPTION',
description: 'New description',
descriptionHtml: '<p>New description</p>',
+ lastEditedAt: '2022-09-21T06:18:42Z',
+ lastEditedBy: {
+ name: 'Administrator',
+ webPath: '/root',
+ },
},
],
},
@@ -444,6 +512,61 @@ export const workItemWeightSubscriptionResponse = {
},
};
+export const workItemAssigneesSubscriptionResponse = {
+ data: {
+ issuableAssigneesUpdated: {
+ id: 'gid://gitlab/WorkItem/1',
+ widgets: [
+ {
+ __typename: 'WorkItemAssigneesWeight',
+ assignees: {
+ nodes: [mockAssignees[0]],
+ },
+ },
+ ],
+ },
+ },
+};
+
+export const workItemLabelsSubscriptionResponse = {
+ data: {
+ issuableLabelsUpdated: {
+ id: 'gid://gitlab/WorkItem/1',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetLabels',
+ type: 'LABELS',
+ allowsScopedLabels: false,
+ labels: {
+ nodes: mockLabels,
+ },
+ },
+ ],
+ },
+ },
+};
+
+export const workItemIterationSubscriptionResponse = {
+ data: {
+ issuableIterationUpdated: {
+ id: 'gid://gitlab/WorkItem/1',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetIteration',
+ iteration: {
+ description: 'Iteration description',
+ dueDate: '2022-07-29',
+ id: 'gid://gitlab/Iteration/1125',
+ iid: '95',
+ startDate: '2022-06-22',
+ title: 'Iteration subcription title',
+ },
+ },
+ ],
+ },
+ },
+};
+
export const workItemHierarchyEmptyResponse = {
data: {
workItem: {
@@ -857,25 +980,6 @@ export const currentUserNullResponse = {
},
};
-export const mockLabels = [
- {
- __typename: 'Label',
- id: 'gid://gitlab/Label/1',
- title: 'Label 1',
- description: '',
- color: '#f00',
- textColor: '#00f',
- },
- {
- __typename: 'Label',
- id: 'gid://gitlab/Label/2',
- title: 'Label 2',
- description: '',
- color: '#b00',
- textColor: '#00b',
- },
-];
-
export const projectLabelsResponse = {
data: {
workspace: {
@@ -887,3 +991,134 @@ export const projectLabelsResponse = {
},
},
};
+
+export const mockIterationWidgetResponse = {
+ description: 'Iteration description',
+ dueDate: '2022-07-19',
+ id: 'gid://gitlab/Iteration/1124',
+ iid: '91',
+ startDate: '2022-06-22',
+ title: 'Iteration title widget',
+};
+
+export const groupIterationsResponse = {
+ data: {
+ workspace: {
+ id: 'gid://gitlab/Group/22',
+ attributes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Iteration/1124',
+ title: null,
+ startDate: '2022-06-22',
+ dueDate: '2022-07-19',
+ webUrl: 'http://127.0.0.1:3000/groups/gitlab-org/-/iterations/1124',
+ iterationCadence: {
+ id: 'gid://gitlab/Iterations::Cadence/1101',
+ title: 'Quod voluptates quidem ea eaque eligendi ex corporis.',
+ __typename: 'IterationCadence',
+ },
+ __typename: 'Iteration',
+ state: 'current',
+ },
+ {
+ id: 'gid://gitlab/Iteration/1185',
+ title: null,
+ startDate: '2022-07-06',
+ dueDate: '2022-07-19',
+ webUrl: 'http://127.0.0.1:3000/groups/gitlab-org/-/iterations/1185',
+ iterationCadence: {
+ id: 'gid://gitlab/Iterations::Cadence/1144',
+ title: 'Quo velit perspiciatis saepe aut omnis voluptas ab eos.',
+ __typename: 'IterationCadence',
+ },
+ __typename: 'Iteration',
+ state: 'current',
+ },
+ {
+ id: 'gid://gitlab/Iteration/1194',
+ title: null,
+ startDate: '2022-07-06',
+ dueDate: '2022-07-19',
+ webUrl: 'http://127.0.0.1:3000/groups/gitlab-org/-/iterations/1194',
+ iterationCadence: {
+ id: 'gid://gitlab/Iterations::Cadence/1152',
+ title:
+ 'Minima aut consequatur magnam vero doloremque accusamus maxime repellat voluptatem qui.',
+ __typename: 'IterationCadence',
+ },
+ __typename: 'Iteration',
+ state: 'current',
+ },
+ ],
+ __typename: 'IterationConnection',
+ },
+ __typename: 'Group',
+ },
+ },
+};
+
+export const groupIterationsResponseWithNoIterations = {
+ data: {
+ workspace: {
+ id: 'gid://gitlab/Group/22',
+ attributes: {
+ nodes: [],
+ __typename: 'IterationConnection',
+ },
+ __typename: 'Group',
+ },
+ },
+};
+
+export const mockMilestoneWidgetResponse = {
+ dueDate: null,
+ expired: false,
+ id: 'gid://gitlab/Milestone/30',
+ title: 'v4.0',
+};
+
+export const projectMilestonesResponse = {
+ data: {
+ workspace: {
+ id: 'gid://gitlab/Project/1',
+ attributes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Milestone/5',
+ title: 'v4.0',
+ webUrl: '/gitlab-org/gitlab-test/-/milestones/5',
+ dueDate: null,
+ expired: false,
+ __typename: 'Milestone',
+ state: 'active',
+ },
+ {
+ id: 'gid://gitlab/Milestone/4',
+ title: 'v3.0',
+ webUrl: '/gitlab-org/gitlab-test/-/milestones/4',
+ dueDate: null,
+ expired: false,
+ __typename: 'Milestone',
+ state: 'active',
+ },
+ ],
+ __typename: 'MilestoneConnection',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const projectMilestonesResponseWithNoMilestones = {
+ data: {
+ workspace: {
+ id: 'gid://gitlab/Project/1',
+ attributes: {
+ nodes: [],
+ __typename: 'MilestoneConnection',
+ },
+ __typename: 'Project',
+ },
+ },
+};
diff --git a/spec/frontend/work_items/router_spec.js b/spec/frontend/work_items/router_spec.js
index ab370e2ca8b..66a917d8052 100644
--- a/spec/frontend/work_items/router_spec.js
+++ b/spec/frontend/work_items/router_spec.js
@@ -4,15 +4,19 @@ import VueApollo from 'vue-apollo';
import workItemWeightSubscription from 'ee_component/work_items/graphql/work_item_weight.subscription.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import {
+ workItemAssigneesSubscriptionResponse,
workItemDatesSubscriptionResponse,
workItemResponseFactory,
workItemTitleSubscriptionResponse,
workItemWeightSubscriptionResponse,
+ workItemLabelsSubscriptionResponse,
} from 'jest/work_items/mock_data';
import App from '~/work_items/components/app.vue';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemDatesSubscription from '~/work_items/graphql/work_item_dates.subscription.graphql';
import workItemTitleSubscription from '~/work_items/graphql/work_item_title.subscription.graphql';
+import workItemAssigneesSubscription from '~/work_items/graphql/work_item_assignees.subscription.graphql';
+import workItemLabelsSubscription from 'ee_else_ce/work_items/graphql/work_item_labels.subscription.graphql';
import CreateWorkItem from '~/work_items/pages/create_work_item.vue';
import WorkItemsRoot from '~/work_items/pages/work_item_root.vue';
import { createRouter } from '~/work_items/router';
@@ -26,6 +30,10 @@ describe('Work items router', () => {
const datesSubscriptionHandler = jest.fn().mockResolvedValue(workItemDatesSubscriptionResponse);
const titleSubscriptionHandler = jest.fn().mockResolvedValue(workItemTitleSubscriptionResponse);
const weightSubscriptionHandler = jest.fn().mockResolvedValue(workItemWeightSubscriptionResponse);
+ const assigneesSubscriptionHandler = jest
+ .fn()
+ .mockResolvedValue(workItemAssigneesSubscriptionResponse);
+ const labelsSubscriptionHandler = jest.fn().mockResolvedValue(workItemLabelsSubscriptionResponse);
const createComponent = async (routeArg) => {
const router = createRouter('/work_item');
@@ -37,6 +45,8 @@ describe('Work items router', () => {
[workItemQuery, workItemQueryHandler],
[workItemDatesSubscription, datesSubscriptionHandler],
[workItemTitleSubscription, titleSubscriptionHandler],
+ [workItemAssigneesSubscription, assigneesSubscriptionHandler],
+ [workItemLabelsSubscription, labelsSubscriptionHandler],
];
if (IS_EE) {
diff --git a/spec/frontend/work_items_hierarchy/components/app_spec.js b/spec/frontend/work_items_hierarchy/components/app_spec.js
index 1426fbfab80..124ff5f1608 100644
--- a/spec/frontend/work_items_hierarchy/components/app_spec.js
+++ b/spec/frontend/work_items_hierarchy/components/app_spec.js
@@ -32,7 +32,7 @@ describe('WorkItemsHierarchy App', () => {
it('shows when the banner is visible', () => {
createComponent({}, { bannerVisible: true });
- expect(wrapper.find(GlBanner).exists()).toBe(true);
+ expect(wrapper.findComponent(GlBanner).exists()).toBe(true);
});
it('hide when close is called', async () => {
@@ -42,7 +42,7 @@ describe('WorkItemsHierarchy App', () => {
await nextTick();
- expect(wrapper.find(GlBanner).exists()).toBe(false);
+ expect(wrapper.findComponent(GlBanner).exists()).toBe(false);
});
});
diff --git a/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js b/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js
index dca016dc317..084aaa754ab 100644
--- a/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js
+++ b/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js
@@ -57,7 +57,7 @@ describe('WorkItemsHierarchy Hierarchy', () => {
});
it('does not render badges', () => {
- expect(wrapper.find(GlBadge).exists()).toBe(false);
+ expect(wrapper.findComponent(GlBadge).exists()).toBe(false);
});
});
diff --git a/spec/frontend_integration/content_editor/content_editor_integration_spec.js b/spec/frontend_integration/content_editor/content_editor_integration_spec.js
index c0c6b5e5dc8..2fa491196ff 100644
--- a/spec/frontend_integration/content_editor/content_editor_integration_spec.js
+++ b/spec/frontend_integration/content_editor/content_editor_integration_spec.js
@@ -12,13 +12,16 @@ describe('content_editor', () => {
let wrapper;
let renderMarkdown;
- const buildWrapper = ({ markdown = '' } = {}) => {
+ const buildWrapper = ({ markdown = '', listeners = {} } = {}) => {
wrapper = mountExtended(ContentEditor, {
propsData: {
renderMarkdown,
uploadsPath: '/',
markdown,
},
+ listeners: {
+ ...listeners,
+ },
});
};
@@ -35,6 +38,10 @@ describe('content_editor', () => {
renderMarkdown = jest.fn();
});
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
describe('when loading initial content', () => {
describe('when the initial content is empty', () => {
it('still hides the loading indicator', async () => {
@@ -169,4 +176,16 @@ This reference tag is a mix of letters and numbers [^footnote].
});
});
});
+
+ it('bubbles up the keydown event captured by ProseMirror', async () => {
+ const keydownHandler = jest.fn();
+
+ buildWrapper({ listeners: { keydown: keydownHandler } });
+
+ await waitUntilContentIsLoaded();
+
+ wrapper.find('[contenteditable]').trigger('keydown', {});
+
+ expect(wrapper.emitted('keydown')).toHaveLength(1);
+ });
});
diff --git a/spec/frontend_integration/diffs/diffs_interopability_spec.js b/spec/frontend_integration/diffs/diffs_interopability_spec.js
index 8e9bc4f0a5f..5017fb8c49d 100644
--- a/spec/frontend_integration/diffs/diffs_interopability_spec.js
+++ b/spec/frontend_integration/diffs/diffs_interopability_spec.js
@@ -121,6 +121,7 @@ describe('diffs third party interoperability', () => {
vm = startDiffsApp();
+ // eslint-disable-next-line jest/no-standalone-expect
await waitFor(() => expect(hasLines(rowSelector)).toBe(true));
});
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index 60b3edfc279..b5c2d4da9ac 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -232,11 +232,7 @@ RSpec.describe GitlabSchema do
end
end
- describe '.parse_gid' do
- let_it_be(:global_id) { 'gid://gitlab/TestOne/2147483647' }
-
- subject(:parse_gid) { described_class.parse_gid(global_id) }
-
+ context 'for gid parsing' do
before do
test_base = Class.new
test_one = Class.new(test_base)
@@ -249,66 +245,85 @@ RSpec.describe GitlabSchema do
stub_const('TestThree', test_three)
end
- it 'parses the gid' do
- gid = parse_gid
+ describe '.parse_gid' do
+ let_it_be(:global_id) { 'gid://gitlab/TestOne/2147483647' }
- expect(gid.model_id).to eq '2147483647'
- expect(gid.model_class).to eq TestOne
- end
+ subject(:parse_gid) { described_class.parse_gid(global_id) }
- context 'when gid is malformed' do
- let_it_be(:global_id) { 'malformed://gitlab/TestOne/2147483647' }
+ it 'parses the gid' do
+ gid = parse_gid
- it 'raises an error' do
- expect { parse_gid }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid GitLab ID.")
+ expect(gid.model_id).to eq '2147483647'
+ expect(gid.model_class).to eq TestOne
end
- end
- context 'when using expected_type' do
- it 'accepts a single type' do
- gid = described_class.parse_gid(global_id, expected_type: TestOne)
+ context 'when gid is malformed' do
+ let_it_be(:global_id) { 'malformed://gitlab/TestOne/2147483647' }
- expect(gid.model_class).to eq TestOne
+ it 'raises an error' do
+ expect { parse_gid }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid GitLab ID.")
+ end
end
- it 'accepts an ancestor type' do
- gid = described_class.parse_gid(global_id, expected_type: TestBase)
+ context 'when using expected_type' do
+ it 'accepts a single type' do
+ gid = described_class.parse_gid(global_id, expected_type: TestOne)
- expect(gid.model_class).to eq TestOne
- end
+ expect(gid.model_class).to eq TestOne
+ end
- it 'rejects an unknown type' do
- expect { described_class.parse_gid(global_id, expected_type: TestTwo) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid ID for TestTwo.")
- end
+ it 'accepts an ancestor type' do
+ gid = described_class.parse_gid(global_id, expected_type: TestBase)
- context 'when expected_type is an array' do
- subject(:parse_gid) { described_class.parse_gid(global_id, expected_type: [TestOne, TestTwo]) }
+ expect(gid.model_class).to eq TestOne
+ end
- context 'when global_id is of type TestOne' do
- it 'returns an object of an expected type' do
- expect(parse_gid.model_class).to eq TestOne
- end
+ it 'rejects an unknown type' do
+ expect { described_class.parse_gid(global_id, expected_type: TestTwo) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid ID for TestTwo.")
end
- context 'when global_id is of type TestTwo' do
- let_it_be(:global_id) { 'gid://gitlab/TestTwo/2147483647' }
+ context 'when expected_type is an array' do
+ subject(:parse_gid) { described_class.parse_gid(global_id, expected_type: [TestOne, TestTwo]) }
- it 'returns an object of an expected type' do
- expect(parse_gid.model_class).to eq TestTwo
+ context 'when global_id is of type TestOne' do
+ it 'returns an object of an expected type' do
+ expect(parse_gid.model_class).to eq TestOne
+ end
+ end
+
+ context 'when global_id is of type TestTwo' do
+ let_it_be(:global_id) { 'gid://gitlab/TestTwo/2147483647' }
+
+ it 'returns an object of an expected type' do
+ expect(parse_gid.model_class).to eq TestTwo
+ end
end
- end
- context 'when global_id is of type TestThree' do
- let_it_be(:global_id) { 'gid://gitlab/TestThree/2147483647' }
+ context 'when global_id is of type TestThree' do
+ let_it_be(:global_id) { 'gid://gitlab/TestThree/2147483647' }
- it 'rejects an unknown type' do
- expect { parse_gid }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid ID for TestOne, TestTwo.")
+ it 'rejects an unknown type' do
+ expect { parse_gid }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid ID for TestOne, TestTwo.")
+ end
end
end
end
end
+
+ describe '.parse_gids' do
+ let_it_be(:global_ids) { %w[gid://gitlab/TestOne/123 gid://gitlab/TestTwo/456] }
+
+ subject(:parse_gids) { described_class.parse_gids(global_ids, expected_type: [TestOne, TestTwo]) }
+
+ it 'parses the gids' do
+ expect(described_class).to receive(:parse_gid).with('gid://gitlab/TestOne/123', { expected_type: [TestOne, TestTwo] }).and_call_original
+ expect(described_class).to receive(:parse_gid).with('gid://gitlab/TestTwo/456', { expected_type: [TestOne, TestTwo] }).and_call_original
+ expect(parse_gids.map(&:model_id)).to eq %w[123 456]
+ expect(parse_gids.map(&:model_class)).to eq [TestOne, TestTwo]
+ end
+ end
end
end
diff --git a/spec/graphql/graphql_triggers_spec.rb b/spec/graphql/graphql_triggers_spec.rb
index 5e2ab74a0e5..a4a643582f5 100644
--- a/spec/graphql/graphql_triggers_spec.rb
+++ b/spec/graphql/graphql_triggers_spec.rb
@@ -32,6 +32,20 @@ RSpec.describe GraphqlTriggers do
end
end
+ describe '.issuable_description_updated' do
+ it 'triggers the issuableDescriptionUpdated subscription' do
+ work_item = create(:work_item)
+
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ 'issuableDescriptionUpdated',
+ { issuable_id: work_item.to_gid },
+ work_item
+ ).and_call_original
+
+ GraphqlTriggers.issuable_description_updated(work_item)
+ end
+ end
+
describe '.issuable_labels_updated' do
it 'triggers the issuableLabelsUpdated subscription' do
project = create(:project)
@@ -61,4 +75,32 @@ RSpec.describe GraphqlTriggers do
GraphqlTriggers.issuable_dates_updated(work_item)
end
end
+
+ describe '.merge_request_reviewers_updated' do
+ it 'triggers the mergeRequestReviewersUpdated subscription' do
+ merge_request = build_stubbed(:merge_request)
+
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ 'mergeRequestReviewersUpdated',
+ { issuable_id: merge_request.to_gid },
+ merge_request
+ ).and_call_original
+
+ GraphqlTriggers.merge_request_reviewers_updated(merge_request)
+ end
+ end
+
+ describe '.merge_request_merge_status_updated' do
+ it 'triggers the mergeRequestMergeStatusUpdated subscription' do
+ merge_request = build_stubbed(:merge_request)
+
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ 'mergeRequestMergeStatusUpdated',
+ { issuable_id: merge_request.to_gid },
+ merge_request
+ ).and_call_original
+
+ GraphqlTriggers.merge_request_merge_status_updated(merge_request)
+ end
+ end
end
diff --git a/spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb b/spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb
index 412be5f16a4..727db7e2361 100644
--- a/spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb
+++ b/spec/graphql/mutations/ci/job_token_scope/add_project_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Mutations::Ci::JobTokenScope::AddProject do
describe '#resolve' do
let_it_be(:project) do
- create(:project, ci_job_token_scope_enabled: true).tap(&:save!)
+ create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!)
end
let_it_be(:target_project) { create(:project) }
diff --git a/spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb b/spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
index 0e706ea6e0c..d399e73f394 100644
--- a/spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
+++ b/spec/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Mutations::Ci::JobTokenScope::RemoveProject do
end
describe '#resolve' do
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:target_project) { create(:project) }
let_it_be(:link) do
diff --git a/spec/graphql/mutations/ci/runner/update_spec.rb b/spec/graphql/mutations/ci/runner/update_spec.rb
index 39fe2a53a68..ee65be1e085 100644
--- a/spec/graphql/mutations/ci/runner/update_spec.rb
+++ b/spec/graphql/mutations/ci/runner/update_spec.rb
@@ -45,7 +45,8 @@ RSpec.describe Mutations::Ci::Runner::Update do
end
context 'when user can update runner', :enable_admin_mode do
- let(:admin_user) { create(:user, :admin) }
+ let_it_be(:admin_user) { create(:user, :admin) }
+
let(:current_ctx) { { current_user: admin_user } }
context 'with valid arguments' do
@@ -134,8 +135,7 @@ RSpec.describe Mutations::Ci::Runner::Update do
response
expect(response[:errors]).to match_array(['user not allowed to assign runner'])
- expect(response[:runner]).to be_an_instance_of(Ci::Runner)
- expect(response[:runner]).not_to have_attributes(expected_attributes)
+ expect(response[:runner]).to be_nil
expect(runner.reload).not_to have_attributes(expected_attributes)
expect(runner.projects).to match_array([project1])
end
@@ -164,7 +164,7 @@ RSpec.describe Mutations::Ci::Runner::Update do
let(:mutation_params) do
{
id: runner.to_global_id,
- associated_projects: ['gid://gitlab/Project/-1']
+ associated_projects: ["gid://gitlab/Project/#{non_existing_record_id}"]
}
end
@@ -191,6 +191,7 @@ RSpec.describe Mutations::Ci::Runner::Update do
end
it 'returns a descriptive error' do
+ expect(response[:runner]).to be_nil
expect(response[:errors]).to contain_exactly(
'Maximum timeout needs to be at least 10 minutes',
'Tags list can not be empty when runner is not allowed to pick untagged jobs'
@@ -202,6 +203,7 @@ RSpec.describe Mutations::Ci::Runner::Update do
it 'returns a descriptive error' do
mutation_params[:maintenance_note] = '1' * 1025
+ expect(response[:runner]).to be_nil
expect(response[:errors]).to contain_exactly(
'Maintenance note is too long (maximum is 1024 characters)'
)
diff --git a/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb b/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb
index ea74e427dd6..9254d84b29c 100644
--- a/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb
+++ b/spec/graphql/mutations/incident_management/timeline_event/create_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Mutations::IncidentManagement::TimelineEvent::Create do
let(:args) { {} }
it_behaves_like 'responding with an incident timeline errors',
- errors: ["Occurred at can't be blank, Note can't be blank, and Note html can't be blank"]
+ errors: ["Occurred at can't be blank and Timeline text can't be blank"]
end
end
diff --git a/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb b/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb
index 102d33378c6..7081fb7117e 100644
--- a/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb
+++ b/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Mutations::IncidentManagement::TimelineEvent::Update do
end
it 'responds with error' do
- expect(resolve).to eq(timeline_event: nil, errors: ["Note can't be blank"])
+ expect(resolve).to eq(timeline_event: nil, errors: ["Timeline text can't be blank"])
end
end
diff --git a/spec/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
index 631e02ff3dc..09ac1c99b10 100644
--- a/spec/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -26,8 +26,29 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
RSpec.shared_examples 'updating the namespace package setting' do
it_behaves_like 'updating the namespace package setting attributes',
- from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT', generic_duplicates_allowed: true, generic_duplicate_exception_regex: 'foo' },
- to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE', generic_duplicates_allowed: false, generic_duplicate_exception_regex: 'bar' }
+ from: {
+ maven_duplicates_allowed: true,
+ maven_duplicate_exception_regex: 'SNAPSHOT',
+ generic_duplicates_allowed: true,
+ generic_duplicate_exception_regex: 'foo',
+ maven_package_requests_forwarding: nil,
+ lock_maven_package_requests_forwarding: false,
+ npm_package_requests_forwarding: nil,
+ lock_npm_package_requests_forwarding: false,
+ pypi_package_requests_forwarding: nil,
+ lock_pypi_package_requests_forwarding: false
+ }, to: {
+ maven_duplicates_allowed: false,
+ maven_duplicate_exception_regex: 'RELEASE',
+ generic_duplicates_allowed: false,
+ generic_duplicate_exception_regex: 'bar',
+ maven_package_requests_forwarding: true,
+ lock_maven_package_requests_forwarding: true,
+ npm_package_requests_forwarding: true,
+ lock_npm_package_requests_forwarding: true,
+ pypi_package_requests_forwarding: true,
+ lock_pypi_package_requests_forwarding: true
+ }
it_behaves_like 'returning a success'
@@ -59,11 +80,19 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
context 'with existing namespace package setting' do
let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
let_it_be(:params) do
- { namespace_path: namespace.full_path,
+ {
+ namespace_path: namespace.full_path,
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'RELEASE',
generic_duplicates_allowed: false,
- generic_duplicate_exception_regex: 'bar' }
+ generic_duplicate_exception_regex: 'bar',
+ maven_package_requests_forwarding: true,
+ lock_maven_package_requests_forwarding: true,
+ npm_package_requests_forwarding: true,
+ lock_npm_package_requests_forwarding: true,
+ pypi_package_requests_forwarding: true,
+ lock_pypi_package_requests_forwarding: true
+ }
end
where(:user_role, :shared_examples_name) do
diff --git a/spec/graphql/mutations/work_items/update_widgets_spec.rb b/spec/graphql/mutations/work_items/update_widgets_spec.rb
deleted file mode 100644
index 2e54b81b5c7..00000000000
--- a/spec/graphql/mutations/work_items/update_widgets_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Mutations::WorkItems::UpdateWidgets do
- include GraphqlHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
-
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
-
- describe '#resolve' do
- before do
- stub_spam_services
- end
-
- context 'when no work item matches the given id' do
- let(:current_user) { developer }
- let(:gid) { global_id_of(id: non_existing_record_id, model_name: WorkItem.name) }
-
- it 'raises an error' do
- expect { mutation.resolve(id: gid, resolve: true) }.to raise_error(
- Gitlab::Graphql::Errors::ResourceNotAvailable,
- Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR
- )
- end
- end
-
- context 'when user can access the requested work item', :aggregate_failures do
- let(:current_user) { developer }
- let(:args) { {} }
-
- let_it_be(:work_item) { create(:work_item, project: project) }
-
- subject { mutation.resolve(id: work_item.to_global_id, **args) }
-
- context 'when `:work_items` is disabled for a project' do
- let_it_be(:project2) { create(:project) }
-
- it 'returns an error' do
- stub_feature_flags(work_items: project2) # only enable `work_item` for project2
-
- expect(subject[:errors]).to contain_exactly('`work_items` feature flag disabled for this project')
- end
- end
-
- context 'when resolved with an input for description widget' do
- let(:args) { { description_widget: { description: "updated description" } } }
-
- it 'returns the updated work item' do
- expect(subject[:work_item].description).to eq("updated description")
- expect(subject[:errors]).to be_empty
- end
- end
- end
- end
-end
diff --git a/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb
new file mode 100644
index 00000000000..2a7d0a8171b
--- /dev/null
+++ b/spec/graphql/resolvers/ci/all_jobs_resolver_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::AllJobsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:successful_job) { create(:ci_build, :success, name: 'Job One') }
+ let_it_be(:successful_job_two) { create(:ci_build, :success, name: 'Job Two') }
+ let_it_be(:failed_job) { create(:ci_build, :failed, name: 'Job Three') }
+ let_it_be(:pending_job) { create(:ci_build, :pending, name: 'Job Three') }
+
+ let(:args) { {} }
+ let(:current_user) { create(:admin) }
+
+ subject { resolve_jobs(args) }
+
+ describe '#resolve' do
+ context 'with authorized user' do
+ context 'with statuses argument' do
+ let(:args) { { statuses: [Types::Ci::JobStatusEnum.coerce_isolated_input('SUCCESS')] } }
+
+ it { is_expected.to contain_exactly(successful_job, successful_job_two) }
+ end
+
+ context 'with multiple statuses' do
+ let(:args) do
+ { statuses: [Types::Ci::JobStatusEnum.coerce_isolated_input('SUCCESS'),
+ Types::Ci::JobStatusEnum.coerce_isolated_input('FAILED')] }
+ end
+
+ it { is_expected.to contain_exactly(successful_job, successful_job_two, failed_job) }
+ end
+
+ context 'without statuses argument' do
+ it { is_expected.to contain_exactly(successful_job, successful_job_two, failed_job, pending_job) }
+ end
+ end
+
+ context 'with unauthorized user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ private
+
+ def resolve_jobs(args = {}, context = { current_user: current_user })
+ resolve(described_class, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
index 1bfd6fbf6b9..59ece15b745 100644
--- a/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/job_token_scope_resolver_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Resolvers::Ci::JobTokenScopeResolver do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
specify do
expect(described_class).to have_nullable_graphql_type(::Types::Ci::JobTokenScopeType)
@@ -21,7 +21,7 @@ RSpec.describe Resolvers::Ci::JobTokenScopeResolver do
end
it 'returns the same project in the allow list of projects for the Ci Job Token when scope is not enabled' do
- allow(project).to receive(:ci_job_token_scope_enabled?).and_return(false)
+ allow(project).to receive(:ci_outbound_job_token_scope_enabled?).and_return(false)
expect(resolve_scope.all_projects).to contain_exactly(project)
end
@@ -40,7 +40,7 @@ RSpec.describe Resolvers::Ci::JobTokenScopeResolver do
context 'when job token scope is disabled' do
before do
- project.update!(ci_job_token_scope_enabled: false)
+ project.update!(ci_outbound_job_token_scope_enabled: false)
end
it 'resolves projects' do
diff --git a/spec/graphql/resolvers/project_pipeline_schedules_resolver_spec.rb b/spec/graphql/resolvers/project_pipeline_schedules_resolver_spec.rb
new file mode 100644
index 00000000000..159335adf79
--- /dev/null
+++ b/spec/graphql/resolvers/project_pipeline_schedules_resolver_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::ProjectPipelineSchedulesResolver do
+ include GraphqlHelpers
+
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository, public_builds: false) }
+
+ before do
+ project.add_owner(user)
+ end
+
+ describe 'With filters' do
+ let(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: developer) }
+
+ before do
+ pipeline_schedule.pipelines << build(:ci_pipeline, project: project)
+ end
+
+ it 'shows active pipeline schedules' do
+ schedules = resolve_pipeline_schedules
+
+ expect(schedules).to contain_exactly(pipeline_schedule)
+ end
+
+ it 'shows the inactive pipeline schedules' do
+ schedules = resolve_pipeline_schedules(args:
+ { status: ::Types::Ci::PipelineScheduleStatusEnum.values['INACTIVE'].value })
+
+ expect(schedules).to be_empty
+ end
+ end
+
+ def resolve_pipeline_schedules(args: {})
+ resolve(described_class, obj: project, ctx: { current_user: user }, args: args)
+ end
+end
diff --git a/spec/graphql/resolvers/users/participants_resolver_spec.rb b/spec/graphql/resolvers/users/participants_resolver_spec.rb
index 3f04d157410..eb2418b63f4 100644
--- a/spec/graphql/resolvers/users/participants_resolver_spec.rb
+++ b/spec/graphql/resolvers/users/participants_resolver_spec.rb
@@ -10,18 +10,31 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:note) do
- create(
- :note,
- :system,
- :confidential,
- project: project,
- noteable: issue,
- author: create(:user)
- )
- end
- let_it_be(:note_metadata) { create(:system_note_metadata, note: note) }
+ let_it_be(:public_note_author) { create(:user) }
+ let_it_be(:public_reply_author) { create(:user) }
+ let_it_be(:internal_note_author) { create(:user) }
+ let_it_be(:internal_reply_author) { create(:user) }
+
+ let_it_be(:public_note) { create(:note, project: project, noteable: issue, author: public_note_author) }
+ let_it_be(:internal_note) { create(:note, :confidential, project: project, noteable: issue, author: internal_note_author) }
+
+ let_it_be(:public_reply) { create(:note, noteable: issue, in_reply_to: public_note, project: project, author: public_reply_author) }
+ let_it_be(:internal_reply) { create(:note, :confidential, noteable: issue, in_reply_to: internal_note, project: project, author: internal_reply_author) }
+
+ let_it_be(:note_metadata2) { create(:system_note_metadata, note: public_note) }
+
+ let_it_be(:issue_emoji) { create(:award_emoji, name: 'thumbsup', awardable: issue) }
+ let_it_be(:note_emoji1) { create(:award_emoji, name: 'thumbsup', awardable: public_note) }
+ let_it_be(:note_emoji2) { create(:award_emoji, name: 'thumbsup', awardable: internal_note) }
+ let_it_be(:note_emoji3) { create(:award_emoji, name: 'thumbsup', awardable: public_reply) }
+ let_it_be(:note_emoji4) { create(:award_emoji, name: 'thumbsup', awardable: internal_reply) }
+
+ let_it_be(:issue_emoji_author) { issue_emoji.user }
+ let_it_be(:public_note_emoji_author) { note_emoji1.user }
+ let_it_be(:internal_note_emoji_author) { note_emoji2.user }
+ let_it_be(:public_reply_emoji_author) { note_emoji3.user }
+ let_it_be(:internal_reply_emoji_author) { note_emoji4.user }
subject(:resolved_items) { resolve(described_class, args: {}, ctx: { current_user: current_user }, obj: issue)&.items }
@@ -34,7 +47,16 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
let(:current_user) { nil }
it 'returns only publicly visible participants for this user' do
- is_expected.to match_array([issue.author])
+ is_expected.to match_array(
+ [
+ issue.author,
+ issue_emoji_author,
+ public_note_author,
+ public_note_emoji_author,
+ public_reply_author,
+ public_reply_emoji_author
+ ]
+ )
end
end
@@ -42,15 +64,37 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
let(:current_user) { guest }
it 'returns only publicly visible participants for this user' do
- is_expected.to match_array([issue.author])
+ is_expected.to match_array(
+ [
+ issue.author,
+ issue_emoji_author,
+ public_note_author,
+ public_note_emoji_author,
+ public_reply_author,
+ public_reply_emoji_author
+ ]
+ )
end
end
- context 'when current user has access to confidential notes' do
+ context 'when current user has access to internal notes' do
let(:current_user) { user }
it 'returns all participants for this user' do
- is_expected.to match_array([issue.author, note.author])
+ is_expected.to match_array(
+ [
+ issue.author,
+ issue_emoji_author,
+ public_note_author,
+ public_note_emoji_author,
+ public_reply_author,
+ internal_note_author,
+ internal_note_emoji_author,
+ internal_reply_author,
+ public_reply_emoji_author,
+ internal_reply_emoji_author
+ ]
+ )
end
context 'N+1 queries' do
@@ -64,9 +108,14 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
it 'does not execute N+1 for project relation' do
control_count = ActiveRecord::QueryRecorder.new { query.call }
- create(:note, :confidential, project: project, noteable: issue, author: create(:user))
+ create(:award_emoji, :upvote, awardable: issue)
+ internal_note = create(:note, :confidential, project: project, noteable: issue, author: create(:user))
+ create(:award_emoji, name: 'thumbsup', awardable: internal_note)
+ public_note = create(:note, project: project, noteable: issue, author: create(:user))
+ create(:award_emoji, name: 'thumbsup', awardable: public_note)
- expect { query.call }.not_to exceed_query_limit(control_count)
+ # 1 extra query per source (3 emojis + 2 notes) to fetch participables collection
+ expect { query.call }.not_to exceed_query_limit(control_count).with_threshold(5)
end
it 'does not execute N+1 for system note metadata relation' do
diff --git a/spec/graphql/types/ci/job_token_scope_type_spec.rb b/spec/graphql/types/ci/job_token_scope_type_spec.rb
index 18f4d762d1e..569b59d6c70 100644
--- a/spec/graphql/types/ci/job_token_scope_type_spec.rb
+++ b/spec/graphql/types/ci/job_token_scope_type_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe GitlabSchema.types['CiJobTokenScopeType'] do
end
describe 'query' do
- let(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:current_user) { create(:user) }
let(:query) do
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index b3dee082d1f..ce1558c4097 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Types::Ci::JobType do
active
allow_failure
artifacts
+ browse_artifacts_path
cancelable
commitPath
coverage
@@ -64,4 +65,16 @@ RSpec.describe Types::Ci::JobType do
is_expected.to eq("/#{project.full_path}/-/jobs/#{build.id}")
end
end
+
+ describe '#browse_artifacts_path' do
+ subject { resolve_field(:browse_artifacts_path, build, current_user: user, object_type: described_class) }
+
+ let_it_be(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:build) { create(:ci_build, :artifacts, project: project, user: user) }
+
+ it 'returns the path to browse the artifacts of the job' do
+ is_expected.to eq("/#{project.full_path}/-/jobs/#{build.id}/artifacts/browse")
+ end
+ end
end
diff --git a/spec/graphql/types/ci/pipeline_schedule_status_enum_spec.rb b/spec/graphql/types/ci/pipeline_schedule_status_enum_spec.rb
new file mode 100644
index 00000000000..d271e72b17f
--- /dev/null
+++ b/spec/graphql/types/ci/pipeline_schedule_status_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::PipelineScheduleStatusEnum do
+ specify { expect(described_class.graphql_name ).to eq('PipelineScheduleStatus') }
+
+ it 'exposes the status of a pipeline schedule' do
+ expect(described_class.values.keys).to match_array(%w[ACTIVE INACTIVE])
+ end
+end
diff --git a/spec/graphql/types/ci/pipeline_schedule_type_spec.rb b/spec/graphql/types/ci/pipeline_schedule_type_spec.rb
new file mode 100644
index 00000000000..bf1413ef657
--- /dev/null
+++ b/spec/graphql/types/ci/pipeline_schedule_type_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::PipelineScheduleType do
+ include GraphqlHelpers
+
+ specify { expect(described_class.graphql_name).to eq('PipelineSchedule') }
+ specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Ci::PipelineSchedules) }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ id
+ description
+ owner
+ active
+ lastPipeline
+ refForDisplay
+ refPath
+ forTag
+ nextRunAt
+ realNextRun
+ cron
+ cronTimezone
+ userPermissions
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/environment_type_spec.rb b/spec/graphql/types/environment_type_spec.rb
index ae58fe00af7..2605beac95a 100644
--- a/spec/graphql/types/environment_type_spec.rb
+++ b/spec/graphql/types/environment_type_spec.rb
@@ -5,13 +5,13 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['Environment'] do
specify { expect(described_class.graphql_name).to eq('Environment') }
- it 'has the expected fields' do
+ it 'includes the expected fields' do
expected_fields = %w[
name id state metrics_dashboard latest_opened_most_severe_alert path external_url deployments
slug createdAt updatedAt autoStopAt autoDeleteAt tier environmentType lastDeployment
]
- expect(described_class).to have_graphql_fields(*expected_fields)
+ expect(described_class).to include_graphql_fields(*expected_fields)
end
specify { expect(described_class).to require_graphql_authorizations(:read_environment) }
diff --git a/spec/graphql/types/namespace/package_settings_type_spec.rb b/spec/graphql/types/namespace/package_settings_type_spec.rb
index f63a0a7010f..5039f2d6153 100644
--- a/spec/graphql/types/namespace/package_settings_type_spec.rb
+++ b/spec/graphql/types/namespace/package_settings_type_spec.rb
@@ -14,4 +14,24 @@ RSpec.describe GitlabSchema.types['PackageSettings'] do
it { is_expected.to have_graphql_type(Types::UntrustedRegexp) }
end
+
+ it 'includes package setting fields' do
+ expected_fields = %w[
+ maven_duplicates_allowed
+ maven_duplicate_exception_regex
+ generic_duplicates_allowed
+ generic_duplicate_exception_regex
+ maven_package_requests_forwarding
+ lock_maven_package_requests_forwarding
+ npm_package_requests_forwarding
+ lock_npm_package_requests_forwarding
+ pypi_package_requests_forwarding
+ lock_pypi_package_requests_forwarding
+ maven_package_requests_forwarding_locked
+ npm_package_requests_forwarding_locked
+ pypi_package_requests_forwarding_locked
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
end
diff --git a/spec/graphql/types/packages/tag_type_spec.rb b/spec/graphql/types/packages/tag_type_spec.rb
index 83b705157d8..ea0801c7a0f 100644
--- a/spec/graphql/types/packages/tag_type_spec.rb
+++ b/spec/graphql/types/packages/tag_type_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['PackageTag'] do
it 'includes all the package tag fields' do
expected_fields = %w[
- id name created_at updated_at
+ id name created_at updated_at
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/permission_types/ci/pipeline_schedule_type_spec.rb b/spec/graphql/types/permission_types/ci/pipeline_schedule_type_spec.rb
new file mode 100644
index 00000000000..5929d22b202
--- /dev/null
+++ b/spec/graphql/types/permission_types/ci/pipeline_schedule_type_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::PipelineScheduleType do
+ specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Ci::PipelineSchedules) }
+end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 617cbdb07fe..b435f3ed5ff 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe GitlabSchema.types['Project'] do
cluster_agent cluster_agents agent_configurations
ci_template timelogs merge_commit_template squash_commit_template work_item_types
recent_issue_boards ci_config_path_or_default packages_cleanup_policy ci_variables
- timelog_categories fork_targets branch_rules ci_config_variables
+ timelog_categories fork_targets branch_rules ci_config_variables pipeline_schedules
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -508,20 +508,6 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_resolver(Resolvers::Ci::JobTokenScopeResolver) }
end
- describe 'branch_rules field' do
- subject { described_class.fields['branchRules'] }
-
- let(:br_resolver) { Resolvers::Projects::BranchRulesResolver }
-
- specify do
- is_expected.to have_graphql_type(
- Types::Projects::BranchRuleType.connection_type
- )
- end
-
- specify { is_expected.to have_graphql_resolver(br_resolver) }
- end
-
describe 'agent_configurations' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
diff --git a/spec/graphql/types/branch_rule_type_spec.rb b/spec/graphql/types/projects/branch_rule_type_spec.rb
index 277901f00bf..119ecf8a097 100644
--- a/spec/graphql/types/branch_rule_type_spec.rb
+++ b/spec/graphql/types/projects/branch_rule_type_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe GitlabSchema.types['BranchRule'] do
let(:fields) do
%i[
name
+ isDefault
branch_protection
created_at
updated_at
@@ -18,5 +19,5 @@ RSpec.describe GitlabSchema.types['BranchRule'] do
specify { is_expected.to require_graphql_authorizations(:read_protected_branch) }
- specify { is_expected.to have_graphql_fields(fields) }
+ specify { is_expected.to have_graphql_fields(fields).at_least }
end
diff --git a/spec/graphql/types/subscription_type_spec.rb b/spec/graphql/types/subscription_type_spec.rb
index 860cbbf0c15..c23a14deaf3 100644
--- a/spec/graphql/types/subscription_type_spec.rb
+++ b/spec/graphql/types/subscription_type_spec.rb
@@ -8,9 +8,11 @@ RSpec.describe GitlabSchema.types['Subscription'] do
issuable_assignees_updated
issue_crm_contacts_updated
issuable_title_updated
+ issuable_description_updated
issuable_labels_updated
issuable_dates_updated
merge_request_reviewers_updated
+ merge_request_merge_status_updated
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/work_items/widgets/labels_update_input_type_spec.rb b/spec/graphql/types/work_items/widgets/labels_update_input_type_spec.rb
new file mode 100644
index 00000000000..3e5f40bde02
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/labels_update_input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::WorkItems::Widgets::LabelsUpdateInputType do
+ it { expect(described_class.graphql_name).to eq('WorkItemWidgetLabelsUpdateInput') }
+
+ it { expect(described_class.arguments.keys).to contain_exactly('addLabelIds', 'removeLabelIds') }
+end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 264431b1bb5..a4b2c963c74 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -486,6 +486,25 @@ RSpec.describe ApplicationHelper do
end
end
+ describe '#gitlab_ui_form_with' do
+ let_it_be(:user) { build(:user) }
+
+ before do
+ allow(helper).to receive(:users_path).and_return('/root')
+ allow(helper).to receive(:form_with).and_call_original
+ end
+
+ it 'adds custom form builder to options and calls `form_with`' do
+ options = { model: user, html: { class: 'foo-bar' } }
+ expected_options = options.merge({ builder: ::Gitlab::FormBuilders::GitlabUiFormBuilder })
+
+ expect do |b|
+ helper.gitlab_ui_form_with(**options, &b)
+ end.to yield_with_args(::Gitlab::FormBuilders::GitlabUiFormBuilder)
+ expect(helper).to have_received(:form_with).with(expected_options)
+ end
+ end
+
describe '#page_class' do
context 'when logged_out_marketing_header experiment is enabled' do
let_it_be(:expected_class) { 'logged-out-marketing-header-candidate' }
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index 1703727db21..c75e9caa77a 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -51,12 +51,13 @@ RSpec.describe ApplicationSettingsHelper do
end
it 'contains rate limit parameters' do
- expect(helper.visible_attributes).to include(*%i(
- issues_create_limit notes_create_limit project_export_limit
- project_download_export_limit project_export_limit project_import_limit
- raw_blob_request_limit group_export_limit group_download_export_limit
- group_import_limit users_get_by_id_limit search_rate_limit search_rate_limit_unauthenticated
- ))
+ expect(helper.visible_attributes).to include(
+ *%i(
+ issues_create_limit notes_create_limit project_export_limit
+ project_download_export_limit project_export_limit project_import_limit
+ raw_blob_request_limit group_export_limit group_download_export_limit
+ group_import_limit users_get_by_id_limit search_rate_limit search_rate_limit_unauthenticated
+ ))
end
context 'when GitLab.com' do
@@ -233,23 +234,24 @@ RSpec.describe ApplicationSettingsHelper do
end
it 'returns available formats correctly' do
- expect(helper.kroki_available_formats).to eq([
- {
- name: 'kroki_formats_blockdiag',
- label: 'BlockDiag (includes BlockDiag, SeqDiag, ActDiag, NwDiag, PacketDiag, and RackDiag)',
- value: true
- },
- {
- name: 'kroki_formats_bpmn',
- label: 'BPMN',
- value: false
- },
- {
- name: 'kroki_formats_excalidraw',
- label: 'Excalidraw',
- value: false
- }
- ])
+ expect(helper.kroki_available_formats).to eq(
+ [
+ {
+ name: 'kroki_formats_blockdiag',
+ label: 'BlockDiag (includes BlockDiag, SeqDiag, ActDiag, NwDiag, PacketDiag, and RackDiag)',
+ value: true
+ },
+ {
+ name: 'kroki_formats_bpmn',
+ label: 'BPMN',
+ value: false
+ },
+ {
+ name: 'kroki_formats_excalidraw',
+ label: 'Excalidraw',
+ value: false
+ }
+ ])
end
end
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index ccc150c397a..27b7bac5a88 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -105,10 +105,6 @@ RSpec.describe BoardsHelper do
allow(helper).to receive(:can?).with(user, :admin_issue_board, project).and_return(false)
end
- it 'returns a board_lists_path as lists_endpoint' do
- expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(project_board))
- end
-
it 'returns board type as parent' do
expect(helper.board_data[:parent]).to eq('project')
end
@@ -189,14 +185,4 @@ RSpec.describe BoardsHelper do
end
end
end
-
- describe '#current_board_json' do
- let(:board_json) { helper.current_board_json }
-
- it 'can serialise with a basic set of attributes' do
- assign(:board, project_board)
-
- expect(board_json).to match_schema('current-board')
- end
- end
end
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index 1950d685980..c9aac63a883 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Ci::PipelineEditorHelper do
- include CycleAnalyticsHelpers
-
let_it_be(:project) { create(:project) }
describe 'can_view_pipeline_editor?' do
@@ -25,6 +23,30 @@ RSpec.describe Ci::PipelineEditorHelper do
describe '#js_pipeline_editor_data' do
let(:project) { create(:project, :repository) }
+ let(:default_helper_data) do
+ {
+ "ci-config-path": project.ci_config_path_or_default,
+ "ci-examples-help-page-path" => help_page_path('ci/examples/index'),
+ "ci-help-page-path" => help_page_path('ci/index'),
+ "ci-lint-path" => project_ci_lint_path(project),
+ "default-branch" => project.default_branch_or_main,
+ "empty-state-illustration-path" => 'illustrations/empty.svg',
+ "initial-branch-name" => nil,
+ "includes-help-page-path" => help_page_path('ci/yaml/includes'),
+ "lint-help-page-path" => help_page_path('ci/lint', anchor: 'check-cicd-syntax'),
+ "lint-unavailable-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'configuration-validation-currently-not-available-message'),
+ "needs-help-page-path" => help_page_path('ci/yaml/index', anchor: 'needs'),
+ "new-merge-request-path" => '/mock/project/-/merge_requests/new',
+ "pipeline-page-path" => project_pipelines_path(project),
+ "project-path" => project.path,
+ "project-full-path" => project.full_path,
+ "project-namespace" => project.namespace.full_path,
+ "simulate-pipeline-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'simulate-a-cicd-pipeline'),
+ "uses-external-config" => 'false',
+ "validate-tab-illustration-path" => 'illustrations/validate.svg',
+ "yml-help-page-path" => help_page_path('ci/yaml/index')
+ }
+ end
before do
allow(helper)
@@ -46,29 +68,10 @@ RSpec.describe Ci::PipelineEditorHelper do
context 'with a project with commits' do
it 'returns pipeline editor data' do
- expect(pipeline_editor_data).to eq({
- "ci-config-path": project.ci_config_path_or_default,
- "ci-examples-help-page-path" => help_page_path('ci/examples/index'),
- "ci-help-page-path" => help_page_path('ci/index'),
- "ci-lint-path" => project_ci_lint_path(project),
- "default-branch" => project.default_branch_or_main,
- "empty-state-illustration-path" => 'illustrations/empty.svg',
- "initial-branch-name" => nil,
- "includes-help-page-path" => help_page_path('ci/yaml/includes'),
- "lint-help-page-path" => help_page_path('ci/lint', anchor: 'check-cicd-syntax'),
- "lint-unavailable-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'configuration-validation-currently-not-available-message'),
- "needs-help-page-path" => help_page_path('ci/yaml/index', anchor: 'needs'),
- "new-merge-request-path" => '/mock/project/-/merge_requests/new',
+ expect(pipeline_editor_data).to eq(default_helper_data.merge({
"pipeline_etag" => graphql_etag_pipeline_sha_path(project.commit.sha),
- "pipeline-page-path" => project_pipelines_path(project),
- "project-path" => project.path,
- "project-full-path" => project.full_path,
- "project-namespace" => project.namespace.full_path,
- "simulate-pipeline-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'simulate-a-cicd-pipeline'),
- "total-branches" => project.repository.branches.length,
- "validate-tab-illustration-path" => 'illustrations/validate.svg',
- "yml-help-page-path" => help_page_path('ci/yaml/index')
- })
+ "total-branches" => project.repository.branches.length
+ }))
end
end
@@ -76,29 +79,10 @@ RSpec.describe Ci::PipelineEditorHelper do
let(:project) { create(:project, :empty_repo) }
it 'returns pipeline editor data' do
- expect(pipeline_editor_data).to eq({
- "ci-config-path": project.ci_config_path_or_default,
- "ci-examples-help-page-path" => help_page_path('ci/examples/index'),
- "ci-help-page-path" => help_page_path('ci/index'),
- "ci-lint-path" => project_ci_lint_path(project),
- "default-branch" => project.default_branch_or_main,
- "empty-state-illustration-path" => 'illustrations/empty.svg',
- "initial-branch-name" => nil,
- "includes-help-page-path" => help_page_path('ci/yaml/includes'),
- "lint-help-page-path" => help_page_path('ci/lint', anchor: 'check-cicd-syntax'),
- "lint-unavailable-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'configuration-validation-currently-not-available-message'),
- "needs-help-page-path" => help_page_path('ci/yaml/index', anchor: 'needs'),
- "new-merge-request-path" => '/mock/project/-/merge_requests/new',
+ expect(pipeline_editor_data).to eq(default_helper_data.merge({
"pipeline_etag" => '',
- "pipeline-page-path" => project_pipelines_path(project),
- "project-path" => project.path,
- "project-full-path" => project.full_path,
- "project-namespace" => project.namespace.full_path,
- "simulate-pipeline-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'simulate-a-cicd-pipeline'),
- "total-branches" => 0,
- "validate-tab-illustration-path" => 'illustrations/validate.svg',
- "yml-help-page-path" => help_page_path('ci/yaml/index')
- })
+ "total-branches" => 0
+ }))
end
end
@@ -113,11 +97,38 @@ RSpec.describe Ci::PipelineEditorHelper do
end
end
+ context 'with a remote CI config' do
+ before do
+ create(:commit, project: project)
+ project.ci_config_path = 'http://example.com/path/to/ci/config.yml'
+ end
+
+ it 'returns true for uses-external-config in pipeline editor data' do
+ expect(pipeline_editor_data['uses-external-config']).to eq('true')
+ end
+ end
+
+ context 'with a CI config from an external project' do
+ before do
+ create(:commit, project: project)
+ project.ci_config_path = '.gitlab-ci.yml@group/project'
+ end
+
+ it 'returns true for uses-external-config in pipeline editor data' do
+ expect(pipeline_editor_data['uses-external-config']).to eq('true')
+ end
+ end
+
context 'with a non-default branch name' do
let(:user) { create(:user) }
before do
- create_commit('Message', project, user, 'feature')
+ project.repository.commit_files(
+ user,
+ branch_name: 'feature',
+ message: 'Message',
+ actions: [{ action: :create, file_path: 'a/new.file', content: 'This is a new file' }]
+ )
controller.params[:branch_name] = 'feature'
end
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 0cc53da98b2..27738f73ea5 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -227,10 +227,11 @@ RSpec.describe CommitsHelper do
end
it 'returns data for cherry picking into a project' do
- expect(helper.cherry_pick_projects_data(forked_project)).to match_array([
- { id: project.id.to_s, name: project.full_path, refsUrl: refs_project_path(project) },
- { id: forked_project.id.to_s, name: forked_project.full_path, refsUrl: refs_project_path(forked_project) }
- ])
+ expect(helper.cherry_pick_projects_data(forked_project)).to match_array(
+ [
+ { id: project.id.to_s, name: project.full_path, refsUrl: refs_project_path(project) },
+ { id: forked_project.id.to_s, name: forked_project.full_path, refsUrl: refs_project_path(forked_project) }
+ ])
end
end
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index cc6804f0355..7005b3dc53e 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -24,6 +24,45 @@ RSpec.describe EventsHelper do
end
end
+ describe '#localized_action_name' do
+ it 'handles all valid design events' do
+ created, updated, destroyed = %i[created updated destroyed].map do |trait|
+ event = build(:design_event, trait)
+ helper.localized_action_name(event)
+ end
+
+ expect(created).to eq(_('added'))
+ expect(updated).to eq(_('updated'))
+ expect(destroyed).to eq(_('removed'))
+ end
+
+ context 'handles correct base actions' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:trait, :localized_action_name) do
+ :created | s_('Event|created')
+ :updated | s_('Event|opened')
+ :closed | s_('Event|closed')
+ :reopened | s_('Event|opened')
+ :commented | s_('Event|commented on')
+ :merged | s_('Event|accepted')
+ :joined | s_('Event|joined')
+ :left | s_('Event|left')
+ :destroyed | s_('Event|destroyed')
+ :expired | s_('Event|removed due to membership expiration from')
+ :approved | s_('Event|approved')
+ end
+
+ with_them do
+ it 'with correct name and method' do
+ event = build(:event, trait)
+
+ expect(helper.localized_action_name(event)).to eq(localized_action_name)
+ end
+ end
+ end
+ end
+
describe '#event_commit_title' do
let(:message) { 'foo & bar ' + 'A' * 70 + '\n' + 'B' * 80 }
diff --git a/spec/helpers/form_helper_spec.rb b/spec/helpers/form_helper_spec.rb
index 4b76c370810..14ff5d97057 100644
--- a/spec/helpers/form_helper_spec.rb
+++ b/spec/helpers/form_helper_spec.rb
@@ -6,35 +6,85 @@ RSpec.describe FormHelper do
include Devise::Test::ControllerHelpers
describe '#dropdown_max_select' do
+ let(:feature_flag) { :limit_reviewer_and_assignee_size }
+
context "with the :limit_reviewer_and_assignee_size feature flag on" do
+ before do
+ stub_feature_flags(feature_flag => true)
+ end
+
it 'correctly returns the max amount of reviewers or assignees to allow' do
- max = MergeRequest::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS
+ max = Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS
- expect(helper.dropdown_max_select({}))
+ expect(helper.dropdown_max_select({}, feature_flag))
.to eq(max)
- expect(helper.dropdown_max_select({ 'max-select'.to_sym => 5 }))
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => 5 }, feature_flag))
.to eq(5)
- expect(helper.dropdown_max_select({ 'max-select'.to_sym => max + 5 }))
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => max + 5 }, feature_flag))
.to eq(max)
end
end
context "with the :limit_reviewer_and_assignee_size feature flag off" do
before do
- stub_feature_flags(limit_reviewer_and_assignee_size: false)
+ stub_feature_flags(feature_flag => false)
end
it 'correctly returns the max amount of reviewers or assignees to allow' do
- expect(helper.dropdown_max_select({}))
+ expect(helper.dropdown_max_select({}, feature_flag))
.to eq(nil)
- expect(helper.dropdown_max_select({ 'max-select'.to_sym => 5 }))
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => 5 }, feature_flag))
.to eq(5)
- expect(helper.dropdown_max_select({ 'max-select'.to_sym => 120 }))
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => 120 }, feature_flag))
.to eq(120)
end
end
end
+ describe '#assignees_dropdown_options' do
+ let(:merge_request) { build(:merge_request) }
+
+ context "with the :limit_assignees_per_issuable feature flag on" do
+ context "with multiple assignees" do
+ it 'correctly returns the max amount of assignees to allow' do
+ allow(helper).to receive(:merge_request_supports_multiple_assignees?).and_return(true)
+
+ expect(helper.assignees_dropdown_options(:merge_request)[:data][:'max-select'])
+ .to eq(Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS)
+ end
+ end
+
+ context "with only 1 assignee" do
+ it 'correctly returns the max amount of assignees to allow' do
+ expect(helper.assignees_dropdown_options(:merge_request)[:data][:'max-select'])
+ .to eq(1)
+ end
+ end
+ end
+
+ context "with the :limit_assignees_per_issuable feature flag off" do
+ before do
+ stub_feature_flags(limit_assignees_per_issuable: false)
+ end
+
+ context "with multiple assignees" do
+ it 'correctly returns the max amount of assignees to allow' do
+ allow(helper).to receive(:merge_request_supports_multiple_assignees?).and_return(true)
+
+ expect(helper.assignees_dropdown_options(:merge_request)[:data][:'max-select'])
+ .to eq(nil)
+ end
+ end
+
+ context "with only 1 assignee" do
+ it 'correctly returns the max amount of assignees to allow' do
+ expect(helper.assignees_dropdown_options(:merge_request)[:data][:'max-select'])
+ .to eq(1)
+ end
+ end
+ end
+ end
+
describe '#reviewers_dropdown_options' do
let(:merge_request) { build(:merge_request) }
@@ -44,7 +94,7 @@ RSpec.describe FormHelper do
allow(helper).to receive(:merge_request_supports_multiple_reviewers?).and_return(true)
expect(helper.reviewers_dropdown_options(merge_request)[:data][:'max-select'])
- .to eq(MergeRequest::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS)
+ .to eq(Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS)
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 00e620832b3..a38483a956d 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -388,22 +388,30 @@ RSpec.describe GroupsHelper do
end
describe '#show_thanks_for_purchase_alert?' do
- subject { helper.show_thanks_for_purchase_alert? }
+ subject { helper.show_thanks_for_purchase_alert?(quantity) }
- it 'returns true with purchased_quantity present in params' do
- allow(controller).to receive(:params) { { purchased_quantity: '1' } }
+ context 'with quantity present' do
+ let(:quantity) { 1 }
- is_expected.to be_truthy
+ it 'returns true' do
+ is_expected.to be_truthy
+ end
end
- it 'returns false with purchased_quantity not present in params' do
- is_expected.to be_falsey
+ context 'with quantity not present' do
+ let(:quantity) { nil }
+
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
end
- it 'returns false with purchased_quantity is empty in params' do
- allow(controller).to receive(:params) { { purchased_quantity: '' } }
+ context 'with quantity empty' do
+ let(:quantity) { '' }
- is_expected.to be_falsey
+ it 'returns false' do
+ is_expected.to be_falsey
+ end
end
end
@@ -523,12 +531,14 @@ RSpec.describe GroupsHelper do
describe '#group_overview_tabs_app_data' do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
+ let_it_be(:initial_sort) { 'created_asc' }
before do
allow(helper).to receive(:current_user).and_return(user)
allow(helper).to receive(:can?).with(user, :create_subgroup, group) { true }
allow(helper).to receive(:can?).with(user, :create_projects, group) { true }
+ allow(helper).to receive(:project_list_sort_by).and_return(initial_sort)
end
it 'returns expected hash' do
@@ -537,7 +547,8 @@ RSpec.describe GroupsHelper do
subgroups_and_projects_endpoint: including("/groups/#{group.path}/-/children.json"),
shared_projects_endpoint: including("/groups/#{group.path}/-/shared_projects.json"),
archived_projects_endpoint: including("/groups/#{group.path}/-/children.json?archived=only"),
- current_group_visibility: group.visibility
+ current_group_visibility: group.visibility,
+ initial_sort: initial_sort
}.merge(helper.group_overview_tabs_app_data(group))
)
end
diff --git a/spec/helpers/hooks_helper_spec.rb b/spec/helpers/hooks_helper_spec.rb
index bac73db5dd4..8f438a3ddc8 100644
--- a/spec/helpers/hooks_helper_spec.rb
+++ b/spec/helpers/hooks_helper_spec.rb
@@ -8,6 +8,13 @@ RSpec.describe HooksHelper do
let(:service_hook) { create(:service_hook, integration: create(:drone_ci_integration)) }
let(:system_hook) { create(:system_hook) }
+ describe '#webhook_form_data' do
+ subject { helper.webhook_form_data(project_hook) }
+
+ it { expect(subject[:url]).to eq(project_hook.url) }
+ it { expect(subject[:url_variables]).to be_nil }
+ end
+
describe '#link_to_test_hook' do
let(:trigger) { 'push_events' }
diff --git a/spec/helpers/ide_helper_spec.rb b/spec/helpers/ide_helper_spec.rb
index dc0a234f981..e750379f62d 100644
--- a/spec/helpers/ide_helper_spec.rb
+++ b/spec/helpers/ide_helper_spec.rb
@@ -5,75 +5,113 @@ require 'spec_helper'
RSpec.describe IdeHelper do
describe '#ide_data' do
let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.creator }
before do
- allow(helper).to receive(:current_user).and_return(project.creator)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:content_security_policy_nonce).and_return('test-csp-nonce')
end
- context 'when instance vars are not set' do
- it 'returns instance data in the hash as nil' do
- expect(helper.ide_data)
- .to include(
- 'branch-name' => nil,
- 'file-path' => nil,
- 'merge-request' => nil,
- 'fork-info' => nil,
- 'project' => nil,
- 'preview-markdown-path' => nil
- )
- end
- end
-
- context 'when instance vars are set' do
- it 'returns instance data in the hash' do
- fork_info = { ide_path: '/test/ide/path' }
+ context 'with vscode_web_ide=true and instance vars set' do
+ before do
+ stub_feature_flags(vscode_web_ide: true)
self.instance_variable_set(:@branch, 'master')
- self.instance_variable_set(:@path, 'foo/bar')
- self.instance_variable_set(:@merge_request, '1')
- self.instance_variable_set(:@fork_info, fork_info)
self.instance_variable_set(:@project, project)
+ end
- serialized_project = API::Entities::Project.represent(project, current_user: project.creator).to_json
-
+ it 'returns hash' do
expect(helper.ide_data)
- .to include(
+ .to eq(
+ 'can-use-new-web-ide' => 'true',
+ 'use-new-web-ide' => 'true',
+ 'user-preferences-path' => profile_preferences_path,
'branch-name' => 'master',
- 'file-path' => 'foo/bar',
- 'merge-request' => '1',
- 'fork-info' => fork_info.to_json,
- 'project' => serialized_project,
- 'preview-markdown-path' => Gitlab::Routing.url_helpers.preview_markdown_project_path(project)
+ 'project-path' => project.path_with_namespace,
+ 'csp-nonce' => 'test-csp-nonce'
)
end
+
+ it 'does not use new web ide if user.use_legacy_web_ide' do
+ allow(user).to receive(:use_legacy_web_ide).and_return(true)
+
+ expect(helper.ide_data).to include('use-new-web-ide' => 'false')
+ end
end
- context 'environments guidance experiment', :experiment do
+ context 'with vscode_web_ide=false' do
before do
- stub_experiments(in_product_guidance_environments_webide: :candidate)
- self.instance_variable_set(:@project, project)
+ stub_feature_flags(vscode_web_ide: false)
end
- context 'when project has no enviornments' do
- it 'enables environment guidance' do
- expect(helper.ide_data).to include('enable-environments-guidance' => 'true')
+ context 'when instance vars are not set' do
+ it 'returns instance data in the hash as nil' do
+ expect(helper.ide_data)
+ .to include(
+ 'can-use-new-web-ide' => 'false',
+ 'use-new-web-ide' => 'false',
+ 'user-preferences-path' => profile_preferences_path,
+ 'branch-name' => nil,
+ 'file-path' => nil,
+ 'merge-request' => nil,
+ 'fork-info' => nil,
+ 'project' => nil,
+ 'preview-markdown-path' => nil
+ )
end
+ end
- context 'and the callout has been dismissed' do
- it 'disables environment guidance' do
- callout = create(:callout, feature_name: :web_ide_ci_environments_guidance, user: project.creator)
- callout.update!(dismissed_at: Time.now - 1.week)
- allow(helper).to receive(:current_user).and_return(User.find(project.creator.id))
- expect(helper.ide_data).to include('enable-environments-guidance' => 'false')
- end
+ context 'when instance vars are set' do
+ it 'returns instance data in the hash' do
+ fork_info = { ide_path: '/test/ide/path' }
+
+ self.instance_variable_set(:@branch, 'master')
+ self.instance_variable_set(:@path, 'foo/bar')
+ self.instance_variable_set(:@merge_request, '1')
+ self.instance_variable_set(:@fork_info, fork_info)
+ self.instance_variable_set(:@project, project)
+
+ serialized_project = API::Entities::Project.represent(project, current_user: project.creator).to_json
+
+ expect(helper.ide_data)
+ .to include(
+ 'branch-name' => 'master',
+ 'file-path' => 'foo/bar',
+ 'merge-request' => '1',
+ 'fork-info' => fork_info.to_json,
+ 'project' => serialized_project,
+ 'preview-markdown-path' => Gitlab::Routing.url_helpers.preview_markdown_project_path(project)
+ )
end
end
- context 'when the project has environments' do
- it 'disables environment guidance' do
- create(:environment, project: project)
+ context 'environments guidance experiment', :experiment do
+ before do
+ stub_experiments(in_product_guidance_environments_webide: :candidate)
+ self.instance_variable_set(:@project, project)
+ end
+
+ context 'when project has no environments' do
+ it 'enables environment guidance' do
+ expect(helper.ide_data).to include('enable-environments-guidance' => 'true')
+ end
+
+ context 'and the callout has been dismissed' do
+ it 'disables environment guidance' do
+ callout = create(:callout, feature_name: :web_ide_ci_environments_guidance, user: project.creator)
+ callout.update!(dismissed_at: Time.now - 1.week)
+ allow(helper).to receive(:current_user).and_return(User.find(project.creator.id))
+ expect(helper.ide_data).to include('enable-environments-guidance' => 'false')
+ end
+ end
+ end
- expect(helper.ide_data).to include('enable-environments-guidance' => 'false')
+ context 'when the project has environments' do
+ it 'disables environment guidance' do
+ create(:environment, project: project)
+
+ expect(helper.ide_data).to include('enable-environments-guidance' => 'false')
+ end
end
end
end
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index 4d47732e008..c753d553371 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -11,10 +11,6 @@ RSpec.describe InviteMembersHelper do
let(:owner) { project.owner }
- before do
- helper.extend(Gitlab::Experimentation::ControllerConcern)
- end
-
describe '#common_invite_group_modal_data' do
it 'has expected common attributes' do
attributes = {
diff --git a/spec/helpers/issuables_description_templates_helper_spec.rb b/spec/helpers/issuables_description_templates_helper_spec.rb
index bd8af384d40..b32a99fe989 100644
--- a/spec/helpers/issuables_description_templates_helper_spec.rb
+++ b/spec/helpers/issuables_description_templates_helper_spec.rb
@@ -64,12 +64,12 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
it 'returns project templates' do
value = [
- "",
- [
- { name: "another_issue_template", id: "another_issue_template", project_id: project.id },
- { name: "custom_issue_template", id: "custom_issue_template", project_id: project.id }
- ]
- ].to_json
+ "",
+ [
+ { name: "another_issue_template", id: "another_issue_template", project_id: project.id },
+ { name: "custom_issue_template", id: "custom_issue_template", project_id: project.id }
+ ]
+ ].to_json
expect(helper.available_service_desk_templates_for(@project)).to eq(value)
end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index a58fe9a6cd9..e5bd8e6532f 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe IssuesHelper do
let(:project) { create(:project) }
- let(:issue) { create :issue, project: project }
- let(:ext_project) { create :redmine_project }
+ let(:issue) { create(:issue, project: project) }
+ let(:ext_project) { create(:project, :with_redmine_integration) }
describe '#work_item_type_icon' do
it 'returns icon of all standard base types' do
diff --git a/spec/helpers/listbox_helper_spec.rb b/spec/helpers/listbox_helper_spec.rb
index 0a27aa04b37..cba00b43ae5 100644
--- a/spec/helpers/listbox_helper_spec.rb
+++ b/spec/helpers/listbox_helper_spec.rb
@@ -26,13 +26,14 @@ RSpec.describe ListboxHelper do
describe '#gl_redirect_listbox_tag' do
it 'creates root element with expected classes' do
- expect(subject.classes).to include(*%w[
- dropdown
- b-dropdown
- gl-new-dropdown
- btn-group
- js-redirect-listbox
- ])
+ expect(subject.classes).to include(
+ *%w[
+ dropdown
+ b-dropdown
+ gl-new-dropdown
+ btn-group
+ js-redirect-listbox
+ ])
end
it 'sets data attributes for items and selected' do
@@ -41,14 +42,15 @@ RSpec.describe ListboxHelper do
end
it 'adds styled button' do
- expect(subject.at_css('button').classes).to include(*%w[
- btn
- dropdown-toggle
- btn-default
- btn-md
- gl-button
- gl-dropdown-toggle
- ])
+ expect(subject.at_css('button').classes).to include(
+ *%w[
+ btn
+ dropdown-toggle
+ btn-default
+ btn-md
+ gl-button
+ gl-dropdown-toggle
+ ])
end
it 'sets button text to selected item' do
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 8a7a6d003f4..a2e34471324 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -425,21 +425,21 @@ FooBar
end
it 'delegates to #markdown_unsafe when file name corresponds to Markdown' do
- expect(helper).to receive(:gitlab_markdown?).with('foo.md').and_return(true)
+ expect(Gitlab::MarkupHelper).to receive(:gitlab_markdown?).with('foo.md').and_return(true)
expect(helper).to receive(:markdown_unsafe).and_return('NOEL')
expect(helper.markup('foo.md', content)).to eq('NOEL')
end
it 'delegates to #asciidoc_unsafe when file name corresponds to AsciiDoc' do
- expect(helper).to receive(:asciidoc?).with('foo.adoc').and_return(true)
+ expect(Gitlab::MarkupHelper).to receive(:asciidoc?).with('foo.adoc').and_return(true)
expect(helper).to receive(:asciidoc_unsafe).and_return('NOEL')
expect(helper.markup('foo.adoc', content)).to eq('NOEL')
end
it 'uses passed in rendered content' do
- expect(helper).not_to receive(:gitlab_markdown?)
+ expect(Gitlab::MarkupHelper).not_to receive(:gitlab_markdown?)
expect(helper).not_to receive(:markdown_unsafe)
expect(helper.markup('foo.md', content, rendered: '<p>NOEL</p>')).to eq('<p>NOEL</p>')
@@ -562,20 +562,6 @@ FooBar
shared_examples_for 'common markdown examples' do
let(:project_base) { build(:project, :repository) }
- it 'displays inline code' do
- object = create_object('Text with `inline code`')
- expected = 'Text with <code>inline code</code>'
-
- expect(first_line_in_markdown(object, attribute, 100, project: project)).to match(expected)
- end
-
- it 'truncates the text with multiple paragraphs' do
- object = create_object("Paragraph 1\n\nParagraph 2")
- expected = 'Paragraph 1...'
-
- expect(first_line_in_markdown(object, attribute, 100, project: project)).to match(expected)
- end
-
it 'displays the first line of a code block' do
object = create_object("```\nCode block\nwith two lines\n```")
expected = %r{<pre.+><code><span class="line">Code block\.\.\.</span>\n</code></pre>}
@@ -591,18 +577,6 @@ FooBar
expect(first_line_in_markdown(object, attribute, 150, project: project)).to match(expected)
end
- it 'preserves a link href when link text is truncated' do
- text = 'The quick brown fox jumped over the lazy dog' # 44 chars
- link_url = 'http://example.com/foo/bar/baz' # 30 chars
- input = "#{text}#{text}#{text} #{link_url}" # 163 chars
- expected_link_text = 'http://example...</a>'
-
- object = create_object(input)
-
- expect(first_line_in_markdown(object, attribute, 150, project: project)).to match(link_url)
- expect(first_line_in_markdown(object, attribute, 150, project: project)).to match(expected_link_text)
- end
-
it 'preserves code color scheme' do
object = create_object("```ruby\ndef test\n 'hello world'\nend\n```")
expected = "\n<pre class=\"code highlight js-syntax-highlight language-ruby\">" \
@@ -669,40 +643,6 @@ FooBar
expect(result).to include(html)
end
- it 'truncates Markdown properly' do
- object = create_object("@#{user.username}, can you look at this?\nHello world\n")
- actual = first_line_in_markdown(object, attribute, 100, project: project)
-
- doc = Nokogiri::HTML.parse(actual)
-
- # Make sure we didn't create invalid markup
- expect(doc.errors).to be_empty
-
- # Leading user link
- expect(doc.css('a').length).to eq(1)
- expect(doc.css('a')[0].attr('href')).to eq user_path(user)
- expect(doc.css('a')[0].text).to eq "@#{user.username}"
-
- expect(doc.content).to eq "@#{user.username}, can you look at this?..."
- end
-
- it 'truncates Markdown with emoji properly' do
- object = create_object("foo :wink:\nbar :grinning:")
- actual = first_line_in_markdown(object, attribute, 100, project: project)
-
- doc = Nokogiri::HTML.parse(actual)
-
- # Make sure we didn't create invalid markup
- # But also account for the 2 errors caused by the unknown `gl-emoji` elements
- expect(doc.errors.length).to eq(2)
-
- expect(doc.css('gl-emoji').length).to eq(2)
- expect(doc.css('gl-emoji')[0].attr('data-name')).to eq 'wink'
- expect(doc.css('gl-emoji')[1].attr('data-name')).to eq 'grinning'
-
- expect(doc.content).to eq "foo 😉\nbar 😀"
- end
-
it 'does not post-process truncated text', :request_store do
object = create_object("hello \n\n [Test](README.md)")
diff --git a/spec/helpers/milestones_helper_spec.rb b/spec/helpers/milestones_helper_spec.rb
new file mode 100644
index 00000000000..f7f3b7d8227
--- /dev/null
+++ b/spec/helpers/milestones_helper_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MilestonesHelper do
+ let_it_be(:issuable) { build(:merge_request) }
+
+ describe '#milestone_header_class' do
+ using RSpec::Parameterized::TableSyntax
+
+ color_primary = 'gl-bg-blue-500 gl-text-white'
+ border_empty = 'gl-border-bottom-0 gl-rounded-base'
+
+ where(:primary, :issuables, :header_color, :header_border) do
+ true | [issuable] | color_primary | ''
+ true | [] | color_primary | border_empty
+ false | [] | '' | border_empty
+ false | [issuable] | '' | ''
+ end
+
+ with_them do
+ subject { helper.milestone_header_class(primary, issuables) }
+
+ it { is_expected.to eq("#{header_color} #{header_border} gl-display-flex") }
+ end
+ end
+
+ describe '#milestone_counter_class' do
+ context 'when primary is set to true' do
+ subject { helper.milestone_counter_class(true) }
+
+ it { is_expected.to eq('gl-text-white') }
+ end
+
+ context 'when primary is set to false' do
+ subject { helper.milestone_counter_class(false) }
+
+ it { is_expected.to eq('gl-text-gray-500') }
+ end
+ end
+end
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index f0ad2038347..4a37e17fb08 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe NavHelper do
using RSpec::Parameterized::TableSyntax
where path: %w(
- merge_requests#show
+ projects/merge_requests#show
projects/merge_requests/conflicts#show
issues#show
milestones#show
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index a9db2a1c008..07c2d50f70a 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -825,7 +825,7 @@ RSpec.describe ProjectsHelper do
end
context 'gitaly is working appropriately' do
- let(:license) { Licensee::License.new('mit') }
+ let(:license) { ::Gitlab::Git::DeclaredLicense.new(key: 'mit', name: 'MIT License') }
before do
expect(repository).to receive(:license).and_return(license)
@@ -1336,4 +1336,24 @@ RSpec.describe ProjectsHelper do
)
end
end
+
+ describe '#localized_project_human_access' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:key, :localized_project_human_access) do
+ Gitlab::Access::NO_ACCESS | _('No access')
+ Gitlab::Access::MINIMAL_ACCESS | _("Minimal Access")
+ Gitlab::Access::GUEST | _('Guest')
+ Gitlab::Access::REPORTER | _('Reporter')
+ Gitlab::Access::DEVELOPER | _('Developer')
+ Gitlab::Access::MAINTAINER | _('Maintainer')
+ Gitlab::Access::OWNER | _('Owner')
+ end
+
+ with_them do
+ it 'with correct key' do
+ expect(helper.localized_project_human_access(key)).to eq(localized_project_human_access)
+ end
+ end
+ end
end
diff --git a/spec/helpers/recaptcha_helper_spec.rb b/spec/helpers/recaptcha_helper_spec.rb
index 8ad91a0a217..2c327431437 100644
--- a/spec/helpers/recaptcha_helper_spec.rb
+++ b/spec/helpers/recaptcha_helper_spec.rb
@@ -9,21 +9,70 @@ RSpec.describe RecaptchaHelper, type: :helper do
allow(helper).to receive(:session) { session }
end
- describe '.show_recaptcha_sign_up?' do
- context 'when reCAPTCHA is disabled' do
- it 'returns false' do
- stub_application_setting(recaptcha_enabled: false)
+ shared_examples 'Gitlab QA bypass' do
+ context 'when GITLAB_QA_USER_AGENT env var is present' do
+ using RSpec::Parameterized::TableSyntax
- expect(helper.show_recaptcha_sign_up?).to be_falsey
+ where(:dot_com, :user_agent, :qa_user_agent, :result) do
+ false | 'qa_user_agent' | 'qa_user_agent' | true
+ true | nil | 'qa_user_agent' | true
+ true | '' | 'qa_user_agent' | true
+ true | 'qa_user_agent' | '' | true
+ true | 'qa_user_agent' | nil | true
+ true | 'qa_user_agent' | 'qa_user_agent' | false
end
- end
- context 'when reCAPTCHA is enabled' do
- it 'returns true' do
- stub_application_setting(recaptcha_enabled: true)
+ with_them do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ stub_env('GITLAB_QA_USER_AGENT', qa_user_agent)
+
+ request_double = instance_double(ActionController::TestRequest, user_agent: user_agent)
+ allow(helper).to receive(:request).and_return(request_double)
+ end
- expect(helper.show_recaptcha_sign_up?).to be_truthy
+ it { is_expected.to eq result }
end
end
end
+
+ describe '.show_recaptcha_sign_up?' do
+ let(:setting_state) { true }
+
+ before do
+ stub_application_setting(recaptcha_enabled: setting_state)
+ end
+
+ subject { helper.show_recaptcha_sign_up? }
+
+ it { is_expected.to eq true }
+
+ context 'when setting is disabled' do
+ let(:setting_state) { false }
+
+ it { is_expected.to eq false }
+ end
+
+ include_examples 'Gitlab QA bypass'
+ end
+
+ describe '.recaptcha_enabled_on_login?' do
+ let(:setting_state) { true }
+
+ before do
+ stub_application_setting(login_recaptcha_protection_enabled: setting_state)
+ end
+
+ subject { helper.recaptcha_enabled_on_login? }
+
+ it { is_expected.to eq true }
+
+ context 'when setting is disabled' do
+ let(:setting_state) { false }
+
+ it { is_expected.to eq false }
+ end
+
+ include_examples 'Gitlab QA bypass'
+ end
end
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index 59a92c067f4..5a9deb5c63b 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -49,6 +49,12 @@ RSpec.describe ReleasesHelper do
expect(helper.data_for_releases_page[:new_release_path]).to eq(new_project_release_path(project))
end
end
+
+ context 'new releases redirect new milestone creation' do
+ it 'redirects new_milestone_path back to the release page' do
+ expect(helper.data_for_new_release_page[:new_milestone_path]).to include('redirect_path')
+ end
+ end
end
describe '#data_for_edit_release_page' do
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index ad0705e4fbf..20718ad2f48 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -776,7 +776,7 @@ RSpec.describe SearchHelper do
end
context 'project data' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:project_metadata) { { project_path: project.path, issues_path: "/issues" } }
let(:scope) { 'issues' }
let(:code_search) { true }
@@ -848,4 +848,295 @@ RSpec.describe SearchHelper do
end
end
end
+
+ describe '.search_navigation' do
+ using RSpec::Parameterized::TableSyntax
+ let(:user) { build(:user) }
+ let_it_be(:project) { build(:project) }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ allow(self).to receive(:can?).and_return(true)
+ allow(self).to receive(:project_search_tabs?).and_return(false)
+ allow(self).to receive(:feature_flag_tab_enabled?).and_return(false)
+ end
+
+ context 'projects' do
+ where(:global_project, :condition) do
+ nil | true
+ ref(:project) | false
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ @project = global_project
+
+ expect(search_navigation[:projects][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'code' do
+ where(:feature_flag_tab_enabled, :show_elasticsearch_tabs, :project_search_tabs, :condition) do
+ false | false | false | false
+ true | true | true | true
+ true | false | false | true
+ false | true | false | true
+ false | false | true | true
+ true | false | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ allow(search_service).to receive(:show_elasticsearch_tabs?).and_return(show_elasticsearch_tabs)
+ allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_code_tab).and_return(feature_flag_tab_enabled)
+ allow(self).to receive(:project_search_tabs?).with(:blobs).and_return(project_search_tabs)
+
+ expect(search_navigation[:blobs][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'issues' do
+ where(:feature_flag_tab_enabled, :project_search_tabs, :condition) do
+ false | false | false
+ true | true | true
+ true | false | true
+ false | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_issues_tab).and_return(feature_flag_tab_enabled)
+ allow(self).to receive(:project_search_tabs?).with(:issues).and_return(project_search_tabs)
+
+ expect(search_navigation[:issues][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'merge requests' do
+ where(:feature_flag_tab_enabled, :project_search_tabs, :condition) do
+ false | false | false
+ true | true | true
+ true | false | true
+ false | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_merge_requests_tab).and_return(feature_flag_tab_enabled)
+ allow(self).to receive(:project_search_tabs?).with(:merge_requests).and_return(project_search_tabs)
+
+ expect(search_navigation[:merge_requests][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'wiki' do
+ where(:project_search_tabs, :show_elasticsearch_tabs, :condition) do
+ false | false | false
+ true | true | true
+ true | false | true
+ false | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ allow(search_service).to receive(:show_elasticsearch_tabs?).and_return(show_elasticsearch_tabs)
+ allow(self).to receive(:project_search_tabs?).with(:wiki).and_return(project_search_tabs)
+
+ expect(search_navigation[:wiki_blobs][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'commits' do
+ where(:feature_flag_tab_enabled, :show_elasticsearch_tabs, :project_search_tabs, :condition) do
+ false | false | false | false
+ true | true | true | true
+ true | false | false | false
+ false | true | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ allow(search_service).to receive(:show_elasticsearch_tabs?).and_return(show_elasticsearch_tabs)
+ allow(self).to receive(:feature_flag_tab_enabled?).with(:global_search_commits_tab).and_return(feature_flag_tab_enabled)
+ allow(self).to receive(:project_search_tabs?).with(:commits).and_return(project_search_tabs)
+
+ expect(search_navigation[:commits][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'comments' do
+ where(:show_elasticsearch_tabs, :project_search_tabs, :condition) do
+ true | true | true
+ false | false | false
+ true | false | true
+ false | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ allow(search_service).to receive(:show_elasticsearch_tabs?).and_return(show_elasticsearch_tabs)
+ allow(self).to receive(:project_search_tabs?).with(:notes).and_return(project_search_tabs)
+
+ expect(search_navigation[:notes][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'milestones' do
+ where(:global_project, :project_search_tabs, :condition) do
+ ref(:project) | true | true
+ nil | false | true
+ ref(:project) | false | false
+ nil | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ @project = global_project
+ allow(self).to receive(:project_search_tabs?).with(:milestones).and_return(project_search_tabs)
+
+ expect(search_navigation[:milestones][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'users' do
+ where(:show_user_search_tab, :condition) do
+ true | true
+ false | false
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ allow(self).to receive(:show_user_search_tab?).and_return(show_user_search_tab)
+
+ expect(search_navigation[:users][:condition]).to eq(condition)
+ end
+ end
+ end
+
+ context 'snippet_titles' do
+ where(:global_project, :global_show_snippets, :condition) do
+ ref(:project) | true | false
+ nil | false | false
+ ref(:project) | false | false
+ nil | true | true
+ end
+
+ with_them do
+ it 'data item condition is set correctly' do
+ @show_snippets = global_show_snippets
+ @project = global_project
+
+ expect(search_navigation[:snippet_titles][:condition]).to eq(condition)
+ end
+ end
+ end
+ end
+
+ describe '.search_navigation_json' do
+ using RSpec::Parameterized::TableSyntax
+ context 'with data' do
+ example_data_1 = {
+ projects: { label: _("Projects"), condition: true },
+ blobs: { label: _("Code"), condition: false }
+ }
+
+ example_data_2 = {
+ projects: { label: _("Projects"), condition: false },
+ blobs: { label: _("Code"), condition: false }
+ }
+
+ example_data_3 = {
+ projects: { label: _("Projects"), condition: true },
+ blobs: { label: _("Code"), condition: true },
+ epics: { label: _("Epics"), condition: true }
+ }
+
+ where(:data, :matcher) do
+ example_data_1 | -> { include("projects") }
+ example_data_2 | -> { eq("{}") }
+ example_data_3 | -> { include("projects", "blobs", "epics") }
+ end
+
+ with_them do
+ it 'converts correctly' do
+ allow(self).to receive(:search_navigation).with(no_args).and_return(data)
+
+ expect(search_navigation_json).to instance_exec(&matcher)
+ end
+ end
+ end
+ end
+
+ describe '.search_filter_link_json' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'data' do
+ where(:scope, :label, :data, :search, :active_scope) do
+ "projects" | "Projects" | { qa_selector: 'projects_tab' } | nil | "projects"
+ "snippet_titles" | "Titles and Descriptions" | nil | { snippets: "test" } | "code"
+ "projects" | "Projects" | { qa_selector: 'projects_tab' } | nil | "issue"
+ "snippet_titles" | "Titles and Descriptions" | nil | { snippets: "test" } | "snippet_titles"
+ end
+
+ with_them do
+ it 'converts correctly' do
+ @timeout = false
+ @scope = active_scope
+ @search_results = double
+ dummy_count = 1000
+ allow(self).to receive(:search_path).with(any_args).and_return("link test")
+
+ allow(@search_results).to receive(:formatted_count).with(scope).and_return(dummy_count)
+ allow(self).to receive(:search_count_path).with(any_args).and_return("test count link")
+
+ current_scope = scope == active_scope
+
+ expected = {
+ label: label,
+ scope: scope,
+ data: data,
+ link: "link test",
+ active: current_scope
+ }
+
+ expected[:count] = dummy_count if current_scope
+ expected[:count_link] = "test count link" unless current_scope
+
+ expect(search_filter_link_json(scope, label, data, search)).to eq(expected)
+ end
+ end
+ end
+ end
+
+ describe 'show_elasticsearch_tabs' do
+ subject { search_service.show_elasticsearch_tabs? }
+
+ let(:user) { build(:user) }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ describe 'show_epics' do
+ subject { search_service.show_epics? }
+
+ let(:user) { build(:user) }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ end
+
+ it { is_expected.to eq(false) }
+ end
end
diff --git a/spec/helpers/sessions_helper_spec.rb b/spec/helpers/sessions_helper_spec.rb
index 15424425060..c7b8225b866 100644
--- a/spec/helpers/sessions_helper_spec.rb
+++ b/spec/helpers/sessions_helper_spec.rb
@@ -92,6 +92,12 @@ RSpec.describe SessionsHelper do
end
context 'when an email address is very short' do
+ let(:email) { 'a@b.c' }
+
+ it { is_expected.to eq('a@b.c') }
+ end
+
+ context 'when an email address is even shorter' do
let(:email) { 'a@b' }
it { is_expected.to eq('a@b') }
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index a8945424877..c64d5990cd9 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -292,4 +292,22 @@ RSpec.describe TodosHelper do
it { is_expected.to eq(result) }
end
end
+
+ describe '#todos_filter_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:state, :result) do
+ 'done' | 'done'
+ 'pending' | 'pending'
+ '' | nil
+ end
+
+ with_them do
+ before do
+ allow(helper).to receive(:params).and_return({ state: state })
+ end
+
+ it { expect(helper.todos_filter_params[:state]).to eq(result) }
+ end
+ end
end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index 617a796781e..c2c78be6a0f 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -204,11 +204,12 @@ RSpec.describe UsersHelper do
badges = helper.user_badges_in_admin_section(user)
- expect(badges).to match_array([
- { text: s_("AdminUsers|Blocked"), variant: "danger" },
- { text: s_("AdminUsers|Admin"), variant: "success" },
- { text: s_("AdminUsers|External"), variant: "secondary" }
- ])
+ expect(badges).to match_array(
+ [
+ { text: s_("AdminUsers|Blocked"), variant: "danger" },
+ { text: s_("AdminUsers|Admin"), variant: "success" },
+ { text: s_("AdminUsers|External"), variant: "secondary" }
+ ])
end
end
diff --git a/spec/helpers/wiki_helper_spec.rb b/spec/helpers/wiki_helper_spec.rb
index 75128d758f9..59624dc0682 100644
--- a/spec/helpers/wiki_helper_spec.rb
+++ b/spec/helpers/wiki_helper_spec.rb
@@ -75,41 +75,38 @@ RSpec.describe WikiHelper do
describe '#wiki_sort_controls' do
let(:wiki) { create(:project_wiki) }
- let(:wiki_link) { helper.wiki_sort_controls(wiki, sort, direction) }
+ let(:wiki_link) { helper.wiki_sort_controls(wiki, direction) }
let(:classes) { "gl-button btn btn-default btn-icon has-tooltip reverse-sort-btn qa-reverse-sort rspec-reverse-sort" }
- def expected_link(sort, direction, icon_class)
- path = "/#{wiki.project.full_path}/-/wikis/pages?direction=#{direction}&sort=#{sort}"
-
- helper.link_to(path, type: 'button', class: classes, title: 'Sort direction') do
+ def expected_link(direction, icon_class)
+ path = "/#{wiki.project.full_path}/-/wikis/pages?direction=#{direction}"
+ title = direction == 'desc' ? _('Sort direction: Ascending') : _('Sort direction: Descending')
+ helper.link_to(path, type: 'button', class: classes, title: title) do
helper.sprite_icon("sort-#{icon_class}")
end
end
context 'initial call' do
- let(:sort) { nil }
let(:direction) { nil }
it 'renders with default values' do
- expect(wiki_link).to eq(expected_link('title', 'desc', 'lowest'))
+ expect(wiki_link).to eq(expected_link('desc', 'lowest'))
end
end
- context 'sort by title' do
- let(:sort) { 'title' }
+ context 'sort by asc order' do
let(:direction) { 'asc' }
it 'renders a link with opposite direction' do
- expect(wiki_link).to eq(expected_link('title', 'desc', 'lowest'))
+ expect(wiki_link).to eq(expected_link('desc', 'lowest'))
end
end
- context 'sort by created_at' do
- let(:sort) { 'created_at' }
+ context 'sort by desc order' do
let(:direction) { 'desc' }
it 'renders a link with opposite direction' do
- expect(wiki_link).to eq(expected_link('created_at', 'asc', 'highest'))
+ expect(wiki_link).to eq(expected_link('asc', 'highest'))
end
end
end
diff --git a/spec/initializers/100_patch_omniauth_oauth2_spec.rb b/spec/initializers/100_patch_omniauth_oauth2_spec.rb
index 36a14816b7e..8c8e2b24484 100644
--- a/spec/initializers/100_patch_omniauth_oauth2_spec.rb
+++ b/spec/initializers/100_patch_omniauth_oauth2_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'OmniAuth::Strategies::OAuth2' do
it 'verifies the gem version' do
current_version = OmniAuth::OAuth2::VERSION
- expected_version = '1.7.3'
+ expected_version = '1.8.0'
expect(current_version).to eq(expected_version), <<~EOF
New version #{current_version} of the `omniauth-oauth2` gem detected!
diff --git a/spec/initializers/attr_encrypted_no_db_connection_spec.rb b/spec/initializers/attr_encrypted_no_db_connection_spec.rb
deleted file mode 100644
index 34d9e182370..00000000000
--- a/spec/initializers/attr_encrypted_no_db_connection_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'GitLab monkey-patches to AttrEncrypted' do
- describe '#attribute_instance_methods_as_symbols_available?' do
- let(:klass) do
- Class.new(ActiveRecord::Base) do
- # We need some sort of table to work on
- self.table_name = 'projects'
-
- attr_encrypted :foo
- end
- end
-
- it 'returns false' do
- expect(ActiveRecord::Base.__send__(:attribute_instance_methods_as_symbols_available?)).to be_falsy
- end
-
- it 'does not define virtual attributes' do
- instance = klass.new
-
- aggregate_failures do
- %w[
- encrypted_foo encrypted_foo=
- encrypted_foo_iv encrypted_foo_iv=
- encrypted_foo_salt encrypted_foo_salt=
- ].each do |method_name|
- expect(instance).not_to respond_to(method_name)
- end
- end
- end
-
- it 'calls attr_changed? method with kwargs' do
- obj = klass.new
-
- expect(obj.foo_changed?).to eq(false)
- end
- end
-end
diff --git a/spec/initializers/attr_encrypted_thread_safe_spec.rb b/spec/initializers/attr_encrypted_thread_safe_spec.rb
deleted file mode 100644
index e79b7c716ec..00000000000
--- a/spec/initializers/attr_encrypted_thread_safe_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe AttrEncrypted do
- describe '#encrypted_attributes' do
- subject do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'projects'
-
- attr_accessor :encrypted_foo
- attr_accessor :encrypted_foo_iv
-
- attr_encrypted :foo, key: 'This is a key that is 256 bits!!'
- end
- end
-
- it 'does not share state with other instances' do
- instance = subject.new
- instance.foo = 'bar'
-
- another_instance = subject.new
-
- expect(instance.encrypted_attributes[:foo][:operation]).to eq(:encrypting)
- expect(another_instance.encrypted_attributes[:foo][:operation]).to be_nil
- end
- end
-end
diff --git a/spec/initializers/diagnostic_reports_spec.rb b/spec/initializers/diagnostic_reports_spec.rb
index 70574194916..01b1ed9b7b5 100644
--- a/spec/initializers/diagnostic_reports_spec.rb
+++ b/spec/initializers/diagnostic_reports_spec.rb
@@ -21,25 +21,33 @@ RSpec.describe 'diagnostic reports' do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_ENABLED', true)
end
- context 'when run in application context' do
+ context 'when run in Puma context' do
before do
- allow(::Gitlab::Runtime).to receive(:application?).and_return(true)
+ allow(::Gitlab::Runtime).to receive(:puma?).and_return(true)
end
- it 'modifies worker startup hooks' do
- report_daemon = instance_double(Gitlab::Memory::ReportsDaemon)
+ let(:report_daemon) { instance_double(Gitlab::Memory::ReportsDaemon) }
+ it 'modifies worker startup hooks, starts Gitlab::Memory::ReportsDaemon' do
expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start).and_call_original
- expect(Gitlab::Memory::ReportsDaemon).to receive(:instance).and_return(report_daemon)
- expect(report_daemon).to receive(:start)
+
+ expect_next_instance_of(Gitlab::Memory::ReportsDaemon) do |daemon|
+ expect(daemon).to receive(:start).and_call_original
+
+ # make sleep no-op
+ allow(daemon).to receive(:sleep).and_return(nil)
+
+ # let alive return 3 times: true, true, false
+ allow(daemon).to receive(:alive).and_return(true, true, false)
+ end
load_initializer
end
end
- context 'when run in non-application context, such as rails console or tests' do
+ context 'when run in non-Puma context, such as rails console, tests, Sidekiq' do
before do
- allow(::Gitlab::Runtime).to receive(:application?).and_return(false)
+ allow(::Gitlab::Runtime).to receive(:puma?).and_return(false)
end
include_examples 'does not modify worker startup hooks'
@@ -48,7 +56,7 @@ RSpec.describe 'diagnostic reports' do
context 'when GITLAB_DIAGNOSTIC_REPORTS_ENABLED is not set' do
before do
- allow(::Gitlab::Runtime).to receive(:application?).and_return(true)
+ allow(::Gitlab::Runtime).to receive(:puma?).and_return(true)
end
include_examples 'does not modify worker startup hooks'
@@ -57,7 +65,7 @@ RSpec.describe 'diagnostic reports' do
context 'when GITLAB_DIAGNOSTIC_REPORTS_ENABLED is set to false' do
before do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_ENABLED', false)
- allow(::Gitlab::Runtime).to receive(:application?).and_return(true)
+ allow(::Gitlab::Runtime).to receive(:puma?).and_return(true)
end
include_examples 'does not modify worker startup hooks'
diff --git a/spec/initializers/memory_watchdog_spec.rb b/spec/initializers/memory_watchdog_spec.rb
index 56f995b5cd3..36f96131c3d 100644
--- a/spec/initializers/memory_watchdog_spec.rb
+++ b/spec/initializers/memory_watchdog_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
RSpec.describe 'memory watchdog' do
subject(:run_initializer) do
- load Rails.root.join('config/initializers/memory_watchdog.rb')
+ load rails_root_join('config/initializers/memory_watchdog.rb')
end
context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is truthy' do
@@ -17,6 +17,7 @@ RSpec.describe 'memory watchdog' do
context 'when runtime is an application' do
let(:watchdog) { instance_double(Gitlab::Memory::Watchdog) }
let(:background_task) { instance_double(Gitlab::BackgroundTask) }
+ let(:logger) { Gitlab::AppLogger }
before do
allow(Gitlab::Runtime).to receive(:application?).and_return(true)
@@ -28,16 +29,65 @@ RSpec.describe 'memory watchdog' do
run_initializer
end
- shared_examples 'starts watchdog with handler' do |handler_class|
- it "uses the #{handler_class} and starts the watchdog" do
- expect(Gitlab::Memory::Watchdog).to receive(:new).with(
- handler: an_instance_of(handler_class),
- logger: Gitlab::AppLogger).and_return(watchdog)
- expect(Gitlab::BackgroundTask).to receive(:new).with(watchdog).and_return(background_task)
- expect(background_task).to receive(:start)
- expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start).and_yield
+ shared_examples 'starts configured watchdog' do |handler_class|
+ let(:configuration) { Gitlab::Memory::Watchdog::Configuration.new }
+ let(:watchdog_monitors_params) do
+ {
+ Gitlab::Memory::Watchdog::Monitor::HeapFragmentation => {
+ max_heap_fragmentation: max_heap_fragmentation,
+ max_strikes: max_strikes
+ },
+ Gitlab::Memory::Watchdog::Monitor::UniqueMemoryGrowth => {
+ max_mem_growth: max_mem_growth,
+ max_strikes: max_strikes
+ }
+ }
+ end
+
+ shared_examples 'configures and starts watchdog' do
+ it "correctly configures and starts watchdog", :aggregate_failures do
+ expect(watchdog).to receive(:configure).and_yield(configuration)
+
+ watchdog_monitors_params.each do |monitor_class, params|
+ expect(configuration.monitors).to receive(:use).with(monitor_class, **params)
+ end
+
+ expect(Gitlab::Memory::Watchdog).to receive(:new).and_return(watchdog)
+ expect(Gitlab::BackgroundTask).to receive(:new).with(watchdog).and_return(background_task)
+ expect(background_task).to receive(:start)
+ expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start).and_yield
+
+ run_initializer
+
+ expect(configuration.handler).to be_an_instance_of(handler_class)
+ expect(configuration.logger).to eq(logger)
+ expect(configuration.sleep_time_seconds).to eq(sleep_time_seconds)
+ end
+ end
+
+ context 'when settings are not passed through the environment' do
+ let(:max_strikes) { 5 }
+ let(:max_heap_fragmentation) { 0.5 }
+ let(:max_mem_growth) { 3.0 }
+ let(:sleep_time_seconds) { 60 }
+
+ include_examples 'configures and starts watchdog'
+ end
+
+ context 'when settings are passed through the environment' do
+ let(:max_strikes) { 6 }
+ let(:max_heap_fragmentation) { 0.4 }
+ let(:max_mem_growth) { 2.0 }
+ let(:sleep_time_seconds) { 50 }
+
+ before do
+ stub_env('GITLAB_MEMWD_MAX_STRIKES', 6)
+ stub_env('GITLAB_MEMWD_SLEEP_TIME_SEC', 50)
+ stub_env('GITLAB_MEMWD_MAX_MEM_GROWTH', 2.0)
+ stub_env('GITLAB_MEMWD_MAX_HEAP_FRAG', 0.4)
+ end
- run_initializer
+ include_examples 'configures and starts watchdog'
end
end
@@ -59,7 +109,7 @@ RSpec.describe 'memory watchdog' do
allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
end
- it_behaves_like 'starts watchdog with handler', Gitlab::Memory::Watchdog::PumaHandler
+ it_behaves_like 'starts configured watchdog', Gitlab::Memory::Watchdog::PumaHandler
end
# rubocop: enable RSpec/VerifiedDoubles
@@ -68,11 +118,11 @@ RSpec.describe 'memory watchdog' do
allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
end
- it_behaves_like 'starts watchdog with handler', Gitlab::Memory::Watchdog::TermProcessHandler
+ it_behaves_like 'starts configured watchdog', Gitlab::Memory::Watchdog::TermProcessHandler
end
context 'when other runtime' do
- it_behaves_like 'starts watchdog with handler', Gitlab::Memory::Watchdog::NullHandler
+ it_behaves_like 'starts configured watchdog', Gitlab::Memory::Watchdog::NullHandler
end
end
diff --git a/spec/initializers/sawyer_patch_spec.rb b/spec/initializers/sawyer_patch_spec.rb
index dc922654d7d..b3c10e63460 100644
--- a/spec/initializers/sawyer_patch_spec.rb
+++ b/spec/initializers/sawyer_patch_spec.rb
@@ -1,5 +1,5 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require 'sawyer'
require_relative '../../config/initializers/sawyer_patch'
@@ -64,6 +64,28 @@ RSpec.describe 'sawyer_patch' do
expect(sawyer_resource.count_total).to eq(1)
expect(sawyer_resource.count_total?).to eq(true)
expect(sawyer_resource.count_total + 1).to eq(2)
+ sawyer_resource.count_total = 3
+ expect(sawyer_resource.count_total).to eq(3)
expect(sawyer_resource.user.name).to eq('User name')
end
+
+ it 'logs when a sawyer resource dynamic method is called' do
+ sawyer_resource = Sawyer::Resource.new(
+ Sawyer::Agent.new(''),
+ {
+ count_total: 1,
+ user: { name: 'User name' }
+ }
+ )
+ expected_attributes = []
+ allow(Gitlab::Import::Logger).to receive(:warn) do |params|
+ expected_attributes.push(params[:attribute])
+ end
+
+ sawyer_resource.count_total
+ sawyer_resource.user
+ sawyer_resource.user.name
+
+ expect(expected_attributes).to match_array(%i[count_total user user name])
+ end
end
diff --git a/spec/initializers/sidekiq_spec.rb b/spec/initializers/sidekiq_spec.rb
index e34f59c3427..063dddd8c46 100644
--- a/spec/initializers/sidekiq_spec.rb
+++ b/spec/initializers/sidekiq_spec.rb
@@ -42,4 +42,61 @@ RSpec.describe 'sidekiq' do
it { is_expected.to be_falsey }
end
end
+
+ describe 'load_cron_jobs!' do
+ subject { load_cron_jobs! }
+
+ let(:cron_for_service_ping) { '4 7 * * 4' }
+
+ let(:cron_jobs_settings) do
+ {
+ 'gitlab_service_ping_worker' => {
+ 'cron' => nil,
+ 'job_class' => 'GitlabServicePingWorker'
+ },
+ 'import_export_project_cleanup_worker' => {
+ 'cron' => '0 * * * *',
+ 'job_class' => 'ImportExportProjectCleanupWorker'
+ },
+ "invalid_worker" => {
+ 'cron' => '0 * * * *'
+ }
+ }
+ end
+
+ let(:cron_jobs_hash) do
+ {
+ 'gitlab_service_ping_worker' => {
+ 'cron' => cron_for_service_ping,
+ 'class' => 'GitlabServicePingWorker'
+ },
+ 'import_export_project_cleanup_worker' => {
+ 'cron' => '0 * * * *',
+ 'class' => 'ImportExportProjectCleanupWorker'
+ }
+ }
+ end
+
+ around do |example|
+ original_settings = Gitlab.config['cron_jobs']
+ Gitlab.config['cron_jobs'] = cron_jobs_settings
+
+ example.run
+
+ Gitlab.config['cron_jobs'] = original_settings
+ end
+
+ it 'loads the cron jobs into sidekiq-cron' do
+ allow(Settings).to receive(:cron_for_service_ping).and_return(cron_for_service_ping)
+
+ expect(Sidekiq::Cron::Job).to receive(:load_from_hash!).with(cron_jobs_hash)
+
+ if Gitlab.ee?
+ expect(Gitlab::Mirror).to receive(:configure_cron_job!)
+ expect(Gitlab::Geo).to receive(:configure_cron_jobs!)
+ end
+
+ subject
+ end
+ end
end
diff --git a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
index adc8fdcdd9c..0132102b117 100644
--- a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
+++ b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
@@ -9,11 +9,26 @@ RSpec.describe API::Entities::BulkImports::EntityFailure do
it 'has the correct attributes' do
expect(subject).to include(
- :pipeline_class,
- :pipeline_step,
+ :relation,
+ :step,
:exception_class,
+ :exception_message,
:correlation_id_value,
:created_at
)
end
+
+ describe 'exception message' do
+ it 'truncates exception message to 72 characters' do
+ failure.update!(exception_message: 'a' * 100)
+
+ expect(subject[:exception_message].length).to eq(72)
+ end
+
+ it 'removes paths from the message' do
+ failure.update!(exception_message: 'Test /foo/bar')
+
+ expect(subject[:exception_message]).to eq('Test [FILTERED]')
+ end
+ end
end
diff --git a/spec/lib/api/entities/ml/mlflow/run_spec.rb b/spec/lib/api/entities/ml/mlflow/run_spec.rb
index 84234f474f5..b8d38093681 100644
--- a/spec/lib/api/entities/ml/mlflow/run_spec.rb
+++ b/spec/lib/api/entities/ml/mlflow/run_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe API::Entities::Ml::Mlflow::Run do
- let_it_be(:candidate) { create(:ml_candidates) }
+ let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params) }
subject { described_class.new(candidate).as_json }
@@ -12,10 +12,52 @@ RSpec.describe API::Entities::Ml::Mlflow::Run do
end
it 'has the id' do
- expect(subject[:run][:info][:run_id]).to eq(candidate.iid.to_s)
+ expect(subject.dig(:run, :info, :run_id)).to eq(candidate.iid.to_s)
end
- it 'data is empty' do
- expect(subject[:run][:data]).to be_empty
+ it 'presents the metrics' do
+ expect(subject.dig(:run, :data, :metrics).size).to eq(candidate.metrics.size)
+ end
+
+ it 'presents metrics correctly' do
+ presented_metric = subject.dig(:run, :data, :metrics)[0]
+ metric = candidate.metrics[0]
+
+ expect(presented_metric[:key]).to eq(metric.name)
+ expect(presented_metric[:value]).to eq(metric.value)
+ expect(presented_metric[:timestamp]).to eq(metric.tracked_at)
+ expect(presented_metric[:step]).to eq(metric.step)
+ end
+
+ it 'presents the params' do
+ expect(subject.dig(:run, :data, :params).size).to eq(candidate.params.size)
+ end
+
+ it 'presents params correctly' do
+ presented_param = subject.dig(:run, :data, :params)[0]
+ param = candidate.params[0]
+
+ expect(presented_param[:key]).to eq(param.name)
+ expect(presented_param[:value]).to eq(param.value)
+ end
+
+ context 'when candidate has no metrics' do
+ before do
+ allow(candidate).to receive(:metrics).and_return([])
+ end
+
+ it 'returns empty data' do
+ expect(subject.dig(:run, :data, :metrics)).to be_empty
+ end
+ end
+
+ context 'when candidate has no params' do
+ before do
+ allow(candidate).to receive(:params).and_return([])
+ end
+
+ it 'data is empty' do
+ expect(subject.dig(:run, :data, :params)).to be_empty
+ end
end
end
diff --git a/spec/lib/api/helpers/merge_requests_helpers_spec.rb b/spec/lib/api/helpers/merge_requests_helpers_spec.rb
index 1d68b7985f1..80810133469 100644
--- a/spec/lib/api/helpers/merge_requests_helpers_spec.rb
+++ b/spec/lib/api/helpers/merge_requests_helpers_spec.rb
@@ -25,9 +25,7 @@ RSpec.describe API::Helpers::MergeRequestsHelpers do
context 'when merge request is invalid' do
before do
allow(merge_request).to receive(:valid?).and_return(false)
- allow(helper).to receive_messages([
- :unprocessable_entity!, :conflict!, :render_validation_error!
- ])
+ allow(helper).to receive_messages([:unprocessable_entity!, :conflict!, :render_validation_error!])
end
API::Helpers::MergeRequestsHelpers::UNPROCESSABLE_ERROR_KEYS.each do |error_key|
diff --git a/spec/lib/api/helpers/open_api_spec.rb b/spec/lib/api/helpers/open_api_spec.rb
new file mode 100644
index 00000000000..fb14f7fe001
--- /dev/null
+++ b/spec/lib/api/helpers/open_api_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::OpenApi do
+ describe 'class methods' do
+ let(:klass) { Class.new.include(described_class) }
+
+ describe '.add_open_api_documentation!' do
+ before do
+ allow(YAML).to receive(:load_file).and_return({ 'metadata' => { 'key' => 'value' } })
+ end
+
+ it 'calls the add_swagger_documentation method' do
+ expect(klass).to receive(:add_swagger_documentation).with({ key: 'value' })
+
+ klass.add_open_api_documentation!
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
index aa4b0a137cd..66cf06cde20 100644
--- a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
@@ -7,12 +7,23 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
describe '#redirect_registry_request' do
using RSpec::Parameterized::TableSyntax
+ include_context 'dependency proxy helpers context'
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be_with_reload(:package_setting) { create(:namespace_package_setting, namespace: group) }
+ let(:target) { project }
let(:options) { {} }
- subject { helper.redirect_registry_request(forward_to_registry, package_type, options) { helper.fallback } }
+ subject do
+ helper.redirect_registry_request(
+ forward_to_registry: forward_to_registry,
+ package_type: package_type,
+ target: target,
+ options: options
+ ) { helper.fallback }
+ end
before do
allow(helper).to receive(:options).and_return(for: described_class)
@@ -42,32 +53,57 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
%i[maven npm pypi].each do |forwardable_package_type|
context "with #{forwardable_package_type} packages" do
- include_context 'dependency proxy helpers context'
-
let(:package_type) { forwardable_package_type }
- let(:options) { { project: project } }
- where(:application_setting, :forward_to_registry, :example_name) do
- true | true | 'executing redirect'
- true | false | 'executing fallback'
- false | true | 'executing fallback'
- false | false | 'executing fallback'
+ where(:application_setting, :group_setting, :forward_to_registry, :example_name) do
+ true | nil | true | 'executing redirect'
+ true | nil | false | 'executing fallback'
+ false | nil | true | 'executing fallback'
+ false | nil | false | 'executing fallback'
+ true | false | true | 'executing fallback'
+ true | false | false | 'executing fallback'
+ false | true | true | 'executing redirect'
+ false | true | false | 'executing fallback'
end
with_them do
before do
- allow_fetch_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: application_setting)
+ allow_fetch_cascade_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: application_setting)
+ package_setting.update!("#{forwardable_package_type}_package_requests_forwarding" => group_setting)
end
it_behaves_like params[:example_name]
end
end
+ context 'when cascade_package_forwarding_settings is disabled' do
+ let(:package_type) { forwardable_package_type }
+ let(:forward_to_registry) { true }
+
+ before do
+ stub_feature_flags(cascade_package_forwarding_settings: false)
+ allow_fetch_cascade_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: true)
+ package_setting.update!("#{forwardable_package_type}_package_requests_forwarding" => false)
+ end
+
+ it_behaves_like 'executing redirect'
+ end
+
+ context 'when no target is present' do
+ let(:package_type) { forwardable_package_type }
+ let(:forward_to_registry) { true }
+ let(:target) { nil }
+
+ before do
+ allow_fetch_cascade_application_setting(attribute: "#{forwardable_package_type}_package_requests_forwarding", return_value: true)
+ package_setting.update!("#{forwardable_package_type}_package_requests_forwarding" => false)
+ end
+
+ it_behaves_like 'executing redirect'
+ end
+
context 'when maven_central_request_forwarding is disabled' do
let(:package_type) { :maven }
- let(:options) { { project: project } }
-
- include_context 'dependency proxy helpers context'
where(:application_setting, :forward_to_registry) do
true | true
@@ -79,7 +115,7 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
with_them do
before do
stub_feature_flags(maven_central_request_forwarding: false)
- allow_fetch_application_setting(attribute: "maven_package_requests_forwarding", return_value: application_setting)
+ allow_fetch_cascade_application_setting(attribute: "maven_package_requests_forwarding", return_value: application_setting)
end
it_behaves_like 'executing fallback'
diff --git a/spec/lib/api/helpers/packages_helpers_spec.rb b/spec/lib/api/helpers/packages_helpers_spec.rb
index cd6e718ce98..d764ed4afff 100644
--- a/spec/lib/api/helpers/packages_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages_helpers_spec.rb
@@ -35,26 +35,6 @@ RSpec.describe API::Helpers::PackagesHelpers do
expect(helper.send('authorize_read_package!', subject)).to eq nil
end
end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(read_package_policy_rule: false)
- end
-
- where(:subject, :expected_class) do
- ref(:project) | ::Project
- ref(:group) | ::Group
- ref(:package) | ::Packages::Package
- end
-
- with_them do
- it 'calls authorize! with correct subject' do
- expect(helper).to receive(:authorize!).with(:read_package, have_attributes(id: subject.id, class: expected_class))
-
- expect(helper.send('authorize_read_package!', subject)).to eq nil
- end
- end
- end
end
%i[create_package destroy_package].each do |action|
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index f25c75ef93c..652727f371b 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -110,6 +110,13 @@ RSpec.describe API::Helpers do
end
end
+ context 'when ID is a negative number' do
+ let(:existing_id) { project.id }
+ let(:non_existing_id) { -1 }
+
+ it_behaves_like 'project finder'
+ end
+
context 'when project is pending delete' do
let(:project_pending_delete) { create(:project, pending_delete: true) }
@@ -325,6 +332,13 @@ RSpec.describe API::Helpers do
it_behaves_like 'group finder'
end
+
+ context 'when ID is a negative number' do
+ let(:existing_id) { group.id }
+ let(:non_existing_id) { -1 }
+
+ it_behaves_like 'group finder'
+ end
end
end
@@ -421,6 +435,13 @@ RSpec.describe API::Helpers do
it_behaves_like 'namespace finder'
end
+
+ context 'when ID is a negative number' do
+ let(:existing_id) { namespace.id }
+ let(:non_existing_id) { -1 }
+
+ it_behaves_like 'namespace finder'
+ end
end
shared_examples 'user namespace finder' do
@@ -773,6 +794,58 @@ RSpec.describe API::Helpers do
end
end
+ describe '#present_artifacts_file!' do
+ context 'with object storage' do
+ let(:artifact) { create(:ci_job_artifact, :zip, :remote_store) }
+
+ subject { helper.present_artifacts_file!(artifact.file, project: artifact.job.project) }
+
+ before do
+ allow(helper).to receive(:env).and_return({})
+
+ stub_artifacts_object_storage(enabled: true)
+ end
+
+ it 'redirects to a CDN-fronted URL' do
+ expect(helper).to receive(:redirect)
+ expect(helper).to receive(:cdn_fronted_url).and_call_original
+ expect(Gitlab::ApplicationContext).to receive(:push).with(artifact: artifact.file.model).and_call_original
+ expect(Gitlab::ApplicationContext).to receive(:push).with(artifact_used_cdn: false).and_call_original
+
+ subject
+ end
+ end
+ end
+
+ describe '#cdn_fronted_url' do
+ before do
+ allow(helper).to receive(:env).and_return({})
+
+ stub_artifacts_object_storage(enabled: true)
+ end
+
+ context 'with a CI artifact' do
+ let(:artifact) { create(:ci_job_artifact, :zip, :remote_store) }
+
+ it 'retrieves a CDN-fronted URL' do
+ expect(artifact.file).to receive(:cdn_enabled_url).and_call_original
+ expect(Gitlab::ApplicationContext).to receive(:push).with(artifact_used_cdn: false).and_call_original
+ expect(helper.cdn_fronted_url(artifact.file, artifact.job.project)).to be_a(String)
+ end
+ end
+
+ context 'with a file upload' do
+ let(:url) { 'https://example.com/path/to/upload' }
+
+ it 'retrieves the file URL' do
+ file = double(url: url)
+
+ expect(Gitlab::ApplicationContext).not_to receive(:push)
+ expect(helper.cdn_fronted_url(file, nil)).to eq(url)
+ end
+ end
+ end
+
describe '#order_by_similarity?' do
where(:params, :allow_unauthorized, :current_user_set, :expected) do
{} | false | false | false
@@ -916,42 +989,5 @@ RSpec.describe API::Helpers do
it_behaves_like 'authorized'
end
-
- context 'when gitlab_shell_jwt_token is disabled' do
- let(:valid_secret_token) { +'valid' } # mutable string to use chomp!
- let(:invalid_secret_token) { +'invalid' } # mutable string to use chomp!
-
- before do
- stub_feature_flags(gitlab_shell_jwt_token: false)
- end
-
- context 'when shared secret is not provided' do
- it_behaves_like 'unauthorized'
- end
-
- context 'when shared secret provided via params' do
- let(:params) { { 'secret_token' => valid_secret_token } }
-
- it_behaves_like 'authorized'
-
- context 'but it is invalid' do
- let(:params) { { 'secret_token' => invalid_secret_token } }
-
- it_behaves_like 'unauthorized'
- end
- end
-
- context 'when shared secret provided via headers' do
- let(:headers) { { described_class::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(valid_secret_token) } }
-
- it_behaves_like 'authorized'
-
- context 'but it is invalid' do
- let(:headers) { { described_class::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(invalid_secret_token) } }
-
- it_behaves_like 'unauthorized'
- end
- end
- end
end
end
diff --git a/spec/lib/banzai/filter/pathological_markdown_filter_spec.rb b/spec/lib/banzai/filter/pathological_markdown_filter_spec.rb
deleted file mode 100644
index e0a07d1ea77..00000000000
--- a/spec/lib/banzai/filter/pathological_markdown_filter_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Banzai::Filter::PathologicalMarkdownFilter do
- include FilterSpecHelper
-
- let_it_be(:short_text) { '![a' * 5 }
- let_it_be(:long_text) { ([short_text] * 10).join(' ') }
- let_it_be(:with_images_text) { "![One ![one](one.jpg) #{'and\n' * 200} ![two ![two](two.jpg)" }
-
- it 'detects a significat number of unclosed image links' do
- msg = <<~TEXT
- _Unable to render markdown - too many unclosed markdown image links detected._
- TEXT
-
- expect(filter(long_text)).to eq(msg.strip)
- end
-
- it 'does nothing when there are only a few unclosed image links' do
- expect(filter(short_text)).to eq(short_text)
- end
-
- it 'does nothing when there are only a few unclosed image links and images' do
- expect(filter(with_images_text)).to eq(with_images_text)
- end
-end
diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index c342a831d62..12cdb5cfb95 100644
--- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -309,11 +309,12 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter do
it 'links to valid references' do
doc = reference_filter("See #{references}")
- expect(doc.css('a').map { |a| a.attr('href') }).to match_array([
- urls.project_issues_url(project, label_name: bug.name),
- urls.project_issues_url(project, label_name: feature_proposal.name),
- urls.project_issues_url(project, label_name: technical_debt.name)
- ])
+ expect(doc.css('a').map { |a| a.attr('href') }).to match_array(
+ [
+ urls.project_issues_url(project, label_name: bug.name),
+ urls.project_issues_url(project, label_name: feature_proposal.name),
+ urls.project_issues_url(project, label_name: technical_debt.name)
+ ])
expect(doc.text).to eq 'See bug, feature proposal, technical debt'
end
end
@@ -324,11 +325,12 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter do
it 'links to valid references' do
doc = reference_filter("See #{references}")
- expect(doc.css('a').map { |a| a.attr('href') }).to match_array([
- urls.project_issues_url(project, label_name: bug.name),
- urls.project_issues_url(project, label_name: feature_proposal.name),
- urls.project_issues_url(project, label_name: technical_debt.name)
- ])
+ expect(doc.css('a').map { |a| a.attr('href') }).to match_array(
+ [
+ urls.project_issues_url(project, label_name: bug.name),
+ urls.project_issues_url(project, label_name: feature_proposal.name),
+ urls.project_issues_url(project, label_name: technical_debt.name)
+ ])
expect(doc.text).to eq 'See bug feature proposal technical debt'
end
end
diff --git a/spec/lib/banzai/filter/truncate_visible_filter_spec.rb b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
new file mode 100644
index 00000000000..8daaed05264
--- /dev/null
+++ b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::TruncateVisibleFilter do
+ include FilterSpecHelper
+
+ let_it_be(:project) { build(:project, :repository) }
+ let_it_be(:max_chars) { 100 }
+ let_it_be(:user) do
+ user = create(:user, username: 'gfm')
+ project.add_maintainer(user)
+ user
+ end
+
+ # Since we're truncating nodes of an html document, actually use the
+ # full pipeline to generate full documents.
+ def convert_markdown(text, context = {})
+ Banzai::Pipeline::FullPipeline.to_html(text, { project: project }.merge(context))
+ end
+
+ shared_examples_for 'truncates text' do
+ specify do
+ html = convert_markdown(markdown)
+ doc = filter(html, { truncate_visible_max_chars: max_chars })
+
+ expect(doc.to_html).to match(expected)
+ end
+ end
+
+ describe 'displays inline code' do
+ let(:markdown) { 'Text with `inline code`' }
+ let(:expected) { 'Text with <code>inline code</code>' }
+
+ it_behaves_like 'truncates text'
+ end
+
+ describe 'truncates the text with multiple paragraphs' do
+ let(:markdown) { "Paragraph 1\n\nParagraph 2" }
+ let(:expected) { 'Paragraph 1...' }
+
+ it_behaves_like 'truncates text'
+ end
+
+ describe 'truncates the first line of a code block' do
+ let(:markdown) { "```\nCode block\nwith two lines\n```" }
+ let(:expected) { "Code block...</span>\n</code>" }
+
+ it_behaves_like 'truncates text'
+ end
+
+ describe 'preserves code color scheme' do
+ let(:max_chars) { 150 }
+ let(:markdown) { "```ruby\ndef test\n 'hello world'\nend\n```" }
+ let(:expected) do
+ '<code><span id="LC1" class="line" lang="ruby">' \
+ '<span class="k">def</span> <span class="nf">test</span>...</span>'
+ end
+
+ it_behaves_like 'truncates text'
+ end
+
+ describe 'truncates a single long line of text' do
+ let(:max_chars) { 150 }
+ let(:text) { 'The quick brown fox jumped over the lazy dog twice' } # 50 chars
+ let(:markdown) { text * 4 }
+ let(:expected) { (text * 2).sub(/.{3}/, '...') }
+
+ it_behaves_like 'truncates text'
+ end
+
+ it 'preserves a link href when link text is truncated' do
+ max_chars = 150
+ text = 'The quick brown fox jumped over the lazy dog' # 44 chars
+ link_url = 'http://example.com/foo/bar/baz' # 30 chars
+ markdown = "#{text}#{text}#{text} #{link_url}" # 163 chars
+ expected_link_text = 'http://example...</a>'
+
+ html = convert_markdown(markdown)
+ doc = filter(html, { truncate_visible_max_chars: max_chars })
+
+ expect(doc.to_html).to match(link_url)
+ expect(doc.to_html).to match(expected_link_text)
+ end
+
+ it 'truncates HTML properly' do
+ markdown = "@#{user.username}, can you look at this?\nHello world\n"
+
+ html = convert_markdown(markdown)
+ doc = filter(html, { truncate_visible_max_chars: max_chars })
+
+ # Make sure we didn't create invalid markup
+ expect(doc.errors).to be_empty
+
+ # Leading user link
+ expect(doc.css('a').length).to eq(1)
+ expect(doc.css('a')[0].attr('href')).to eq urls.user_path(user)
+ expect(doc.css('a')[0].text).to eq "@#{user.username}"
+ expect(doc.content).to eq "@#{user.username}, can you look at this?..."
+ end
+
+ it 'truncates HTML with emoji properly' do
+ markdown = "foo :wink:\nbar :grinning:"
+ # actual = first_line_in_markdown(object, attribute, 100, project: project)
+
+ html = convert_markdown(markdown)
+ doc = filter(html, { truncate_visible_max_chars: max_chars })
+
+ # Make sure we didn't create invalid markup
+ # But also account for the 2 errors caused by the unknown `gl-emoji` elements
+ expect(doc.errors.length).to eq(2)
+
+ expect(doc.css('gl-emoji').length).to eq(2)
+ expect(doc.css('gl-emoji')[0].attr('data-name')).to eq 'wink'
+ expect(doc.css('gl-emoji')[1].attr('data-name')).to eq 'grinning'
+
+ expect(doc.content).to eq "foo 😉\nbar 😀"
+ end
+
+ it 'does not truncate if truncate_visible_max_chars not specified' do
+ markdown = "@#{user.username}, can you look at this?\nHello world"
+
+ html = convert_markdown(markdown)
+ doc = filter(html)
+
+ expect(doc.content).to eq markdown
+ end
+end
diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
index 70c7c3c74fb..9807e385a5a 100644
--- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
@@ -47,6 +47,14 @@ RSpec.describe Banzai::Filter::WikiLinkFilter do
expect(filtered_link.attribute('href').value).to eq(path)
end
+
+ it 'does not rewrite links to old relative wiki path' do
+ old_wiki_base_path = wiki.wiki_base_path.sub('/-/', '/')
+ path = "#{old_wiki_base_path}/#{repository_upload_folder}/a.jpg"
+ filtered_link = filter("<a href='#{path}'>Link</a>", wiki: wiki, page_slug: 'home').children[0]
+
+ expect(filtered_link.attribute('href').value).to eq(path)
+ end
end
describe "when links point to the #{Wikis::CreateAttachmentService::ATTACHMENT_PATH} folder" do
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index c07f99dc9fc..1a0f5a53a23 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -168,15 +168,13 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
end
end
- describe 'unclosed image links' do
- it 'detects a significat number of unclosed image links' do
- markdown = '![a ' * 30
- msg = <<~TEXT
- Unable to render markdown - too many unclosed markdown image links detected.
- TEXT
- output = described_class.to_html(markdown, project: nil)
-
- expect(output).to include(msg.strip)
+ describe 'cmark-gfm and autolinks' do
+ it 'does not hang with significant number of unclosed image links' do
+ markdown = '![a ' * 300000
+
+ expect do
+ Timeout.timeout(2.seconds) { described_class.to_html(markdown, project: nil) }
+ end.not_to raise_error
end
end
end
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index e24177a7043..f67f13b3862 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Banzai::Pipeline::GfmPipeline do
describe 'integration between parsing regular and external issue references' do
- let(:project) { create(:redmine_project, :public) }
+ let(:project) { create(:project, :with_redmine_integration, :public) }
context 'when internal issue tracker is enabled' do
context 'when shorthand pattern #ISSUE_ID is used' do
diff --git a/spec/lib/bitbucket/connection_spec.rb b/spec/lib/bitbucket/connection_spec.rb
index bed44b94f44..58a05c52b9f 100644
--- a/spec/lib/bitbucket/connection_spec.rb
+++ b/spec/lib/bitbucket/connection_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Bitbucket::Connection do
+ let(:token) { 'token' }
+
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:provider).and_return(double(app_id: '', app_secret: ''))
@@ -15,7 +17,7 @@ RSpec.describe Bitbucket::Connection do
expect(instance).to receive(:get).and_return(double(parsed: true))
end
- connection = described_class.new({})
+ connection = described_class.new({ token: token })
connection.get('/users')
end
@@ -27,19 +29,19 @@ RSpec.describe Bitbucket::Connection do
expect(instance).to receive(:expired?).and_return(true)
end
- expect(described_class.new({}).expired?).to be_truthy
+ expect(described_class.new({ token: token }).expired?).to be_truthy
end
end
describe '#refresh!' do
it 'calls connection.refresh!' do
- response = double(token: nil, expires_at: nil, expires_in: nil, refresh_token: nil)
+ response = double(token: token, expires_at: nil, expires_in: nil, refresh_token: nil)
expect_next_instance_of(OAuth2::AccessToken) do |instance|
expect(instance).to receive(:refresh!).and_return(response)
end
- described_class.new({}).refresh!
+ described_class.new({ token: token }).refresh!
end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
index f03a178b993..9ea519d367e 100644
--- a/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
@@ -13,11 +13,12 @@ RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do
expect(logger)
.to receive(:info)
.with(
- bulk_import_id: entity.bulk_import.id,
+ bulk_import_id: entity.bulk_import_id,
bulk_import_entity_id: entity.id,
bulk_import_entity_type: entity.source_type,
pipeline_class: described_class.name,
- message: 'Entity finished'
+ message: 'Entity finished',
+ importer: 'gitlab_migration'
)
end
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
index f0b461e518e..5220b9d37e5 100644
--- a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do
let_it_be(:oid) { 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' }
let(:tmpdir) { Dir.mktmpdir }
- let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test', source_xid: nil) }
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:lfs_dir_path) { tmpdir }
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index f650e931dc7..7a93365d098 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -152,14 +152,14 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
context 'when importing to group' do
let(:portable) { group }
- let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test') }
+ let(:entity) { create(:bulk_import_entity, :group_entity, group: group, source_full_path: 'test', source_xid: nil) }
include_examples 'uploads import'
end
context 'when importing to project' do
let(:portable) { project }
- let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test', source_xid: nil) }
include_examples 'uploads import'
end
diff --git a/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb
index 0a04c0a2243..fabef50af8b 100644
--- a/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb
+++ b/spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb
@@ -9,15 +9,32 @@ RSpec.describe BulkImports::Common::Rest::GetBadgesQuery do
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:encoded_full_path) { ERB::Util.url_encode(entity.source_full_path) }
- it 'returns correct query and page info' do
- expected = {
- resource: [entity.pluralized_name, encoded_full_path, 'badges'].join('/'),
- query: {
- page: context.tracker.next_page
+ context 'when source id is present' do
+ it 'returns correct query using source id and page info' do
+ expected = {
+ resource: [entity.base_resource_path, 'badges'].join('/'),
+ query: {
+ page: context.tracker.next_page
+ }
}
- }
- expect(described_class.to_h(context)).to eq(expected)
+ expect(described_class.to_h(context)).to eq(expected)
+ end
+ end
+
+ context 'when source id is missing' do
+ it 'returns correct query using source full path' do
+ entity.update!(source_xid: nil)
+
+ expected = {
+ resource: ["/#{entity.pluralized_name}", encoded_full_path, 'badges'].join('/'),
+ query: {
+ page: context.tracker.next_page
+ }
+ }
+
+ expect(described_class.to_h(context)).to eq(expected)
+ end
end
end
diff --git a/spec/lib/bulk_imports/features_spec.rb b/spec/lib/bulk_imports/features_spec.rb
new file mode 100644
index 00000000000..a92e4706bbe
--- /dev/null
+++ b/spec/lib/bulk_imports/features_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Features do
+ describe '.project_migration_enabled' do
+ let_it_be(:top_level_namespace) { create(:group) }
+
+ context 'when bulk_import_projects feature flag is enabled' do
+ it 'returns true' do
+ stub_feature_flags(bulk_import_projects: true)
+
+ expect(described_class.project_migration_enabled?).to eq(true)
+ end
+
+ context 'when feature flag is enabled on root ancestor level' do
+ it 'returns true' do
+ stub_feature_flags(bulk_import_projects: top_level_namespace)
+
+ expect(described_class.project_migration_enabled?(top_level_namespace.full_path)).to eq(true)
+ end
+ end
+
+ context 'when feature flag is enabled on a different top level namespace' do
+ it 'returns false' do
+ stub_feature_flags(bulk_import_projects: top_level_namespace)
+
+          different_namespace = create(:group)
+
+          expect(described_class.project_migration_enabled?(different_namespace.full_path)).to eq(false)
+ end
+ end
+ end
+
+ context 'when bulk_import_projects feature flag is disabled' do
+ it 'returns false' do
+ stub_feature_flags(bulk_import_projects: false)
+
+ expect(described_class.project_migration_enabled?(top_level_namespace.full_path)).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
index 441a34b0c74..36b425f4f12 100644
--- a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
:bulk_import_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
- destination_name: 'My Destination Group',
+ destination_slug: 'my-destination-group',
destination_namespace: parent.full_path
)
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
index 5b6c93e695f..c07d27e973f 100644
--- a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline do
let(:extracted_data) do
BulkImports::Pipeline::ExtractedData.new(data: {
+ 'id' => 'gid://gitlab/Project/1234567',
'name' => 'project',
'full_path' => 'group/project'
})
@@ -44,6 +45,7 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline do
expect(project_entity.source_full_path).to eq('group/project')
expect(project_entity.destination_name).to eq('project')
expect(project_entity.destination_namespace).to eq(destination_group.full_path)
+ expect(project_entity.source_xid).to eq(1234567)
end
end
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index 896af865c56..32d8dc8e207 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -24,59 +24,67 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
let(:data) do
{
'name' => 'Source Group Name',
+ 'description' => 'Source Group Description',
'path' => 'source-group-path',
'full_path' => 'source/full/path',
'visibility' => 'private',
'project_creation_level' => 'developer',
- 'subgroup_creation_level' => 'maintainer'
+ 'subgroup_creation_level' => 'maintainer',
+ 'emails_disabled' => true,
+ 'lfs_enabled' => false,
+ 'mentions_disabled' => true,
+ 'share_with_group_lock' => false,
+ 'require_two_factor_authentication' => false,
+ 'two_factor_grace_period' => 100,
+ 'request_access_enabled' => false
}
end
subject { described_class.new }
it 'returns original data with some keys transformed' do
- transformed_data = subject.transform(context, { 'name' => 'Name', 'description' => 'Description' })
+ transformed_data = subject.transform(context, data)
expect(transformed_data).to eq({
- 'name' => 'Name',
- 'description' => 'Description',
+ 'name' => 'Source Group Name',
+ 'description' => 'Source Group Description',
'parent_id' => parent.id,
- 'path' => 'destination-slug-path'
+ 'path' => entity.destination_slug,
+ 'visibility_level' => Gitlab::VisibilityLevel.string_options[data['visibility']],
+ 'project_creation_level' => Gitlab::Access.project_creation_string_options[data['project_creation_level']],
+ 'subgroup_creation_level' => Gitlab::Access.subgroup_creation_string_options[data['subgroup_creation_level']],
+ 'emails_disabled' => true,
+ 'lfs_enabled' => false,
+ 'mentions_disabled' => true,
+ 'share_with_group_lock' => false,
+ 'require_two_factor_authentication' => false,
+ 'two_factor_grace_period' => 100,
+ 'request_access_enabled' => false
})
end
- it 'transforms path from destination_slug' do
- transformed_data = subject.transform(context, data)
-
- expect(transformed_data['path']).to eq(entity.destination_slug)
- end
-
- it 'removes full path' do
- transformed_data = subject.transform(context, data)
-
- expect(transformed_data).not_to have_key('full_path')
- end
-
- it 'transforms visibility level' do
- visibility = data['visibility']
- transformed_data = subject.transform(context, data)
-
- expect(transformed_data).not_to have_key('visibility')
- expect(transformed_data['visibility_level']).to eq(Gitlab::VisibilityLevel.string_options[visibility])
- end
-
- it 'transforms project creation level' do
- level = data['project_creation_level']
- transformed_data = subject.transform(context, data)
+ context 'when some fields are not present' do
+ it 'does not include those fields' do
+ data = {
+ 'name' => 'Source Group Name',
+ 'description' => 'Source Group Description',
+ 'path' => 'source-group-path',
+ 'full_path' => 'source/full/path'
+ }
- expect(transformed_data['project_creation_level']).to eq(Gitlab::Access.project_creation_string_options[level])
- end
-
- it 'transforms subgroup creation level' do
- level = data['subgroup_creation_level']
- transformed_data = subject.transform(context, data)
+ transformed_data = subject.transform(context, data)
- expect(transformed_data['subgroup_creation_level']).to eq(Gitlab::Access.subgroup_creation_string_options[level])
+ expect(transformed_data).to eq({
+ 'name' => 'Source Group Name',
+ 'path' => 'destination-slug-path',
+ 'description' => 'Source Group Description',
+ 'parent_id' => parent.id,
+ 'share_with_group_lock' => nil,
+ 'emails_disabled' => nil,
+ 'lfs_enabled' => nil,
+ 'mentions_disabled' => nil
+ })
+ end
end
describe 'parent group transformation' do
diff --git a/spec/lib/bulk_imports/network_error_spec.rb b/spec/lib/bulk_imports/network_error_spec.rb
index 11f555fee09..54d6554df96 100644
--- a/spec/lib/bulk_imports/network_error_spec.rb
+++ b/spec/lib/bulk_imports/network_error_spec.rb
@@ -46,6 +46,22 @@ RSpec.describe BulkImports::NetworkError, :clean_gitlab_redis_cache do
expect(exception.retriable?(tracker)).to eq(false)
end
end
+
+ context 'when entity is passed' do
+ it 'increments entity cache key' do
+ entity = create(:bulk_import_entity)
+ exception = described_class.new('Error!')
+
+ allow(exception).to receive(:cause).and_return(SocketError.new('Error!'))
+
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:increment)
+ .with("bulk_imports/#{entity.id}/network_error/SocketError")
+ .and_call_original
+
+ exception.retriable?(entity)
+ end
+ end
end
describe '#retry_delay' do
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 810271818ae..a5a01354d0e 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -60,7 +60,9 @@ RSpec.describe BulkImports::Pipeline::Runner do
pipeline_step: :extractor,
pipeline_class: 'BulkImports::MyPipeline',
exception_class: exception_class,
- exception_message: exception_message
+ exception_message: exception_message,
+ message: "Pipeline failed",
+ importer: 'gitlab_migration'
)
)
end
@@ -89,7 +91,8 @@ RSpec.describe BulkImports::Pipeline::Runner do
log_params(
context,
message: 'Aborting entity migration due to pipeline failure',
- pipeline_class: 'BulkImports::MyPipeline'
+ pipeline_class: 'BulkImports::MyPipeline',
+ importer: 'gitlab_migration'
)
)
end
@@ -290,9 +293,10 @@ RSpec.describe BulkImports::Pipeline::Runner do
def log_params(context, extra = {})
{
- bulk_import_id: context.bulk_import.id,
+ bulk_import_id: context.bulk_import_id,
bulk_import_entity_id: context.entity.id,
bulk_import_entity_type: context.entity.source_type,
+ importer: 'gitlab_migration',
context_extra: context.extra
}.merge(extra)
end
diff --git a/spec/lib/bulk_imports/pipeline_spec.rb b/spec/lib/bulk_imports/pipeline_spec.rb
index dc169bb8d88..72bc8bd7980 100644
--- a/spec/lib/bulk_imports/pipeline_spec.rb
+++ b/spec/lib/bulk_imports/pipeline_spec.rb
@@ -20,16 +20,17 @@ RSpec.describe BulkImports::Pipeline do
loader BulkImports::Loader, foo: :bar
end
- stub_const('BulkImports::MyPipeline', klass)
+ stub_const('BulkImports::TestWikiPipeline', klass)
end
describe 'pipeline attributes' do
describe 'getters' do
it 'retrieves class attributes' do
- expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: BulkImports::Extractor, options: { foo: :bar } })
- expect(BulkImports::MyPipeline.transformers).to contain_exactly({ klass: BulkImports::Transformer, options: { foo: :bar } })
- expect(BulkImports::MyPipeline.get_loader).to eq({ klass: BulkImports::Loader, options: { foo: :bar } })
- expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true)
+ expect(BulkImports::TestWikiPipeline.get_extractor).to eq({ klass: BulkImports::Extractor, options: { foo: :bar } })
+ expect(BulkImports::TestWikiPipeline.transformers).to contain_exactly({ klass: BulkImports::Transformer, options: { foo: :bar } })
+ expect(BulkImports::TestWikiPipeline.get_loader).to eq({ klass: BulkImports::Loader, options: { foo: :bar } })
+ expect(BulkImports::TestWikiPipeline.abort_on_failure?).to eq(true)
+ expect(BulkImports::TestWikiPipeline.relation).to eq('test_wiki')
end
context 'when extractor and loader are defined within the pipeline' do
@@ -59,23 +60,23 @@ RSpec.describe BulkImports::Pipeline do
klass = Class.new
options = { test: :test }
- BulkImports::MyPipeline.extractor(klass, options)
- BulkImports::MyPipeline.transformer(klass, options)
- BulkImports::MyPipeline.loader(klass, options)
- BulkImports::MyPipeline.abort_on_failure!
- BulkImports::MyPipeline.file_extraction_pipeline!
+ BulkImports::TestWikiPipeline.extractor(klass, options)
+ BulkImports::TestWikiPipeline.transformer(klass, options)
+ BulkImports::TestWikiPipeline.loader(klass, options)
+ BulkImports::TestWikiPipeline.abort_on_failure!
+ BulkImports::TestWikiPipeline.file_extraction_pipeline!
- expect(BulkImports::MyPipeline.get_extractor).to eq({ klass: klass, options: options })
+ expect(BulkImports::TestWikiPipeline.get_extractor).to eq({ klass: klass, options: options })
- expect(BulkImports::MyPipeline.transformers)
+ expect(BulkImports::TestWikiPipeline.transformers)
.to contain_exactly(
{ klass: BulkImports::Transformer, options: { foo: :bar } },
{ klass: klass, options: options })
- expect(BulkImports::MyPipeline.get_loader).to eq({ klass: klass, options: options })
+ expect(BulkImports::TestWikiPipeline.get_loader).to eq({ klass: klass, options: options })
- expect(BulkImports::MyPipeline.abort_on_failure?).to eq(true)
- expect(BulkImports::MyPipeline.file_extraction_pipeline?).to eq(true)
+ expect(BulkImports::TestWikiPipeline.abort_on_failure?).to eq(true)
+ expect(BulkImports::TestWikiPipeline.file_extraction_pipeline?).to eq(true)
end
end
end
@@ -87,7 +88,7 @@ RSpec.describe BulkImports::Pipeline do
expect(BulkImports::Transformer).to receive(:new).with(foo: :bar)
expect(BulkImports::Loader).to receive(:new).with(foo: :bar)
- pipeline = BulkImports::MyPipeline.new(context)
+ pipeline = BulkImports::TestWikiPipeline.new(context)
pipeline.send(:extractor)
pipeline.send(:transformers)
diff --git a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
index 39b539ece21..6a509ca7f14 100644
--- a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
@@ -8,7 +8,10 @@ RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do
let(:portable) { create(:project) }
let(:tmpdir) { Dir.mktmpdir }
let(:design_bundle_path) { File.join(tmpdir, 'design.bundle') }
- let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:entity) do
+ create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test', source_xid: nil)
+ end
+
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
index 712c37ee578..b8c21feb05d 100644
--- a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
@@ -8,7 +8,10 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do
let(:portable) { create(:project) }
let(:tmpdir) { Dir.mktmpdir }
let(:bundle_path) { File.join(tmpdir, 'repository.bundle') }
- let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:entity) do
+ create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test', source_xid: nil)
+ end
+
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index f9e08df3399..cb2da24b712 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -423,6 +423,22 @@ RSpec.describe ContainerRegistry::Client do
end
end
+ describe '#repository_tags' do
+ let(:path) { 'repository/path' }
+
+ subject { client.repository_tags(path) }
+
+ before do
+ stub_container_registry_config(enabled: true, api_url: registry_api_url, key: 'spec/fixtures/x509_certificate_pk.key')
+ end
+
+ it 'returns a successful response' do
+ stub_registry_tags_list(query_params: { n: described_class::DEFAULT_TAGS_PAGE_SIZE }, tags: %w[t1 t2])
+
+ expect(subject).to eq('tags' => %w[t1 t2])
+ end
+ end
+
describe '.registry_info' do
subject { described_class.registry_info }
@@ -458,6 +474,22 @@ RSpec.describe ContainerRegistry::Client do
)
end
+ def stub_registry_tags_list(query_params: {}, status: 200, tags: ['test_tag'])
+ url = "#{registry_api_url}/v2/#{path}/tags/list"
+
+ unless query_params.empty?
+ url += "?"
+ url += query_params.map { |k, v| "#{k}=#{v}" }.join(',')
+ end
+
+ stub_request(:get, url)
+ .with(headers: { 'Accept' => ContainerRegistry::Client::ACCEPTED_TYPES.join(', ') })
+ .to_return(
+ status: status,
+ body: Gitlab::Json.dump(tags: tags),
+ headers: { 'Content-Type' => 'application/json' })
+ end
+
def expect_new_faraday(times: 1, timeout: true)
request_options = timeout ? expected_faraday_request_options : nil
expect(Faraday)
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index f19bedbda0e..7d78aad8b13 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -307,7 +307,16 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
stub_tags(path, page_size: page_size, status_code: 404)
end
- it { is_expected.to eq({}) }
+ it 'logs an error and returns an empty hash' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception).with(
+ instance_of(described_class::UnsuccessfulResponseError),
+ class: described_class.name,
+ url: "/gitlab/v1/repositories/#{path}/tags/list/",
+ status_code: 404
+ )
+ expect(subject).to eq({})
+ end
end
end
diff --git a/spec/lib/csv_builders/stream_spec.rb b/spec/lib/csv_builders/stream_spec.rb
index 204baf965d0..7df55fe4230 100644
--- a/spec/lib/csv_builders/stream_spec.rb
+++ b/spec/lib/csv_builders/stream_spec.rb
@@ -25,18 +25,20 @@ RSpec.describe CsvBuilders::Stream do
end
it 'returns all rows up to default max value' do
- expect(builder.render.to_a).to eq([
- "Title,Description\n",
- "Added salt,A teaspoon\n",
- "Added sugar,Just a pinch\n"
- ])
+ expect(builder.render.to_a).to eq(
+ [
+ "Title,Description\n",
+ "Added salt,A teaspoon\n",
+ "Added sugar,Just a pinch\n"
+ ])
end
it 'truncates to max rows' do
- expect(builder.render(1).to_a).to eq([
- "Title,Description\n",
- "Added salt,A teaspoon\n"
- ])
+ expect(builder.render(1).to_a).to eq(
+ [
+ "Title,Description\n",
+ "Added salt,A teaspoon\n"
+ ])
end
end
end
diff --git a/spec/lib/expand_variables_spec.rb b/spec/lib/expand_variables_spec.rb
index 1108d26b2a9..0c5d587d8e8 100644
--- a/spec/lib/expand_variables_spec.rb
+++ b/spec/lib/expand_variables_spec.rb
@@ -87,9 +87,7 @@ RSpec.describe ExpandVariables do
"simple expansion using Collection": {
value: 'key$variable',
result: 'keyvalue',
- variables: Gitlab::Ci::Variables::Collection.new([
- { key: 'variable', value: 'value' }
- ])
+ variables: Gitlab::Ci::Variables::Collection.new([{ key: 'variable', value: 'value' }])
}
}
end
diff --git a/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb b/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb
index 34c5bd6c6ae..06438f8497d 100644
--- a/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/usage_trends/workers_argument_builder_spec.rb
@@ -23,10 +23,11 @@ RSpec.describe Gitlab::Analytics::UsageTrends::WorkersArgumentBuilder do
subject { described_class.new(measurement_identifiers: measurement_identifiers, recorded_at: recorded_at).execute }
it 'returns worker arguments' do
- expect(subject).to eq([
- [projects_measurement_identifier, project_1.id, project_3.id, recorded_at],
- [users_measurement_identifier, user_1.id, user_1.id, recorded_at]
- ])
+ expect(subject).to eq(
+ [
+ [projects_measurement_identifier, project_1.id, project_3.id, recorded_at],
+ [users_measurement_identifier, user_1.id, user_1.id, recorded_at]
+ ])
end
context 'when bogus measurement identifiers are given' do
@@ -36,10 +37,11 @@ RSpec.describe Gitlab::Analytics::UsageTrends::WorkersArgumentBuilder do
end
it 'skips bogus measurement identifiers' do
- expect(subject).to eq([
- [projects_measurement_identifier, project_1.id, project_3.id, recorded_at],
- [users_measurement_identifier, user_1.id, user_1.id, recorded_at]
- ])
+ expect(subject).to eq(
+ [
+ [projects_measurement_identifier, project_1.id, project_3.id, recorded_at],
+ [users_measurement_identifier, user_1.id, user_1.id, recorded_at]
+ ])
end
end
diff --git a/spec/lib/gitlab/anonymous_session_spec.rb b/spec/lib/gitlab/anonymous_session_spec.rb
index 64186e9003a..08087096d49 100644
--- a/spec/lib/gitlab/anonymous_session_spec.rb
+++ b/spec/lib/gitlab/anonymous_session_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_sessions do
subject.cleanup_session_per_ip_count
Gitlab::Redis::Sessions.with do |redis|
- expect(redis.exists("session:lookup:ip:gitlab2:127.0.0.1")).to eq(false)
+ expect(redis.exists?("session:lookup:ip:gitlab2:127.0.0.1")).to eq(false)
end
end
end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index d0b44135a2f..e2226952d15 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -188,7 +188,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
it 'returns nil if valid feed_token and disabled' do
- stub_application_setting(disable_feed_token: true)
+ allow(Gitlab::CurrentSettings).to receive_messages(disable_feed_token: true)
set_param(:feed_token, user.feed_token)
expect(find_user_from_feed_token(:rss)).to be_nil
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index b160f322fb8..95a518afcf1 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::OAuth::User do
include LdapHelpers
- include TermsHelper
let(:oauth_user) { described_class.new(auth_hash) }
let(:oauth_user_2) { described_class.new(auth_hash_2) }
@@ -145,49 +144,6 @@ RSpec.describe Gitlab::Auth::OAuth::User do
expect(gl_user).to be_password_automatically_set
end
- context 'terms of service' do
- context 'when terms are enforced' do
- before do
- enforce_terms
- end
-
- context 'when feature flag update_oauth_registration_flow is enabled' do
- before do
- stub_feature_flags(update_oauth_registration_flow: true)
- end
-
- it 'creates the user with accepted terms' do
- oauth_user.save # rubocop:disable Rails/SaveBang
-
- expect(gl_user).to be_persisted
- expect(gl_user.terms_accepted?).to be(true)
- end
- end
-
- context 'when feature flag update_oauth_registration_flow is disabled' do
- before do
- stub_feature_flags(update_oauth_registration_flow: false)
- end
-
- it 'creates the user without accepted terms' do
- oauth_user.save # rubocop:disable Rails/SaveBang
-
- expect(gl_user).to be_persisted
- expect(gl_user.terms_accepted?).to be(false)
- end
- end
- end
-
- context 'when terms are not enforced' do
- it 'creates the user without accepted terms' do
- oauth_user.save # rubocop:disable Rails/SaveBang
-
- expect(gl_user).to be_persisted
- expect(gl_user.terms_accepted?).to be(false)
- end
- end
- end
-
shared_examples 'to verify compliance with allow_single_sign_on' do
context 'provider is marked as external' do
it 'marks user as external' do
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index c2d64aa2fb3..5a6fa7c416b 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -495,6 +495,12 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
end
end
+
+ it 'updates last_used_at column if token is valid' do
+ personal_access_token = create(:personal_access_token, scopes: ['write_repository'])
+
+ expect { gl_auth.find_for_git_client('', personal_access_token.token, project: nil, ip: 'ip') }.to change { personal_access_token.reload.last_used_at }
+ end
end
context 'while using regular user and password' do
diff --git a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
index d8a7ec775dd..e6588644b4f 100644
--- a/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_integrations_type_new_spec.rb
@@ -7,13 +7,14 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew, :migrat
let(:integrations) { table(:integrations) }
let(:namespaced_integrations) do
- Set.new(%w[
- Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
- Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Harbor Irker Jenkins Jira Mattermost
- MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
- Prometheus Pushover Redmine Shimo Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao
- Github GitlabSlackApplication
- ]).freeze
+ Set.new(
+ %w[
+ Asana Assembla Bamboo Bugzilla Buildkite Campfire Confluence CustomIssueTracker Datadog
+ Discord DroneCi EmailsOnPush Ewm ExternalWiki Flowdock HangoutsChat Harbor Irker Jenkins Jira Mattermost
+ MattermostSlashCommands MicrosoftTeams MockCi MockMonitoring Packagist PipelinesEmail Pivotaltracker
+ Prometheus Pushover Redmine Shimo Slack SlackSlashCommands Teamcity UnifyCircuit WebexTeams Youtrack Zentao
+ Github GitlabSlackApplication
+ ]).freeze
end
before do
@@ -40,13 +41,14 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillIntegrationsTypeNew, :migrat
expect(queries.count).to be(16)
expect(queries.log.grep(/^SELECT/).size).to be(11)
expect(queries.log.grep(/^UPDATE/).size).to be(5)
- expect(queries.log.grep(/^UPDATE/).join.scan(/WHERE .*/)).to eq([
- 'WHERE integrations.id BETWEEN 2 AND 3',
- 'WHERE integrations.id BETWEEN 4 AND 5',
- 'WHERE integrations.id BETWEEN 6 AND 7',
- 'WHERE integrations.id BETWEEN 8 AND 9',
- 'WHERE integrations.id BETWEEN 10 AND 10'
- ])
+ expect(queries.log.grep(/^UPDATE/).join.scan(/WHERE .*/)).to eq(
+ [
+ 'WHERE integrations.id BETWEEN 2 AND 3',
+ 'WHERE integrations.id BETWEEN 4 AND 5',
+ 'WHERE integrations.id BETWEEN 6 AND 7',
+ 'WHERE integrations.id BETWEEN 8 AND 9',
+ 'WHERE integrations.id BETWEEN 10 AND 10'
+ ])
expect(integrations.where(id: 2..10).pluck(:type, :type_new)).to contain_exactly(
['AssemblaService', 'Integrations::Assembla'],
diff --git a/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb b/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
new file mode 100644
index 00000000000..40a4758ba5f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_internal_on_notes_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillInternalOnNotes, :migration, schema: 20220920124709 do
+ let(:notes_table) { table(:notes) }
+
+ let!(:confidential_note) { notes_table.create!(id: 1, confidential: true, internal: false) }
+ let!(:non_confidential_note) { notes_table.create!(id: 2, confidential: false, internal: false) }
+
+ describe '#perform' do
+ subject(:perform) do
+ described_class.new(
+ start_id: 1,
+ end_id: 2,
+ batch_table: :notes,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ ).perform
+ end
+
+ it 'backfills internal column on notes when confidential' do
+ expect { perform }
+ .to change { confidential_note.reload.internal }.from(false).to(true)
+ .and not_change { non_confidential_note.reload.internal }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_details_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_details_spec.rb
new file mode 100644
index 00000000000..b6282de0da6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_details_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceDetails, :migration do
+ let(:namespaces) { table(:namespaces) }
+ let(:namespace_details) { table(:namespace_details) }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: namespaces.minimum(:id),
+ end_id: namespaces.maximum(:id),
+ batch_table: :namespaces,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ describe '#perform' do
+ it 'creates details for all namespaces in range' do
+ namespace1 = namespaces.create!(id: 5, name: 'test1', path: 'test1', description: "Some description1",
+ description_html: "Some description html1", cached_markdown_version: 4)
+ namespaces.create!(id: 6, name: 'test2', path: 'test2', type: 'Project',
+ description: "Some description2", description_html: "Some description html2",
+ cached_markdown_version: 4)
+ namespace3 = namespaces.create!(id: 7, name: 'test3', path: 'test3', description: "Some description3",
+ description_html: "Some description html3", cached_markdown_version: 4)
+ namespace4 = namespaces.create!(id: 8, name: 'test4', path: 'test4', description: "Some description3",
+ description_html: "Some description html4", cached_markdown_version: 4)
+ namespace_details.delete_all
+
+ expect(namespace_details.pluck(:namespace_id)).to eql []
+
+ expect { perform_migration }
+ .to change { namespace_details.pluck(:namespace_id) }.from([]).to contain_exactly(
+ namespace1.id,
+ namespace3.id,
+ namespace4.id
+ )
+
+ expect(namespace_details.find_by_namespace_id(namespace1.id)).to have_attributes(migrated_attributes(namespace1))
+ expect(namespace_details.find_by_namespace_id(namespace3.id)).to have_attributes(migrated_attributes(namespace3))
+ expect(namespace_details.find_by_namespace_id(namespace4.id)).to have_attributes(migrated_attributes(namespace4))
+ end
+ end
+
+ def migrated_attributes(namespace)
+ {
+ description: namespace.description,
+ description_html: namespace.description_html,
+ cached_markdown_version: namespace.cached_markdown_version
+ }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
index 8a3671b2e53..dd202acc372 100644
--- a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
@@ -40,23 +40,26 @@ RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, s
it 'lfs_objects_projects without an existing lfs object or project are removed' do
subject.perform(without_object1.id, without_object3.id)
- expect(lfs_objects_projects.all).to match_array([
- with_project_and_object1, with_project_and_object2, with_project_and_object3,
- without_project1, without_project2, without_project_and_object
- ])
+ expect(lfs_objects_projects.all).to match_array(
+ [
+ with_project_and_object1, with_project_and_object2, with_project_and_object3,
+ without_project1, without_project2, without_project_and_object
+ ])
subject.perform(with_project_and_object1.id, with_project_and_object3.id)
- expect(lfs_objects_projects.all).to match_array([
- with_project_and_object1, with_project_and_object2, with_project_and_object3,
- without_project1, without_project2, without_project_and_object
- ])
+ expect(lfs_objects_projects.all).to match_array(
+ [
+ with_project_and_object1, with_project_and_object2, with_project_and_object3,
+ without_project1, without_project2, without_project_and_object
+ ])
subject.perform(without_project1.id, without_project_and_object.id)
- expect(lfs_objects_projects.all).to match_array([
- with_project_and_object1, with_project_and_object2, with_project_and_object3
- ])
+ expect(lfs_objects_projects.all).to match_array(
+ [
+ with_project_and_object1, with_project_and_object2, with_project_and_object3
+ ])
expect(lfs_objects.ids).to contain_exactly(lfs_object.id, another_lfs_object.id)
expect(projects.ids).to contain_exactly(project.id, another_project.id)
diff --git a/spec/lib/gitlab/background_migration/delete_orphaned_operational_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/delete_orphaned_operational_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..afa955a6056
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/delete_orphaned_operational_vulnerabilities_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedOperationalVulnerabilities, :migration do
+ include MigrationHelpers::VulnerabilitiesHelper
+
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let_it_be(:users) { table(:users) }
+ let_it_be(:user) do
+ users.create!(
+ name: "Example User",
+ email: "user@example.com",
+ username: "Example User",
+ projects_limit: 0,
+ confirmed_at: Time.current
+ )
+ end
+
+ let_it_be(:project) do
+ table(:projects).create!(
+ id: 123,
+ namespace_id: namespace.id,
+ project_namespace_id: namespace.id
+ )
+ end
+
+ let_it_be(:scanners) { table(:vulnerability_scanners) }
+ let_it_be(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let_it_be(:different_scanner) do
+ scanners.create!(
+ project_id: project.id,
+ external_id: 'test 2',
+ name: 'test scanner 2'
+ )
+ end
+
+ let_it_be(:vulnerabilities) { table(:vulnerabilities) }
+ let_it_be(:vulnerability_with_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_without_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:cis_vulnerability_without_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ report_type: 7
+ )
+ end
+
+ let_it_be(:custom_vulnerability_without_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id,
+ report_type: 99
+ )
+ end
+
+ let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let_it_be(:primary_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let_it_be(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let_it_be(:finding) do
+ create_finding!(
+ vulnerability_id: vulnerability_with_finding.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: primary_identifier.id
+ )
+ end
+
+ subject(:background_migration) do
+ described_class.new(start_id: vulnerabilities.minimum(:id),
+ end_id: vulnerabilities.maximum(:id),
+ batch_table: :vulnerabilities,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ end
+
+ it 'drops Cluster Image Scanning and Custom Vulnerabilities without any Findings' do
+ expect(vulnerabilities.pluck(:id)).to match_array([
+ vulnerability_with_finding.id,
+ vulnerability_without_finding.id,
+ cis_vulnerability_without_finding.id,
+ custom_vulnerability_without_finding.id
+ ])
+
+ expect { background_migration.perform }.to change(vulnerabilities, :count).by(-2)
+
+ expect(vulnerabilities.pluck(:id)).to match_array([vulnerability_with_finding.id, vulnerability_without_finding.id])
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb b/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
new file mode 100644
index 00000000000..9b0cb96b30b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/destroy_invalid_members_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# rubocop: disable RSpec/MultipleMemoizedHelpers
+RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidMembers, :migration, schema: 20221004094814 do
+ let!(:migration_attrs) do
+ {
+ start_id: 1,
+ end_id: 1000,
+ batch_table: :members,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ let(:users_table) { table(:users) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:members_table) { table(:members) }
+ let(:projects_table) { table(:projects) }
+ let(:members_table_name) { 'members' }
+ let(:connection) { ApplicationRecord.connection }
+ let(:user1) { users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 5) }
+ let(:user2) { users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 5) }
+ let(:user3) { users_table.create!(name: 'user3', email: 'user3@example.com', projects_limit: 5) }
+ let(:user4) { users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 5) }
+ let(:user5) { users_table.create!(name: 'user5', email: 'user5@example.com', projects_limit: 5) }
+ let(:user6) { users_table.create!(name: 'user6', email: 'user6@example.com', projects_limit: 5) }
+ let(:user7) { users_table.create!(name: 'user7', email: 'user7@example.com', projects_limit: 5) }
+ let(:user8) { users_table.create!(name: 'user8', email: 'user8@example.com', projects_limit: 5) }
+ let!(:group1) { namespaces_table.create!(name: 'marvellous group 1', path: 'group-path-1', type: 'Group') }
+ let!(:group2) { namespaces_table.create!(name: 'outstanding group 2', path: 'group-path-2', type: 'Group') }
+ let!(:project_namespace1) do
+ namespaces_table.create!(name: 'fabulous project', path: 'project-path-1',
+ type: 'ProjectNamespace', parent_id: group1.id)
+ end
+
+ let!(:project1) do
+ projects_table.create!(name: 'fabulous project', path: 'project-path-1',
+ project_namespace_id: project_namespace1.id, namespace_id: group1.id)
+ end
+
+ let!(:project_namespace2) do
+ namespaces_table.create!(name: 'splendiferous project', path: 'project-path-2',
+ type: 'ProjectNamespace', parent_id: group1.id)
+ end
+
+ let!(:project2) do
+ projects_table.create!(name: 'splendiferous project', path: 'project-path-2',
+ project_namespace_id: project_namespace2.id, namespace_id: group1.id)
+ end
+
+ # create valid project member records
+ let!(:project_member1) { create_valid_project_member(id: 1, user_id: user1.id, project: project1) }
+ let!(:project_member2) { create_valid_project_member(id: 2, user_id: user2.id, project: project2) }
+ # create valid group member records
+ let!(:group_member5) { create_valid_group_member(id: 5, user_id: user5.id, group_id: group1.id) }
+ let!(:group_member6) { create_valid_group_member(id: 6, user_id: user6.id, group_id: group2.id) }
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ subject(:perform_migration) { migration.perform }
+
+ # create invalid project and group member records
+ def create_members
+ [
+ create_invalid_project_member(id: 3, user_id: user3.id),
+ create_invalid_project_member(id: 4, user_id: user4.id),
+ create_invalid_group_member(id: 7, user_id: user7.id),
+ create_invalid_group_member(id: 8, user_id: user8.id)
+ ]
+ end
+
+ it 'removes invalid memberships but keeps valid ones', :aggregate_failures do
+ without_check_constraint(members_table_name, 'check_508774aac0', connection: connection) do
+ create_members
+
+ expect(members_table.count).to eq 8
+
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ expect(queries.count).to eq(4)
+ expect(members_table.all).to match_array([project_member1, project_member2, group_member5, group_member6])
+ end
+ end
+
+ it 'tracks timings of queries' do
+ without_check_constraint(members_table_name, 'check_508774aac0', connection: connection) do
+ create_members
+
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+ end
+
+ it 'logs IDs of deleted records' do
+ without_check_constraint(members_table_name, 'check_508774aac0', connection: connection) do
+ members = create_members
+
+ member_data = members.map do |m|
+ { id: m.id, source_id: m.source_id, source_type: m.source_type }
+ end
+
+ expect(Gitlab::AppLogger).to receive(:info).with({ message: 'Removing invalid member records',
+ deleted_count: 4,
+ deleted_member_data: member_data })
+
+ perform_migration
+ end
+ end
+
+ def create_invalid_project_member(id:, user_id:)
+ members_table.create!(id: id, user_id: user_id, source_id: non_existing_record_id,
+ access_level: Gitlab::Access::MAINTAINER, type: "ProjectMember",
+ source_type: "Project", notification_level: 3, member_namespace_id: nil)
+ end
+
+ def create_valid_project_member(id:, user_id:, project:)
+ members_table.create!(id: id, user_id: user_id, source_id: project.id,
+ access_level: Gitlab::Access::MAINTAINER, type: "ProjectMember", source_type: "Project",
+ member_namespace_id: project.project_namespace_id, notification_level: 3)
+ end
+
+ def create_invalid_group_member(id:, user_id:)
+ members_table.create!(id: id, user_id: user_id, source_id: non_existing_record_id,
+ access_level: Gitlab::Access::MAINTAINER, type: "GroupMember",
+ source_type: "Namespace", notification_level: 3, member_namespace_id: nil)
+ end
+
+ def create_valid_group_member(id:, user_id:, group_id:)
+ members_table.create!(id: id, user_id: user_id, source_id: group_id,
+ access_level: Gitlab::Access::MAINTAINER, type: "GroupMember",
+ source_type: "Namespace", member_namespace_id: group_id, notification_level: 3)
+ end
+end
+# rubocop: enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
index 31b6ee0c7cd..c3ae2cc060c 100644
--- a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
@@ -79,10 +79,11 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers,
it 'produces a union of the given queries' do
alice = commit_users.create!(name: 'Alice', email: 'alice@example.com')
bob = commit_users.create!(name: 'Bob', email: 'bob@example.com')
- users = commit_users.union([
- commit_users.where(name: 'Alice').to_sql,
- commit_users.where(name: 'Bob').to_sql
- ])
+ users = commit_users.union(
+ [
+ commit_users.where(name: 'Alice').to_sql,
+ commit_users.where(name: 'Bob').to_sql
+ ])
expect(users).to include(alice)
expect(users).to include(bob)
diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
index a609227be05..29cc4f34f6d 100644
--- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
+++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -246,9 +246,15 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
end
it 'drops duplicates and related records', :aggregate_failures do
- expect(vulnerability_findings.pluck(:id)).to match_array([
- finding_with_correct_uuid.id, finding_with_incorrect_uuid.id, finding_with_correct_uuid2.id, finding_with_incorrect_uuid2.id, finding_with_incorrect_uuid3.id, duplicate_not_in_the_same_batch.id
- ])
+ expect(vulnerability_findings.pluck(:id)).to match_array(
+ [
+ finding_with_correct_uuid.id,
+ finding_with_incorrect_uuid.id,
+ finding_with_correct_uuid2.id,
+ finding_with_incorrect_uuid2.id,
+ finding_with_incorrect_uuid3.id,
+ duplicate_not_in_the_same_batch.id
+ ])
expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8)
.and change(vulnerability_findings, :count).from(6).to(3)
@@ -306,7 +312,8 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
it 'retries the recalculation' do
subject
- expect(Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding).to have_received(:find_by).with(uuid: uuid).once
+ expect(Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding)
+ .to have_received(:find_by).with(uuid: uuid).once
end
it 'logs the conflict' do
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb
new file mode 100644
index 00000000000..b6da8f7fc2d
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_encrypted_values_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenEncryptedValues,
+ :migration,
+ schema: 20220922143634 do
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchedMigrationJob }
+
+ describe '#perform' do
+ let(:ci_runners) { table(:ci_runners, database: :ci) }
+
+ let(:test_worker) do
+ described_class.new(
+ start_id: 1,
+ end_id: 4,
+ batch_table: :ci_runners,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ )
+ end
+
+ subject(:perform) { test_worker.perform }
+
+ before do
+ ci_runners.create!(id: 1, runner_type: 1, token_encrypted: 'duplicate')
+ ci_runners.create!(id: 2, runner_type: 1, token_encrypted: 'a-token')
+ ci_runners.create!(id: 3, runner_type: 1, token_encrypted: 'duplicate-2')
+ ci_runners.create!(id: 4, runner_type: 1, token_encrypted: nil)
+ ci_runners.create!(id: 5, runner_type: 1, token_encrypted: 'duplicate-2')
+ ci_runners.create!(id: 6, runner_type: 1, token_encrypted: 'duplicate')
+ ci_runners.create!(id: 7, runner_type: 1, token_encrypted: 'another-token')
+ ci_runners.create!(id: 8, runner_type: 1, token_encrypted: 'another-token')
+ end
+
+ it 'nullifies duplicate encrypted tokens', :aggregate_failures do
+ expect { perform }.to change { ci_runners.all.order(:id).pluck(:id, :token_encrypted).to_h }
+ .from(
+ {
+ 1 => 'duplicate',
+ 2 => 'a-token',
+ 3 => 'duplicate-2',
+ 4 => nil,
+ 5 => 'duplicate-2',
+ 6 => 'duplicate',
+ 7 => 'another-token',
+ 8 => 'another-token'
+ }
+ )
+ .to(
+ {
+ 1 => nil,
+ 2 => 'a-token',
+ 3 => nil,
+ 4 => nil,
+ 5 => nil,
+ 6 => nil,
+ 7 => 'another-token',
+ 8 => 'another-token'
+ }
+ )
+ expect(ci_runners.count).to eq(8)
+ expect(ci_runners.pluck(:token_encrypted).uniq).to match_array [
+ nil, 'a-token', 'another-token'
+ ]
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb
new file mode 100644
index 00000000000..423b1815e75
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/reset_duplicate_ci_runners_token_values_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ResetDuplicateCiRunnersTokenValues,
+ :migration,
+ schema: 20220922143143 do
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchedMigrationJob }
+
+ describe '#perform' do
+ let(:ci_runners) { table(:ci_runners, database: :ci) }
+
+ let(:test_worker) do
+ described_class.new(
+ start_id: 1,
+ end_id: 4,
+ batch_table: :ci_runners,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: Ci::ApplicationRecord.connection
+ )
+ end
+
+ subject(:perform) { test_worker.perform }
+
+ before do
+ ci_runners.create!(id: 1, runner_type: 1, token: 'duplicate')
+ ci_runners.create!(id: 2, runner_type: 1, token: 'a-token')
+ ci_runners.create!(id: 3, runner_type: 1, token: 'duplicate-2')
+ ci_runners.create!(id: 4, runner_type: 1, token: nil)
+ ci_runners.create!(id: 5, runner_type: 1, token: 'duplicate-2')
+ ci_runners.create!(id: 6, runner_type: 1, token: 'duplicate')
+ ci_runners.create!(id: 7, runner_type: 1, token: 'another-token')
+ ci_runners.create!(id: 8, runner_type: 1, token: 'another-token')
+ end
+
+ it 'nullifies duplicate tokens', :aggregate_failures do
+ expect { perform }.to change { ci_runners.all.order(:id).pluck(:id, :token).to_h }
+ .from(
+ {
+ 1 => 'duplicate',
+ 2 => 'a-token',
+ 3 => 'duplicate-2',
+ 4 => nil,
+ 5 => 'duplicate-2',
+ 6 => 'duplicate',
+ 7 => 'another-token',
+ 8 => 'another-token'
+ }
+ )
+ .to(
+ {
+ 1 => nil,
+ 2 => 'a-token',
+ 3 => nil,
+ 4 => nil,
+ 5 => nil,
+ 6 => nil,
+ 7 => 'another-token',
+ 8 => 'another-token'
+ }
+ )
+ expect(ci_runners.count).to eq(8)
+ expect(ci_runners.pluck(:token).uniq).to match_array [
+ nil, 'a-token', 'another-token'
+ ]
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb b/spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb
new file mode 100644
index 00000000000..98939e15952
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/update_ci_pipeline_artifacts_unknown_locked_status_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::UpdateCiPipelineArtifactsUnknownLockedStatus do
+ describe '#perform' do
+ let(:batch_table) { :ci_pipeline_artifacts }
+ let(:batch_column) { :id }
+
+ let(:sub_batch_size) { 1 }
+ let(:pause_ms) { 0 }
+ let(:connection) { Ci::ApplicationRecord.connection }
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:pipelines) { table(:ci_pipelines, database: :ci) }
+ let(:pipeline_artifacts) { table(:ci_pipeline_artifacts, database: :ci) }
+
+ let(:namespace) { namespaces.create!(name: 'name', path: 'path') }
+ let(:project) do
+ projects
+ .create!(name: "project", path: "project", namespace_id: namespace.id, project_namespace_id: namespace.id)
+ end
+
+ let(:unlocked) { 0 }
+ let(:locked) { 1 }
+ let(:unknown) { 2 }
+
+ let(:unlocked_pipeline) { pipelines.create!(locked: unlocked) }
+ let(:locked_pipeline) { pipelines.create!(locked: locked) }
+
+ # rubocop:disable Layout/LineLength
+ let!(:locked_artifact) { pipeline_artifacts.create!(project_id: project.id, pipeline_id: locked_pipeline.id, size: 1024, file_type: 0, file_format: 'gzip', file: 'a.gz', locked: unknown) }
+ let!(:unlocked_artifact_1) { pipeline_artifacts.create!(project_id: project.id, pipeline_id: unlocked_pipeline.id, size: 2048, file_type: 1, file_format: 'raw', file: 'b', locked: unknown) }
+ let!(:unlocked_artifact_2) { pipeline_artifacts.create!(project_id: project.id, pipeline_id: unlocked_pipeline.id, size: 4096, file_type: 2, file_format: 'gzip', file: 'c.gz', locked: unknown) }
+ let!(:already_unlocked_artifact) { pipeline_artifacts.create!(project_id: project.id, pipeline_id: unlocked_pipeline.id, size: 8192, file_type: 3, file_format: 'raw', file: 'd', locked: unlocked) }
+ let!(:already_locked_artifact) { pipeline_artifacts.create!(project_id: project.id, pipeline_id: locked_pipeline.id, size: 8192, file_type: 3, file_format: 'raw', file: 'd', locked: locked) }
+ # rubocop:enable Layout/LineLength
+
+ subject do
+ described_class.new(
+ start_id: locked_artifact.id,
+ end_id: already_locked_artifact.id,
+ batch_table: batch_table,
+ batch_column: batch_column,
+ sub_batch_size: sub_batch_size,
+ pause_ms: pause_ms,
+ connection: connection
+ ).perform
+ end
+
+ it 'updates ci_pipeline_artifacts with unknown lock status' do
+ subject
+
+ expect(locked_artifact.reload.locked).to eq(locked)
+ expect(unlocked_artifact_1.reload.locked).to eq(unlocked)
+ expect(unlocked_artifact_2.reload.locked).to eq(unlocked)
+ expect(already_unlocked_artifact.reload.locked).to eq(unlocked)
+ expect(already_locked_artifact.reload.locked).to eq(locked)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
index 8fb903154f3..3a885d70eb4 100644
--- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
@@ -192,6 +192,6 @@ RSpec.describe Gitlab::BareRepositoryImport::Importer do
cmd = %W(#{Gitlab.config.git.bin_path} clone --bare #{source_project} #{repo_path})
- system(git_env, *cmd, chdir: SEED_STORAGE_PATH, out: '/dev/null', err: '/dev/null')
+ system(git_env, *cmd, chdir: base_dir, out: '/dev/null', err: '/dev/null')
end
end
diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
index becfdced5fb..a9778e0e8a7 100644
--- a/spec/lib/gitlab/bare_repository_import/repository_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe ::Gitlab::BareRepositoryImport::Repository do
context 'hashed storage' do
let(:hashed_path) { "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" }
- let(:root_path) { TestEnv.repos_path }
+ let(:root_path) { Gitlab::GitalyClient::StorageSettings.allow_disk_access { TestEnv.repos_path } }
let(:repo_path) { File.join(root_path, "#{hashed_path}.git") }
let(:wiki_path) { File.join(root_path, "#{hashed_path}.wiki.git") }
let(:raw_repository) { Gitlab::Git::Repository.new('default', "#{hashed_path}.git", nil, nil) }
diff --git a/spec/lib/gitlab/batch_pop_queueing_spec.rb b/spec/lib/gitlab/batch_pop_queueing_spec.rb
deleted file mode 100644
index 5af78ddabe7..00000000000
--- a/spec/lib/gitlab/batch_pop_queueing_spec.rb
+++ /dev/null
@@ -1,147 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BatchPopQueueing do
- include ExclusiveLeaseHelpers
- using RSpec::Parameterized::TableSyntax
-
- describe '#initialize' do
- where(:namespace, :queue_id, :expect_error, :error_type) do
- 'feature' | '1' | false | nil
- :feature | '1' | false | nil
- nil | '1' | true | NoMethodError
- 'feature' | nil | true | NoMethodError
- '' | '1' | true | ArgumentError
- 'feature' | '' | true | ArgumentError
- 'feature' | 1 | true | NoMethodError
- end
-
- with_them do
- it do
- if expect_error
- expect { described_class.new(namespace, queue_id) }.to raise_error(error_type)
- else
- expect { described_class.new(namespace, queue_id) }.not_to raise_error
- end
- end
- end
- end
-
- describe '#safe_execute', :clean_gitlab_redis_queues do
- subject { queue.safe_execute(new_items, lock_timeout: lock_timeout) }
-
- let(:queue) { described_class.new(namespace, queue_id) }
- let(:namespace) { 'feature' }
- let(:queue_id) { '1' }
- let(:lock_timeout) { 10.minutes }
- let(:new_items) { %w[A B] }
- let(:lock_key) { queue.send(:lock_key) }
- let(:queue_key) { queue.send(:queue_key) }
-
- it 'enqueues new items always' do
- Gitlab::Redis::Queues.with do |redis|
- expect(redis).to receive(:sadd).with(queue_key, new_items)
- expect(redis).to receive(:expire).with(queue_key, (lock_timeout + described_class::EXTRA_QUEUE_EXPIRE_WINDOW).to_i)
- end
-
- subject
- end
-
- it 'yields the new items with exclusive lease' do
- uuid = 'test'
- expect_to_obtain_exclusive_lease(lock_key, uuid, timeout: lock_timeout)
- expect_to_cancel_exclusive_lease(lock_key, uuid)
-
- expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
- .to yield_with_args(match_array(new_items))
- end
-
- it 'returns the result and no items in the queue' do
- expect(subject[:status]).to eq(:finished)
- expect(subject[:new_items]).to be_empty
-
- Gitlab::Redis::Queues.with do |redis|
- expect(redis.llen(queue_key)).to be(0)
- end
- end
-
- context 'when new items are enqueued during the process' do
- it 'returns the result with newly added items' do
- result = queue.safe_execute(new_items) do
- queue.safe_execute(['C'])
- end
-
- expect(result[:status]).to eq(:finished)
- expect(result[:new_items]).to eq(['C'])
-
- Gitlab::Redis::Queues.with do |redis|
- expect(redis.scard(queue_key)).to be(1)
- end
- end
- end
-
- context 'when interger items are enqueued' do
- let(:new_items) { [1, 2, 3] }
-
- it 'yields as String values' do
- expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
- .to yield_with_args(%w[1 2 3])
- end
- end
-
- context 'when the queue key does not exist in Redis' do
- before do
- allow(queue).to receive(:enqueue) {}
- end
-
- it 'yields empty array' do
- expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
- .to yield_with_args([])
- end
- end
-
- context 'when the other process has already been working on the queue' do
- before do
- stub_exclusive_lease_taken(lock_key, timeout: lock_timeout)
- end
-
- it 'does not yield the block' do
- expect { |b| queue.safe_execute(new_items, lock_timeout: lock_timeout, &b) }
- .not_to yield_control
- end
-
- it 'returns the result' do
- expect(subject[:status]).to eq(:enqueued)
- end
- end
-
- context 'when a duplicate item is enqueued' do
- it 'returns the poped items to the queue and raise an error' do
- expect { |b| queue.safe_execute(%w[1 1 2 2], &b) }
- .to yield_with_args(match_array(%w[1 2]))
- end
- end
-
- context 'when there are two queues' do
- it 'enqueues items to each queue' do
- queue_1 = described_class.new(namespace, '1')
- queue_2 = described_class.new(namespace, '2')
-
- result_2 = nil
-
- result_1 = queue_1.safe_execute(['A']) do |_|
- result_2 = queue_2.safe_execute(['B']) do |_|
- queue_1.safe_execute(['C'])
- queue_2.safe_execute(['D'])
- end
- end
-
- expect(result_1[:status]).to eq(:finished)
- expect(result_1[:new_items]).to eq(['C'])
- expect(result_2[:status]).to eq(:finished)
- expect(result_2[:new_items]).to eq(['D'])
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index e0a7044e5f9..186d4e1fb42 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -58,24 +58,15 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
issues
end
- let(:project_identifier) { 'namespace/repo' }
+ let_it_be(:project_identifier) { 'namespace/repo' }
- let(:data) do
- {
- 'bb_session' => {
- 'bitbucket_token' => "123456",
- 'bitbucket_refresh_token' => "secret"
- }
- }
- end
-
- let(:project) do
+ let_it_be_with_reload(:project) do
create(
:project,
:repository,
import_source: project_identifier,
import_url: "https://bitbucket.org/#{project_identifier}.git",
- import_data_attributes: { credentials: data }
+ import_data_attributes: { credentials: { 'token' => 'token' } }
)
end
@@ -88,6 +79,14 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
}
end
+ let(:last_issue_data) do
+ {
+ page: 1,
+ pagelen: 1,
+ values: [sample_issues_statuses.last]
+ }
+ end
+
let(:counter) { double('counter', increment: true) }
subject { described_class.new(project) }
@@ -253,6 +252,13 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
stub_request(
:get,
+ "https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=1&sort=-created_on&state=ALL"
+ ).to_return(status: 200,
+ headers: { "Content-Type" => "application/json" },
+ body: last_issue_data.to_json)
+
+ stub_request(
+ :get,
"https://api.bitbucket.org/2.0/repositories/#{project_identifier}/issues?pagelen=50&sort=created_on"
).to_return(status: 200,
headers: { "Content-Type" => "application/json" },
@@ -352,6 +358,12 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
end
describe 'issue import' do
+ it 'allocates internal ids' do
+ expect(Issue).to receive(:track_project_iid!).with(project, 6)
+
+ importer.execute
+ end
+
it 'maps reporters to anonymous if bitbucket reporter is nil' do
allow(importer).to receive(:import_wiki)
importer.execute
@@ -371,6 +383,29 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
expect(project.issues.map(&:work_item_type_id).uniq).to contain_exactly(WorkItems::Type.default_issue_type.id)
end
+
+ context 'with issue comments' do
+ let(:inline_note) do
+ instance_double(Bitbucket::Representation::Comment, note: 'Hello world', author: 'someuser', created_at: Time.now, updated_at: Time.now)
+ end
+
+ before do
+ allow_next_instance_of(Bitbucket::Client) do |instance|
+ allow(instance).to receive(:issue_comments).and_return([inline_note])
+ end
+ end
+
+ it 'imports issue comments' do
+ allow(importer).to receive(:import_wiki)
+ importer.execute
+
+ comment = project.notes.first
+ expect(project.notes.size).to eq(7)
+ expect(comment.note).to include(inline_note.note)
+ expect(comment.note).to include(inline_note.author)
+ expect(importer.errors).to be_empty
+ end
+ end
end
context 'metrics' do
diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
index c0e4d1b5355..c78140a70b3 100644
--- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
+++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
@@ -288,7 +288,7 @@ RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cac
it 'deletes values from redis_cache' do
pipeline_status.delete_from_cache
- key_exists = Gitlab::Redis::Cache.with { |redis| redis.exists(cache_key) }
+ key_exists = Gitlab::Redis::Cache.with { |redis| redis.exists?(cache_key) }
expect(key_exists).to be_falsy
end
diff --git a/spec/lib/gitlab/ci/ansi2json_spec.rb b/spec/lib/gitlab/ci/ansi2json_spec.rb
index 4b3b049176f..0f8f3759834 100644
--- a/spec/lib/gitlab/ci/ansi2json_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json_spec.rb
@@ -7,70 +7,74 @@ RSpec.describe Gitlab::Ci::Ansi2json do
describe 'lines' do
it 'prints non-ansi as-is' do
- expect(convert_json('Hello')).to eq([
- { offset: 0, content: [{ text: 'Hello' }] }
- ])
+ expect(convert_json('Hello')).to eq([{ offset: 0, content: [{ text: 'Hello' }] }])
end
context 'new lines' do
it 'adds new line when encountering \n' do
- expect(convert_json("Hello\nworld")).to eq([
- { offset: 0, content: [{ text: 'Hello' }] },
- { offset: 6, content: [{ text: 'world' }] }
- ])
+ expect(convert_json("Hello\nworld")).to eq(
+ [
+ { offset: 0, content: [{ text: 'Hello' }] },
+ { offset: 6, content: [{ text: 'world' }] }
+ ])
end
it 'adds new line when encountering \r\n' do
- expect(convert_json("Hello\r\nworld")).to eq([
- { offset: 0, content: [{ text: 'Hello' }] },
- { offset: 7, content: [{ text: 'world' }] }
- ])
+ expect(convert_json("Hello\r\nworld")).to eq(
+ [
+ { offset: 0, content: [{ text: 'Hello' }] },
+ { offset: 7, content: [{ text: 'world' }] }
+ ])
end
it 'ignores empty newlines' do
- expect(convert_json("Hello\n\nworld")).to eq([
- { offset: 0, content: [{ text: 'Hello' }] },
- { offset: 7, content: [{ text: 'world' }] }
- ])
- expect(convert_json("Hello\r\n\r\nworld")).to eq([
- { offset: 0, content: [{ text: 'Hello' }] },
- { offset: 9, content: [{ text: 'world' }] }
- ])
+ expect(convert_json("Hello\n\nworld")).to eq(
+ [
+ { offset: 0, content: [{ text: 'Hello' }] },
+ { offset: 7, content: [{ text: 'world' }] }
+ ])
+ expect(convert_json("Hello\r\n\r\nworld")).to eq(
+ [
+ { offset: 0, content: [{ text: 'Hello' }] },
+ { offset: 9, content: [{ text: 'world' }] }
+ ])
end
it 'replace the current line when encountering \r' do
- expect(convert_json("Hello\rworld")).to eq([
- { offset: 0, content: [{ text: 'world' }] }
- ])
+ expect(convert_json("Hello\rworld")).to eq([{ offset: 0, content: [{ text: 'world' }] }])
end
end
it 'recognizes color changing ANSI sequences' do
- expect(convert_json("\e[31mHello\e[0m")).to eq([
- { offset: 0, content: [{ text: 'Hello', style: 'term-fg-red' }] }
- ])
+ expect(convert_json("\e[31mHello\e[0m")).to eq(
+ [
+ { offset: 0, content: [{ text: 'Hello', style: 'term-fg-red' }] }
+ ])
end
it 'recognizes color changing ANSI sequences across multiple lines' do
- expect(convert_json("\e[31mHello\nWorld\e[0m")).to eq([
- { offset: 0, content: [{ text: 'Hello', style: 'term-fg-red' }] },
- { offset: 11, content: [{ text: 'World', style: 'term-fg-red' }] }
- ])
+ expect(convert_json("\e[31mHello\nWorld\e[0m")).to eq(
+ [
+ { offset: 0, content: [{ text: 'Hello', style: 'term-fg-red' }] },
+ { offset: 11, content: [{ text: 'World', style: 'term-fg-red' }] }
+ ])
end
it 'recognizes background and foreground colors' do
- expect(convert_json("\e[31;44mHello")).to eq([
- { offset: 0, content: [{ text: 'Hello', style: 'term-fg-red term-bg-blue' }] }
- ])
+ expect(convert_json("\e[31;44mHello")).to eq(
+ [
+ { offset: 0, content: [{ text: 'Hello', style: 'term-fg-red term-bg-blue' }] }
+ ])
end
it 'recognizes style changes within the same line' do
- expect(convert_json("\e[31;44mHello\e[0m world")).to eq([
- { offset: 0, content: [
- { text: 'Hello', style: 'term-fg-red term-bg-blue' },
- { text: ' world' }
- ] }
- ])
+ expect(convert_json("\e[31;44mHello\e[0m world")).to eq(
+ [
+ { offset: 0, content: [
+ { text: 'Hello', style: 'term-fg-red term-bg-blue' },
+ { text: ' world' }
+ ] }
+ ])
end
context 'with section markers' do
@@ -82,130 +86,137 @@ RSpec.describe Gitlab::Ci::Ansi2json do
let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K" }
it 'marks the first line of the section as header' do
- expect(convert_json("Hello#{section_start}world!")).to eq([
- {
- offset: 0,
- content: [{ text: 'Hello' }]
- },
- {
- offset: 5,
- content: [{ text: 'world!' }],
- section: 'prepare-script',
- section_header: true
- }
- ])
+ expect(convert_json("Hello#{section_start}world!")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'Hello' }]
+ },
+ {
+ offset: 5,
+ content: [{ text: 'world!' }],
+ section: 'prepare-script',
+ section_header: true
+ }
+ ])
end
it 'does not marks the other lines of the section as header' do
- expect(convert_json("outside section#{section_start}Hello\nworld!")).to eq([
- {
- offset: 0,
- content: [{ text: 'outside section' }]
- },
- {
- offset: 15,
- content: [{ text: 'Hello' }],
- section: 'prepare-script',
- section_header: true
- },
- {
- offset: 65,
- content: [{ text: 'world!' }],
- section: 'prepare-script'
- }
- ])
+ expect(convert_json("outside section#{section_start}Hello\nworld!")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'outside section' }]
+ },
+ {
+ offset: 15,
+ content: [{ text: 'Hello' }],
+ section: 'prepare-script',
+ section_header: true
+ },
+ {
+ offset: 65,
+ content: [{ text: 'world!' }],
+ section: 'prepare-script'
+ }
+ ])
end
it 'marks the last line of the section as footer' do
- expect(convert_json("#{section_start}Good\nmorning\nworld!#{section_end}")).to eq([
- {
- offset: 0,
- content: [{ text: 'Good' }],
- section: 'prepare-script',
- section_header: true
- },
- {
- offset: 49,
- content: [{ text: 'morning' }],
- section: 'prepare-script'
- },
- {
- offset: 57,
- content: [{ text: 'world!' }],
- section: 'prepare-script'
- },
- {
- offset: 63,
- content: [],
- section_duration: '01:03',
- section: 'prepare-script'
- }
- ])
+ expect(convert_json("#{section_start}Good\nmorning\nworld!#{section_end}")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'Good' }],
+ section: 'prepare-script',
+ section_header: true
+ },
+ {
+ offset: 49,
+ content: [{ text: 'morning' }],
+ section: 'prepare-script'
+ },
+ {
+ offset: 57,
+ content: [{ text: 'world!' }],
+ section: 'prepare-script'
+ },
+ {
+ offset: 63,
+ content: [],
+ section_duration: '01:03',
+ section: 'prepare-script'
+ }
+ ])
end
it 'marks the first line as header and footer if is the only line in the section' do
- expect(convert_json("#{section_start}Hello world!#{section_end}")).to eq([
- {
- offset: 0,
- content: [{ text: 'Hello world!' }],
- section: 'prepare-script',
- section_header: true
- },
- {
- offset: 56,
- content: [],
- section: 'prepare-script',
- section_duration: '01:03'
- }
- ])
+ expect(convert_json("#{section_start}Hello world!#{section_end}")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'Hello world!' }],
+ section: 'prepare-script',
+ section_header: true
+ },
+ {
+ offset: 56,
+ content: [],
+ section: 'prepare-script',
+ section_duration: '01:03'
+ }
+ ])
end
it 'does not add sections attribute to lines after the section is closed' do
- expect(convert_json("#{section_start}Hello#{section_end}world")).to eq([
- {
- offset: 0,
- content: [{ text: 'Hello' }],
- section: 'prepare-script',
- section_header: true
- },
- {
- offset: 49,
- content: [],
- section: 'prepare-script',
- section_duration: '01:03'
- },
- {
- offset: 91,
- content: [{ text: 'world' }]
- }
- ])
+ expect(convert_json("#{section_start}Hello#{section_end}world")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'Hello' }],
+ section: 'prepare-script',
+ section_header: true
+ },
+ {
+ offset: 49,
+ content: [],
+ section: 'prepare-script',
+ section_duration: '01:03'
+ },
+ {
+ offset: 91,
+ content: [{ text: 'world' }]
+ }
+ ])
end
it 'ignores section_end marker if no section_start exists' do
- expect(convert_json("Hello #{section_end}world")).to eq([
- {
- offset: 0,
- content: [{ text: 'Hello world' }]
- }
- ])
+ expect(convert_json("Hello #{section_end}world")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'Hello world' }]
+ }
+ ])
end
context 'when section name contains .-_ and capital letters' do
let(:section_name) { 'a.Legit-SeCtIoN_namE' }
it 'sanitizes the section name' do
- expect(convert_json("Hello#{section_start}world!")).to eq([
- {
- offset: 0,
- content: [{ text: 'Hello' }]
- },
- {
- offset: 5,
- content: [{ text: 'world!' }],
- section: 'a-legit-section-name',
- section_header: true
- }
- ])
+ expect(convert_json("Hello#{section_start}world!")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'Hello' }]
+ },
+ {
+ offset: 5,
+ content: [{ text: 'world!' }],
+ section: 'a-legit-section-name',
+ section_header: true
+ }
+ ])
end
end
@@ -213,12 +224,13 @@ RSpec.describe Gitlab::Ci::Ansi2json do
let(:section_name) { 'my_$ection' }
it 'ignores the section' do
- expect(convert_json("#{section_start}hello")).to eq([
- {
- offset: 0,
- content: [{ text: 'hello' }]
- }
- ])
+ expect(convert_json("#{section_start}hello")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'hello' }]
+ }
+ ])
end
end
@@ -226,31 +238,33 @@ RSpec.describe Gitlab::Ci::Ansi2json do
let(:section_name) { '<a_tag>' }
it 'ignores the section' do
- expect(convert_json("#{section_start}hello")).to eq([
- {
- offset: 0,
- content: [{ text: 'hello' }]
- }
- ])
+ expect(convert_json("#{section_start}hello")).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'hello' }]
+ }
+ ])
end
end
it 'prints HTML tags as is' do
trace = "#{section_start}section_end:1:2<div>hello</div>#{section_end}"
- expect(convert_json(trace)).to eq([
- {
- offset: 0,
- content: [{ text: 'section_end:1:2<div>hello</div>' }],
- section: 'prepare-script',
- section_header: true
- },
- {
- offset: 75,
- content: [],
- section: 'prepare-script',
- section_duration: '01:03'
- }
- ])
+ expect(convert_json(trace)).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'section_end:1:2<div>hello</div>' }],
+ section: 'prepare-script',
+ section_header: true
+ },
+ {
+ offset: 75,
+ content: [],
+ section: 'prepare-script',
+ section_duration: '01:03'
+ }
+ ])
end
context 'with nested section' do
@@ -264,7 +278,8 @@ RSpec.describe Gitlab::Ci::Ansi2json do
it 'adds multiple sections to the lines inside the nested section' do
trace = "Hello#{section_start}foo#{nested_section_start}bar#{nested_section_end}baz#{section_end}world"
- expect(convert_json(trace)).to eq([
+ expect(convert_json(trace)).to eq(
+ [
{
offset: 0,
content: [{ text: 'Hello' }]
@@ -308,7 +323,8 @@ RSpec.describe Gitlab::Ci::Ansi2json do
it 'adds multiple sections to the lines inside the nested section and closes all sections together' do
trace = "Hello#{section_start}\e[91mfoo\e[0m#{nested_section_start}bar#{nested_section_end}#{section_end}"
- expect(convert_json(trace)).to eq([
+ expect(convert_json(trace)).to eq(
+ [
{
offset: 0,
content: [{ text: 'Hello' }]
@@ -346,24 +362,25 @@ RSpec.describe Gitlab::Ci::Ansi2json do
it 'provides section options when set' do
trace = "#{option_section_start}hello#{section_end}"
- expect(convert_json(trace)).to eq([
- {
- offset: 0,
- content: [{ text: 'hello' }],
- section: 'prepare-script',
- section_header: true,
- section_options: {
- 'collapsed' => 'true',
- 'unused_option' => '123'
+ expect(convert_json(trace)).to eq(
+ [
+ {
+ offset: 0,
+ content: [{ text: 'hello' }],
+ section: 'prepare-script',
+ section_header: true,
+ section_options: {
+ 'collapsed' => 'true',
+ 'unused_option' => '123'
+ }
+ },
+ {
+ offset: 83,
+ content: [],
+ section: 'prepare-script',
+ section_duration: '01:03'
}
- },
- {
- offset: 83,
- content: [],
- section: 'prepare-script',
- section_duration: '01:03'
- }
- ])
+ ])
end
end
end
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
index 234ba68d627..a22aa30304b 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
@@ -122,19 +122,17 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
context 'when compare_to is branch or tag' do
using RSpec::Parameterized::TableSyntax
- where(:pipeline_ref, :compare_to, :paths, :ff, :result) do
- 'feature_1' | 'master' | ['file1.txt'] | true | true
- 'feature_1' | 'master' | ['README.md'] | true | false
- 'feature_1' | 'master' | ['xyz.md'] | true | false
- 'feature_2' | 'master' | ['file1.txt'] | true | true
- 'feature_2' | 'master' | ['file2.txt'] | true | true
- 'feature_2' | 'feature_1' | ['file1.txt'] | true | false
- 'feature_2' | 'feature_1' | ['file1.txt'] | false | true
- 'feature_2' | 'feature_1' | ['file2.txt'] | true | true
- 'feature_1' | 'tag_1' | ['file1.txt'] | true | false
- 'feature_1' | 'tag_1' | ['file1.txt'] | false | true
- 'feature_1' | 'tag_1' | ['file2.txt'] | true | true
- 'feature_2' | 'tag_1' | ['file2.txt'] | true | true
+ where(:pipeline_ref, :compare_to, :paths, :result) do
+ 'feature_1' | 'master' | ['file1.txt'] | true
+ 'feature_1' | 'master' | ['README.md'] | false
+ 'feature_1' | 'master' | ['xyz.md'] | false
+ 'feature_2' | 'master' | ['file1.txt'] | true
+ 'feature_2' | 'master' | ['file2.txt'] | true
+ 'feature_2' | 'feature_1' | ['file1.txt'] | false
+ 'feature_2' | 'feature_1' | ['file2.txt'] | true
+ 'feature_1' | 'tag_1' | ['file1.txt'] | false
+ 'feature_1' | 'tag_1' | ['file2.txt'] | true
+ 'feature_2' | 'tag_1' | ['file2.txt'] | true
end
with_them do
@@ -144,10 +142,6 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
build(:ci_pipeline, project: project, ref: pipeline_ref, sha: project.commit(pipeline_ref).sha)
end
- before do
- stub_feature_flags(ci_rules_changes_compare: ff)
- end
-
it { is_expected.to eq(result) }
end
end
@@ -174,14 +168,6 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
::Gitlab::Ci::Build::Rules::Rule::Clause::ParseError, 'rules:changes:compare_to is not a valid ref'
)
end
-
- context 'when the FF ci_rules_changes_compare is disabled' do
- before do
- stub_feature_flags(ci_rules_changes_compare: false)
- end
-
- it { is_expected.to be_truthy }
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/legacy_variables_spec.rb b/spec/lib/gitlab/ci/config/entry/legacy_variables_spec.rb
deleted file mode 100644
index e9edec9a0a4..00000000000
--- a/spec/lib/gitlab/ci/config/entry/legacy_variables_spec.rb
+++ /dev/null
@@ -1,173 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Ci::Config::Entry::LegacyVariables do
- let(:config) { {} }
- let(:metadata) { {} }
-
- subject(:entry) { described_class.new(config, **metadata) }
-
- before do
- entry.compose!
- end
-
- shared_examples 'valid config' do
- describe '#value' do
- it 'returns hash with key value strings' do
- expect(entry.value).to eq result
- end
- end
-
- describe '#errors' do
- it 'does not append errors' do
- expect(entry.errors).to be_empty
- end
- end
-
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
- end
-
- shared_examples 'invalid config' do |error_message|
- describe '#valid?' do
- it 'is not valid' do
- expect(entry).not_to be_valid
- end
- end
-
- describe '#errors' do
- it 'saves errors' do
- expect(entry.errors)
- .to include(error_message)
- end
- end
- end
-
- context 'when entry config value has key-value pairs' do
- let(:config) do
- { 'VARIABLE_1' => 'value 1', 'VARIABLE_2' => 'value 2' }
- end
-
- let(:result) do
- { 'VARIABLE_1' => 'value 1', 'VARIABLE_2' => 'value 2' }
- end
-
- it_behaves_like 'valid config'
-
- describe '#value_with_data' do
- it 'returns variable with data' do
- expect(entry.value_with_data).to eq(
- 'VARIABLE_1' => { value: 'value 1' },
- 'VARIABLE_2' => { value: 'value 2' }
- )
- end
- end
- end
-
- context 'with numeric keys and values in the config' do
- let(:config) { { 10 => 20 } }
- let(:result) do
- { '10' => '20' }
- end
-
- it_behaves_like 'valid config'
- end
-
- context 'when key is an array' do
- let(:config) { { ['VAR1'] => 'val1' } }
- let(:result) do
- { 'VAR1' => 'val1' }
- end
-
- it_behaves_like 'invalid config', /should be a hash of key value pairs/
- end
-
- context 'when value is a symbol' do
- let(:config) { { 'VAR1' => :val1 } }
- let(:result) do
- { 'VAR1' => 'val1' }
- end
-
- it_behaves_like 'valid config'
- end
-
- context 'when value is a boolean' do
- let(:config) { { 'VAR1' => true } }
- let(:result) do
- { 'VAR1' => 'val1' }
- end
-
- it_behaves_like 'invalid config', /should be a hash of key value pairs/
- end
-
- context 'when entry config value has key-value pair and hash' do
- let(:config) do
- { 'VARIABLE_1' => { value: 'value 1', description: 'variable 1' },
- 'VARIABLE_2' => 'value 2' }
- end
-
- it_behaves_like 'invalid config', /should be a hash of key value pairs/
-
- context 'when metadata has use_value_data: true' do
- let(:metadata) { { use_value_data: true } }
-
- let(:result) do
- { 'VARIABLE_1' => 'value 1', 'VARIABLE_2' => 'value 2' }
- end
-
- it_behaves_like 'valid config'
-
- describe '#value_with_data' do
- it 'returns variable with data' do
- expect(entry.value_with_data).to eq(
- 'VARIABLE_1' => { value: 'value 1', description: 'variable 1' },
- 'VARIABLE_2' => { value: 'value 2' }
- )
- end
- end
- end
- end
-
- context 'when entry value is an array' do
- let(:config) { [:VAR, 'test'] }
-
- it_behaves_like 'invalid config', /should be a hash of key value pairs/
- end
-
- context 'when metadata has use_value_data: true' do
- let(:metadata) { { use_value_data: true } }
-
- context 'when entry value has hash with other key-pairs' do
- let(:config) do
- { 'VARIABLE_1' => { value: 'value 1', hello: 'variable 1' },
- 'VARIABLE_2' => 'value 2' }
- end
-
- it_behaves_like 'invalid config', /should be a hash of key value pairs, value can be a hash/
- end
-
- context 'when entry config value has hash with nil description' do
- let(:config) do
- { 'VARIABLE_1' => { value: 'value 1', description: nil } }
- end
-
- it_behaves_like 'invalid config', /should be a hash of key value pairs, value can be a hash/
- end
-
- context 'when entry config value has hash without description' do
- let(:config) do
- { 'VARIABLE_1' => { value: 'value 1' } }
- end
-
- let(:result) do
- { 'VARIABLE_1' => 'value 1' }
- end
-
- it_behaves_like 'valid config'
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index 5f42a8c49a7..ad90dd59585 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -210,20 +210,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
expect(entry.errors)
.to include 'variables:var2 config must be a string'
end
-
- context 'when the FF ci_variables_refactoring_to_variable is disabled' do
- let(:entry_without_ff) { node_class.new(config, name: :rspec) }
-
- before do
- stub_feature_flags(ci_variables_refactoring_to_variable: false)
- entry_without_ff.compose!
- end
-
- it 'reports error about variable' do
- expect(entry_without_ff.errors)
- .to include /config should be a hash of key value pairs/
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
index 937642f07e7..a16f1cf9e43 100644
--- a/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
@@ -91,10 +91,11 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Parallel do
describe '#value' do
it 'returns job needs configuration' do
- expect(parallel.value).to match(matrix: [
- { PROVIDER: 'aws', STACK: %w[monitoring app1 app2] },
- { PROVIDER: 'gcp', STACK: %w[data processing] }
- ])
+ expect(parallel.value).to match(matrix:
+ [
+ { PROVIDER: 'aws', STACK: %w[monitoring app1 app2] },
+ { PROVIDER: 'gcp', STACK: %w[data processing] }
+ ])
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 3d19987a0be..a55e13e7c2d 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -34,7 +34,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: 'image:1.0',
default: {},
services: ['postgres:9.1', 'mysql:5.5'],
- variables: { VAR: 'root', VAR2: { value: 'val 2', description: 'this is var 2' } },
+ variables: {
+ VAR: 'root',
+ VAR2: { value: 'val 2', description: 'this is var 2' },
+ VAR3: { value: %w[val3 val3b], description: 'this is var 3' }
+ },
after_script: ['make clean'],
stages: %w(build pages release),
cache: { key: 'k', untracked: true, paths: ['public/'] },
@@ -83,7 +87,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
end
it 'sets correct variables value' do
- expect(root.variables_value).to eq('VAR' => 'root', 'VAR2' => 'val 2')
+ expect(root.variables_value).to eq('VAR' => 'root', 'VAR2' => 'val 2', 'VAR3' => 'val3')
end
describe '#leaf?' do
@@ -361,20 +365,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
expect(root.errors)
.to include /var1 config uses invalid data keys: invalid/
end
-
- context 'when the FF ci_variables_refactoring_to_variable is disabled' do
- let(:root_without_ff) { described_class.new(hash, user: user, project: project) }
-
- before do
- stub_feature_flags(ci_variables_refactoring_to_variable: false)
- root_without_ff.compose!
- end
-
- it 'reports errors about the invalid variable' do
- expect(root_without_ff.errors)
- .to include /variables config should be a hash of key value pairs, value can be a hash/
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 303d825c591..3531d6e9f1a 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -364,19 +364,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it 'returns an error about invalid variables:' do
expect(subject.errors).to include(/variables config should be a hash/)
end
-
- context 'when the FF ci_variables_refactoring_to_variable is disabled' do
- let(:entry_without_ff) { factory.create! }
-
- before do
- stub_feature_flags(ci_variables_refactoring_to_variable: false)
- entry_without_ff.compose!
- end
-
- it 'returns an error about invalid variables:' do
- expect(subject.errors).to include(/variables config should be a hash/)
- end
- end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/variable_spec.rb b/spec/lib/gitlab/ci/config/entry/variable_spec.rb
index 744a89d4509..076a5b32e92 100644
--- a/spec/lib/gitlab/ci/config/entry/variable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variable_spec.rb
@@ -127,20 +127,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
end
end
- context 'when config value is an array' do
- let(:config) { { value: ['value'], description: 'description' } }
-
- describe '#valid?' do
- it { is_expected.not_to be_valid }
- end
-
- describe '#errors' do
- subject(:errors) { entry.errors }
-
- it { is_expected.to include 'var1 config value must be an alphanumeric string' }
- end
- end
-
context 'when config description is a symbol' do
let(:config) { { value: 'value', description: :description } }
@@ -209,4 +195,42 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
end
end
end
+
+ describe 'ComplexArrayVariable' do
+ context 'when allow_array_value metadata is false' do
+ let(:config) { { value: %w[value value2], description: 'description' } }
+ let(:metadata) { { allow_array_value: false } }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ subject(:errors) { entry.errors }
+
+ it { is_expected.to include 'var1 config value must be an alphanumeric string' }
+ end
+ end
+
+ context 'when allow_array_value metadata is true' do
+ let(:config) { { value: %w[value value2], description: 'description' } }
+ let(:metadata) { { allowed_value_data: %i[value description], allow_array_value: true } }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ it { is_expected.to eq('value') }
+ end
+
+ describe '#value_with_data' do
+ subject(:value_with_data) { entry.value_with_data }
+
+ it { is_expected.to eq(value: 'value', description: 'description', value_options: %w[value value2]) }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/entry/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
index ad7290d0589..085f304094e 100644
--- a/spec/lib/gitlab/ci/config/entry/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
@@ -98,6 +98,62 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
it_behaves_like 'invalid config', /must be either a string or a hash/
end
+ context 'when entry config value has unallowed value key-value pair and value is a string' do
+ let(:config) do
+ { 'VARIABLE_1' => { value: 'value', description: 'variable 1' } }
+ end
+
+ context 'when there is no allowed_value_data metadata' do
+ it_behaves_like 'invalid config', /variable_1 config must be a string/
+ end
+
+ context 'when metadata has allow_array_value and allowed_value_data' do
+ let(:metadata) { { allowed_value_data: %i[value description], allow_array_value: true } }
+
+ let(:result) do
+ { 'VARIABLE_1' => 'value' }
+ end
+
+ it_behaves_like 'valid config'
+
+ describe '#value_with_data' do
+ it 'returns variable with data' do
+ expect(entry.value_with_data).to eq(
+ 'VARIABLE_1' => { value: 'value', description: 'variable 1' }
+ )
+ end
+ end
+ end
+ end
+
+ context 'when entry config value has key-value pair and value is an array' do
+ let(:config) do
+ { 'VARIABLE_1' => { value: %w[value1 value2], description: 'variable 1' } }
+ end
+
+ context 'when there is no allowed_value_data metadata' do
+ it_behaves_like 'invalid config', /variable_1 config value must be an alphanumeric string/
+ end
+
+ context 'when metadata has allow_array_value and allowed_value_data' do
+ let(:metadata) { { allowed_value_data: %i[value description], allow_array_value: true } }
+
+ let(:result) do
+ { 'VARIABLE_1' => 'value1' }
+ end
+
+ it_behaves_like 'valid config'
+
+ describe '#value_with_data' do
+ it 'returns variable with data' do
+ expect(entry.value_with_data).to eq(
+ 'VARIABLE_1' => { value: 'value1', value_options: %w[value1 value2], description: 'variable 1' }
+ )
+ end
+ end
+ end
+ end
+
context 'when entry config value has key-value pair and hash' do
let(:config) do
{ 'VARIABLE_1' => { value: 'value 1', description: 'variable 1' },
diff --git a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
index 3d19832e13d..97ac199f47d 100644
--- a/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/workflow_spec.rb
@@ -65,6 +65,54 @@ RSpec.describe Gitlab::Ci::Config::Entry::Workflow do
end
end
end
+
+ context 'with workflow name' do
+ let(:factory) { Gitlab::Config::Entry::Factory.new(described_class).value(workflow_hash) }
+
+ context 'with a blank name' do
+ let(:workflow_hash) do
+ { name: '' }
+ end
+
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'returns error about invalid name' do
+ expect(config.errors).to include('workflow name is too short (minimum is 1 character)')
+ end
+ end
+
+ context 'with too long name' do
+ let(:workflow_hash) do
+ { name: 'a' * 256 }
+ end
+
+ it 'is invalid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'returns error about invalid name' do
+ expect(config.errors).to include('workflow name is too long (maximum is 255 characters)')
+ end
+ end
+
+ context 'when name is nil' do
+ let(:workflow_hash) { { name: nil } }
+
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+ end
+
+ context 'when name is not provided' do
+ let(:workflow_hash) { { rules: [{ if: '$VAR' }] } }
+
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
index 9da8d106862..a8dc7897082 100644
--- a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb
@@ -174,9 +174,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact do
context 'when job is provided as a variable' do
let(:variables) do
- Gitlab::Ci::Variables::Collection.new([
- { key: 'VAR1', value: 'a_secret_variable_value', masked: true }
- ])
+ Gitlab::Ci::Variables::Collection.new(
+ [
+ { key: 'VAR1', value: 'a_secret_variable_value', masked: true }
+ ])
end
let(:params) { { artifact: 'generated.yml', job: 'a_secret_variable_value' } }
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index 72a85c9b03d..0ba92d1e92d 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -163,9 +163,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
context 'when non-existing project is used with a masked variable' do
let(:variables) do
- Gitlab::Ci::Variables::Collection.new([
- { key: 'VAR1', value: 'a_secret_variable_value', masked: true }
- ])
+ Gitlab::Ci::Variables::Collection.new([{ key: 'VAR1', value: 'a_secret_variable_value', masked: true }])
end
let(:params) do
@@ -180,9 +178,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
context 'when a project contained in an array is used with a masked variable' do
let(:variables) do
- Gitlab::Ci::Variables::Collection.new([
- { key: 'VAR1', value: 'a_secret_variable_value', masked: true }
- ])
+ Gitlab::Ci::Variables::Collection.new([{ key: 'VAR1', value: 'a_secret_variable_value', masked: true }])
end
let(:params) do
@@ -231,10 +227,11 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
context 'when project name and ref include masked variables' do
let(:variables) do
- Gitlab::Ci::Variables::Collection.new([
- { key: 'VAR1', value: 'a_secret_variable_value1', masked: true },
- { key: 'VAR2', value: 'a_secret_variable_value2', masked: true }
- ])
+ Gitlab::Ci::Variables::Collection.new(
+ [
+ { key: 'VAR1', value: 'a_secret_variable_value1', masked: true },
+ { key: 'VAR2', value: 'a_secret_variable_value2', masked: true }
+ ])
end
let(:params) { { project: 'a_secret_variable_value1', ref: 'a_secret_variable_value2', file: '/file.yml' } }
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 9eaba12f388..e12f5dcee0a 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -207,9 +207,9 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
context "when duplicate 'include's are defined" do
let(:values) do
{ include: [
- { 'local' => local_file },
- { 'local' => local_file }
- ],
+ { 'local' => local_file },
+ { 'local' => local_file }
+ ],
image: 'image:1.0' }
end
@@ -416,17 +416,18 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
context "when locations are same after masking variables" do
let(:variables) do
- Gitlab::Ci::Variables::Collection.new([
- { 'key' => 'GITLAB_TOKEN', 'value' => 'secret-file1', 'masked' => true },
- { 'key' => 'GITLAB_TOKEN', 'value' => 'secret-file2', 'masked' => true }
- ])
+ Gitlab::Ci::Variables::Collection.new(
+ [
+ { 'key' => 'GITLAB_TOKEN', 'value' => 'secret-file1', 'masked' => true },
+ { 'key' => 'GITLAB_TOKEN', 'value' => 'secret-file2', 'masked' => true }
+ ])
end
let(:values) do
{ include: [
- { 'local' => 'hello/secret-file1.yml' },
- { 'local' => 'hello/secret-file2.yml' }
- ],
+ { 'local' => 'hello/secret-file1.yml' },
+ { 'local' => 'hello/secret-file2.yml' }
+ ],
image: 'ruby:2.7' }
end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 055114769ea..475503de7da 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -889,4 +889,31 @@ RSpec.describe Gitlab::Ci::Config do
it { is_expected.to eq([{ if: '$CI_COMMIT_REF_NAME == "master"' }]) }
end
+
+ describe '#workflow_name' do
+ subject(:workflow_name) { config.workflow_name }
+
+ let(:yml) do
+ <<-EOS
+ workflow:
+ name: 'Pipeline name'
+
+ rspec:
+ script: exit 0
+ EOS
+ end
+
+ it { is_expected.to eq('Pipeline name') }
+
+ context 'with no name' do
+ let(:yml) do
+ <<-EOS
+ rspec:
+ script: exit 0
+ EOS
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb
index 33aaa145a39..5eeab658a8e 100644
--- a/spec/lib/gitlab/ci/jwt_v2_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe Gitlab::Ci::JwtV2 do
let(:project) { build_stubbed(:project, namespace: namespace) }
let(:user) { build_stubbed(:user) }
let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'auto-deploy-2020-03-19') }
+ let(:aud) { described_class::DEFAULT_AUD }
+
let(:build) do
build_stubbed(
:ci_build,
@@ -16,7 +18,7 @@ RSpec.describe Gitlab::Ci::JwtV2 do
)
end
- subject(:ci_job_jwt_v2) { described_class.new(build, ttl: 30) }
+ subject(:ci_job_jwt_v2) { described_class.new(build, ttl: 30, aud: aud) }
it { is_expected.to be_a Gitlab::Ci::Jwt }
@@ -30,5 +32,13 @@ RSpec.describe Gitlab::Ci::JwtV2 do
expect(payload[:sub]).to eq("project_path:#{project.full_path}:ref_type:branch:ref:#{pipeline.source_ref}")
end
end
+
+ context 'when given an aud' do
+ let(:aud) { 'AWS' }
+
+ it 'uses that aud in the payload' do
+ expect(payload[:aud]).to eq('AWS')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index 3d46d266c13..cf07e952f26 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -342,6 +342,7 @@ RSpec.describe Gitlab::Ci::Lint do
{
'count' => a_kind_of(Numeric),
'avg' => a_kind_of(Numeric),
+ 'sum' => a_kind_of(Numeric),
'max' => a_kind_of(Numeric),
'min' => a_kind_of(Numeric)
}
diff --git a/spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb
index 7222ebc3cb8..e12fa380209 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb
@@ -19,8 +19,7 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning do
it 'returns expected source data' do
is_expected.to have_attributes(
source_type: :dependency_scanning,
- data: property_data,
- fingerprint: '4dbcb747e6f0fb3ed4f48d96b777f1d64acdf43e459fdfefad404e55c004a188'
+ data: property_data
)
end
end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
index c54a3268bbe..f58a463f047 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
@@ -72,12 +72,13 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator d
it { is_expected.not_to be_valid }
it "outputs errors for each validation failure" do
- expect(validator.errors).to match_array([
- "property '/components/0' is missing required keys: name",
- "property '/components/0/type' is not one of: [\"application\", \"framework\"," \
- " \"library\", \"container\", \"operating-system\", \"device\", \"firmware\", \"file\"]",
- "property '/components/1' is missing required keys: type"
- ])
+ expect(validator.errors).to match_array(
+ [
+ "property '/components/0' is missing required keys: name",
+ "property '/components/0/type' is not one of: [\"application\", \"framework\"," \
+ " \"library\", \"container\", \"operating-system\", \"device\", \"firmware\", \"file\"]",
+ "property '/components/1' is missing required keys: type"
+ ])
end
end
end
@@ -121,10 +122,11 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator d
it { is_expected.not_to be_valid }
it "outputs errors for each validation failure" do
- expect(validator.errors).to match_array([
- "property '/metadata/properties/0/name' is not of type: string",
- "property '/metadata/properties/0/value' is not of type: string"
- ])
+ expect(validator.errors).to match_array(
+ [
+ "property '/metadata/properties/0/name' is not of type: string",
+ "property '/metadata/properties/0/value' is not of type: string"
+ ])
end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index 297ef1f5bb9..7dbad354e4c 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -54,24 +54,15 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
context 'when the validate flag is set to `false`' do
let(:validate) { false }
- let(:valid?) { false }
- let(:errors) { ['foo'] }
- let(:warnings) { ['bar'] }
before do
- allow_next_instance_of(validator_class) do |instance|
- allow(instance).to receive(:valid?).and_return(valid?)
- allow(instance).to receive(:errors).and_return(errors)
- allow(instance).to receive(:warnings).and_return(warnings)
- end
-
allow(parser).to receive_messages(create_scanner: true, create_scan: true)
end
- it 'instantiates the validator with correct params' do
+ it 'does not instantiate the validator' do
parse_report
- expect(validator_class).to have_received(:new).with(
+ expect(validator_class).not_to have_received(:new).with(
report.type,
data.deep_stringify_keys,
report.version,
@@ -80,43 +71,17 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
)
end
- context 'when the report data is not valid according to the schema' do
- it 'adds warnings to the report' do
- expect { parse_report }.to change { report.warnings }.from([]).to(
- [
- { message: 'foo', type: 'Schema' },
- { message: 'bar', type: 'Schema' }
- ]
- )
- end
-
- it 'keeps the execution flow as normal' do
- parse_report
+ it 'marks the report as valid' do
+ parse_report
- expect(parser).to have_received(:create_scanner)
- expect(parser).to have_received(:create_scan)
- end
+ expect(report).not_to be_errored
end
- context 'when the report data is valid according to the schema' do
- let(:valid?) { true }
- let(:errors) { [] }
- let(:warnings) { [] }
-
- it 'does not add errors to the report' do
- expect { parse_report }.not_to change { report.errors }
- end
-
- it 'does not add warnings to the report' do
- expect { parse_report }.not_to change { report.warnings }
- end
-
- it 'keeps the execution flow as normal' do
- parse_report
+ it 'keeps the execution flow as normal' do
+ parse_report
- expect(parser).to have_received(:create_scanner)
- expect(parser).to have_received(:create_scan)
- end
+ expect(parser).to have_received(:create_scanner)
+ expect(parser).to have_received(:create_scan)
end
end
@@ -152,12 +117,17 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
it 'adds errors to the report' do
expect { parse_report }.to change { report.errors }.from([]).to(
[
- { message: 'foo', type: 'Schema' },
- { message: 'bar', type: 'Schema' }
+ { message: 'foo', type: 'Schema' }
]
)
end
+ it 'marks the report as invalid' do
+ parse_report
+
+ expect(report).to be_errored
+ end
+
it 'does not try to create report entities' do
parse_report
@@ -175,8 +145,24 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
expect { parse_report }.not_to change { report.errors }.from([])
end
- it 'does not add warnings to the report' do
- expect { parse_report }.not_to change { report.warnings }.from([])
+ context 'and no warnings are present' do
+ let(:warnings) { [] }
+
+ it 'does not add warnings to the report' do
+ expect { parse_report }.not_to change { report.warnings }.from([])
+ end
+ end
+
+ context 'and some warnings are present' do
+ let(:warnings) { ['bar'] }
+
+ it 'does add warnings to the report' do
+ expect { parse_report }.to change { report.warnings }.from([]).to(
+ [
+ { message: 'bar', type: 'Schema' }
+ ]
+ )
+ end
end
it 'keeps the execution flow as normal' do
@@ -298,8 +284,8 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
scans = report.findings.map(&:scan)
expect(scans.map(&:status).all?('success')).to be(true)
- expect(scans.map(&:start_time).all?('placeholder-value')).to be(true)
- expect(scans.map(&:end_time).all?('placeholder-value')).to be(true)
+ expect(scans.map(&:start_time).all?('2022-08-10T21:37:00')).to be(true)
+ expect(scans.map(&:end_time).all?('2022-08-10T21:38:00')).to be(true)
expect(scans.size).to eq(7)
expect(scans.first).to be_a(::Gitlab::Ci::Reports::Security::Scan)
end
@@ -418,11 +404,11 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
{
'type' => 'source',
'items' => [
- 'signatures' => [
- { 'algorithm' => 'hash', 'value' => 'hash_value' },
- { 'algorithm' => 'location', 'value' => 'location_value' },
- { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
- ]
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'scope_offset', 'value' => 'scope_offset_value' }
+ ]
]
}
end
@@ -440,11 +426,11 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
{
'type' => 'source',
'items' => [
- 'signatures' => [
- { 'algorithm' => 'hash', 'value' => 'hash_value' },
- { 'algorithm' => 'location', 'value' => 'location_value' },
- { 'algorithm' => 'INVALID', 'value' => 'scope_offset_value' }
- ]
+ 'signatures' => [
+ { 'algorithm' => 'hash', 'value' => 'hash_value' },
+ { 'algorithm' => 'location', 'value' => 'location_value' },
+ { 'algorithm' => 'INVALID', 'value' => 'scope_offset_value' }
+ ]
]
}
end
diff --git a/spec/lib/gitlab/ci/parsers/security/sast_spec.rb b/spec/lib/gitlab/ci/parsers/security/sast_spec.rb
index 4bc48f6611a..f6113308201 100644
--- a/spec/lib/gitlab/ci/parsers/security/sast_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/sast_spec.rb
@@ -10,24 +10,39 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Sast do
let(:created_at) { 2.weeks.ago }
- context "when parsing valid reports" do
- where(:report_format, :report_version, :scanner_length, :finding_length, :identifier_length, :file_path, :line) do
- :sast | '14.0.0' | 1 | 5 | 6 | 'groovy/src/main/java/com/gitlab/security_products/tests/App.groovy' | 47
- :sast_deprecated | '1.2' | 3 | 33 | 17 | 'python/hardcoded/hardcoded-tmp.py' | 1
+ context "when passing valid report" do
+ # rubocop: disable Layout/LineLength
+ where(:report_format, :report_version, :scanner_length, :finding_length, :identifier_length, :file_path, :start_line, :end_line, :primary_identifiers_length) do
+ :sast | '14.0.0' | 1 | 5 | 6 | 'groovy/src/main/java/com/gitlab/security_products/tests/App.groovy' | 47 | 47 | nil
+ :sast_semgrep_for_multiple_findings | '14.0.4' | 1 | 2 | 6 | 'app/app.py' | 39 | nil | 2
end
+ # rubocop: enable Layout/LineLength
with_them do
- let(:report) { Gitlab::Ci::Reports::Security::Report.new(artifact.file_type, pipeline, created_at) }
+ let(:report) do
+ Gitlab::Ci::Reports::Security::Report.new(
+ artifact.file_type,
+ pipeline,
+ created_at
+ )
+ end
+
let(:artifact) { create(:ci_job_artifact, report_format) }
before do
- artifact.each_blob { |blob| described_class.parse!(blob, report) }
+ artifact.each_blob { |blob| described_class.parse!(blob, report, validate: true) }
end
it "parses all identifiers and findings" do
expect(report.findings.length).to eq(finding_length)
expect(report.identifiers.length).to eq(identifier_length)
expect(report.scanners.length).to eq(scanner_length)
+
+ if primary_identifiers_length
+ expect(
+ report.scanners.each_value.first.primary_identifiers.length
+ ).to eq(primary_identifiers_length)
+ end
end
it 'generates expected location' do
@@ -36,8 +51,8 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Sast do
expect(location).to be_a(::Gitlab::Ci::Reports::Security::Locations::Sast)
expect(location).to have_attributes(
file_path: file_path,
- end_line: line,
- start_line: line
+ end_line: end_line,
+ start_line: start_line
)
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index b570f2a7f75..fc3de2a14cd 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -44,6 +44,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
expect(build_statuses(pipeline)).to contain_exactly('pending')
end
+ it 'logs canceled pipelines' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ perform
+
+ expect(Gitlab::AppLogger).to have_received(:info).with(
+ class: described_class.name,
+ message: "Pipeline #{pipeline.id} auto-canceling pipeline #{prev_pipeline.id}",
+ canceled_pipeline_id: prev_pipeline.id,
+ canceled_by_pipeline_id: pipeline.id,
+ canceled_by_pipeline_source: pipeline.source
+ )
+ end
+
it 'cancels the builds with 2 queries to avoid query timeout' do
second_query_regex = /WHERE "ci_pipelines"\."id" = \d+ AND \(NOT EXISTS/
recorder = ActiveRecord::QueryRecorder.new { perform }
@@ -141,7 +155,42 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
end
end
- context 'when the prev pipeline source is webide' do
+ context 'when the pipeline is a child pipeline' do
+ let!(:parent_pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
+ let(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
+
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
+ create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
+ end
+
+ it 'does not cancel any builds' do
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+
+ perform
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+ end
+
+ context 'when feature flag ci_skip_auto_cancelation_on_child_pipelines is disabled' do
+ before do
+ stub_feature_flags(ci_skip_auto_cancelation_on_child_pipelines: false)
+ end
+
+ it 'does not cancel the parent pipeline' do
+ expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+
+ perform
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('success', 'canceled', 'canceled')
+ expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
+ end
+ end
+ end
+
+ context 'when the previous pipeline source is webide' do
let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) }
it 'does not cancel builds of the previous pipeline' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
index f451bd6bfef..e0d656f456e 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
@@ -11,9 +11,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Config::Content do
subject { described_class.new(pipeline, command) }
- # TODO: change this to `describe` and remove rubocop-disable
- # when removing the FF ci_project_pipeline_config_refactoring
- shared_context '#perform!' do # rubocop:disable RSpec/ContextWording
+ describe '#perform!' do
context 'when bridge job is passed in as parameter' do
let(:ci_config_path) { nil }
let(:bridge) { create(:ci_bridge) }
@@ -203,14 +201,4 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Config::Content do
end
end
end
-
- it_behaves_like '#perform!'
-
- context 'when the FF ci_project_pipeline_config_refactoring is disabled' do
- before do
- stub_feature_flags(ci_project_pipeline_config_refactoring: false)
- end
-
- it_behaves_like '#perform!'
- end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb
new file mode 100644
index 00000000000..bc453f1502b
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/active_jobs_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::ActiveJobs do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, namespace: namespace) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:default_plan) { create(:default_plan) }
+
+ let(:command) do
+ instance_double(
+ ::Gitlab::Ci::Pipeline::Chain::Command,
+ project: project,
+ current_user: user,
+ save_incompleted: true,
+ pipeline_seed: pipeline_seed_double
+ )
+ end
+
+ let(:pipeline_seed_double) do
+ instance_double(::Gitlab::Ci::Pipeline::Seed::Pipeline, size: 5)
+ end
+
+ let(:pipeline) do
+ create(:ci_pipeline, project: project)
+ end
+
+ let(:existing_pipeline) { create(:ci_pipeline, project: project) }
+ let(:step) { described_class.new(pipeline, command) }
+ let(:limit) { 10 }
+
+ subject { step.perform! }
+
+ before do
+ create(:plan_limits, plan: default_plan, ci_active_jobs: limit)
+ namespace.clear_memoization(:actual_plan)
+ end
+
+ shared_examples 'successful step' do
+ it 'does not fail the pipeline and does not interrupt the chain' do
+ subject
+
+ expect(pipeline).not_to be_failed
+ expect(step).not_to be_break
+ end
+ end
+
+ context 'when active jobs limit is exceeded' do
+ before do
+ create_list(:ci_build, 3, pipeline: existing_pipeline)
+ create_list(:ci_bridge, 3, pipeline: existing_pipeline)
+ end
+
+ it 'fails the pipeline with an error', :aggregate_failures do
+ subject
+
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_job_activity_limit_exceeded
+ expect(pipeline.errors.full_messages).to include(described_class::MESSAGE)
+ end
+
+ it 'logs the failure' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ subject
+
+ expect(Gitlab::AppLogger).to have_received(:info).with(
+ class: described_class.name,
+ message: described_class::MESSAGE,
+ project_id: project.id,
+ plan: default_plan.name
+ )
+ end
+
+ it 'breaks the chain' do
+ subject
+
+ expect(step).to be_break
+ end
+
+ context 'when active jobs limit not enabled' do
+ let(:limit) { 0 }
+
+ it_behaves_like 'successful step'
+ end
+ end
+
+ context 'when active jobs limit is not exceeded' do
+ before do
+ create_list(:ci_build, 3, pipeline: existing_pipeline)
+ create_list(:ci_bridge, 1, pipeline: existing_pipeline)
+ end
+
+ it_behaves_like 'successful step'
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 62de4d2e96d..51d1661b586 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -236,4 +236,47 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
end
end
end
+
+ context 'with pipeline name' do
+ let(:config) do
+ { workflow: { name: ' Pipeline name ' }, rspec: { script: 'rspec' } }
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(pipeline_name: false)
+ end
+
+ it 'does not build pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata).to be_nil
+ end
+ end
+
+ context 'with feature flag enabled' do
+ before do
+ stub_feature_flags(pipeline_name: true)
+ end
+
+ it 'builds pipeline_metadata' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata.title).to eq('Pipeline name')
+ expect(pipeline.pipeline_metadata.project).to eq(pipeline.project)
+ end
+
+ context 'with empty name' do
+ let(:config) do
+ { workflow: { name: ' ' }, rspec: { script: 'rspec' } }
+ end
+
+ it 'strips whitespace from name' do
+ run_chain
+
+ expect(pipeline.pipeline_metadata).to be_nil
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 8c4f7af0ef4..323bab89e6a 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -68,8 +68,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
context 'when refs policy is specified' do
+ let(:tag_name) { project.repository.tags.first.name }
+
let(:pipeline) do
- build(:ci_pipeline, project: project, ref: 'feature', tag: true)
+ build(:ci_pipeline, project: project, ref: tag_name, tag: true)
end
let(:config) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
index ee32661f267..c69aa661b05 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -100,19 +100,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
expect(histogram).to have_received(:observe)
.with(hash_including(plan: project.actual_plan_name), 4)
end
-
- context 'when feature flag ci_limit_active_jobs_early is disabled' do
- before do
- stub_feature_flags(ci_limit_active_jobs_early: false)
- end
-
- it 'counts all the active builds' do
- subject.build!
-
- expect(histogram).to have_received(:observe)
- .with(hash_including(plan: project.actual_plan_name), 3)
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/duration_spec.rb b/spec/lib/gitlab/ci/pipeline/duration_spec.rb
index 46c7072ad8e..36714413da6 100644
--- a/spec/lib/gitlab/ci/pipeline/duration_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/duration_spec.rb
@@ -1,117 +1,187 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Duration do
- let(:calculated_duration) { calculate(data) }
+ describe '.from_periods' do
+ let(:calculated_duration) { calculate(data) }
- shared_examples 'calculating duration' do
- it do
- expect(calculated_duration).to eq(duration)
+ shared_examples 'calculating duration' do
+ it do
+ expect(calculated_duration).to eq(duration)
+ end
end
- end
- context 'test sample A' do
- let(:data) do
- [[0, 1],
- [1, 2],
- [3, 4],
- [5, 6]]
+ context 'test sample A' do
+ let(:data) do
+ [[0, 1],
+ [1, 2],
+ [3, 4],
+ [5, 6]]
+ end
+
+ let(:duration) { 4 }
+
+ it_behaves_like 'calculating duration'
end
- let(:duration) { 4 }
+ context 'test sample B' do
+ let(:data) do
+ [[0, 1],
+ [1, 2],
+ [2, 3],
+ [3, 4],
+ [0, 4]]
+ end
- it_behaves_like 'calculating duration'
- end
+ let(:duration) { 4 }
- context 'test sample B' do
- let(:data) do
- [[0, 1],
- [1, 2],
- [2, 3],
- [3, 4],
- [0, 4]]
+ it_behaves_like 'calculating duration'
end
- let(:duration) { 4 }
+ context 'test sample C' do
+ let(:data) do
+ [[0, 4],
+ [2, 6],
+ [5, 7],
+ [8, 9]]
+ end
- it_behaves_like 'calculating duration'
- end
+ let(:duration) { 8 }
- context 'test sample C' do
- let(:data) do
- [[0, 4],
- [2, 6],
- [5, 7],
- [8, 9]]
+ it_behaves_like 'calculating duration'
end
- let(:duration) { 8 }
+ context 'test sample D' do
+ let(:data) do
+ [[0, 1],
+ [2, 3],
+ [4, 5],
+ [6, 7]]
+ end
- it_behaves_like 'calculating duration'
- end
+ let(:duration) { 4 }
+
+ it_behaves_like 'calculating duration'
+ end
- context 'test sample D' do
- let(:data) do
- [[0, 1],
- [2, 3],
- [4, 5],
- [6, 7]]
+ context 'test sample E' do
+ let(:data) do
+ [[0, 1],
+ [3, 9],
+ [3, 4],
+ [3, 5],
+ [3, 8],
+ [4, 5],
+ [4, 7],
+ [5, 8]]
+ end
+
+ let(:duration) { 7 }
+
+ it_behaves_like 'calculating duration'
end
- let(:duration) { 4 }
+ context 'test sample F' do
+ let(:data) do
+ [[1, 3],
+ [2, 4],
+ [2, 4],
+ [2, 4],
+ [5, 8]]
+ end
- it_behaves_like 'calculating duration'
- end
+ let(:duration) { 6 }
- context 'test sample E' do
- let(:data) do
- [[0, 1],
- [3, 9],
- [3, 4],
- [3, 5],
- [3, 8],
- [4, 5],
- [4, 7],
- [5, 8]]
+ it_behaves_like 'calculating duration'
end
- let(:duration) { 7 }
+ context 'test sample G' do
+ let(:data) do
+ [[1, 3],
+ [2, 4],
+ [6, 7]]
+ end
- it_behaves_like 'calculating duration'
- end
+ let(:duration) { 4 }
- context 'test sample F' do
- let(:data) do
- [[1, 3],
- [2, 4],
- [2, 4],
- [2, 4],
- [5, 8]]
+ it_behaves_like 'calculating duration'
end
- let(:duration) { 6 }
+ def calculate(data)
+ periods = data.shuffle.map do |(first, last)|
+ described_class::Period.new(first, last)
+ end
- it_behaves_like 'calculating duration'
+ described_class.from_periods(periods.sort_by(&:first))
+ end
end
- context 'test sample G' do
- let(:data) do
- [[1, 3],
- [2, 4],
- [6, 7]]
+ describe '.from_pipeline' do
+ let_it_be(:start_time) { Time.current.change(usec: 0) }
+ let_it_be(:current) { start_time + 1000 }
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be(:success_build) { create_build(:success, started_at: start_time, finished_at: start_time + 60) }
+ let_it_be(:failed_build) { create_build(:failed, started_at: start_time + 60, finished_at: start_time + 120) }
+ let_it_be(:canceled_build) { create_build(:canceled, started_at: start_time + 120, finished_at: start_time + 180) }
+ let_it_be(:skipped_build) { create_build(:skipped, started_at: start_time) }
+ let_it_be(:pending_build) { create_build(:pending) }
+ let_it_be(:created_build) { create_build(:created) }
+ let_it_be(:preparing_build) { create_build(:preparing) }
+ let_it_be(:scheduled_build) { create_build(:scheduled) }
+ let_it_be(:expired_scheduled_build) { create_build(:expired_scheduled) }
+ let_it_be(:manual_build) { create_build(:manual) }
+
+ let!(:running_build) { create_build(:running, started_at: start_time) }
+
+ it 'returns the duration of the running build' do
+ travel_to(current) do
+ expect(described_class.from_pipeline(pipeline)).to eq 1000.seconds
+ end
end
- let(:duration) { 4 }
+ context 'when there is no running build' do
+ let(:running_build) { nil }
- it_behaves_like 'calculating duration'
- end
+ it 'returns the duration for all the builds' do
+ travel_to(current) do
+ expect(described_class.from_pipeline(pipeline)).to eq 180.seconds
+ end
+ end
+ end
- def calculate(data)
- periods = data.shuffle.map do |(first, last)|
- described_class::Period.new(first, last)
+ context 'when there are bridge jobs' do
+ let!(:success_bridge) { create_bridge(:success, started_at: start_time + 220, finished_at: start_time + 280) }
+ let!(:failed_bridge) { create_bridge(:failed, started_at: start_time + 180, finished_at: start_time + 240) }
+ let!(:skipped_bridge) { create_bridge(:skipped, started_at: start_time) }
+ let!(:created_bridge) { create_bridge(:created) }
+ let!(:manual_bridge) { create_bridge(:manual) }
+
+ it 'returns the duration of the running build' do
+ travel_to(current) do
+ expect(described_class.from_pipeline(pipeline)).to eq 1000.seconds
+ end
+ end
+
+ context 'when there is no running build' do
+ let!(:running_build) { nil }
+
+ it 'returns the duration for all the builds and bridge jobs' do
+ travel_to(current) do
+ expect(described_class.from_pipeline(pipeline)).to eq 280.seconds
+ end
+ end
+ end
end
- described_class.from_periods(periods.sort_by(&:first))
+ private
+
+ def create_build(trait, **opts)
+ create(:ci_build, trait, pipeline: pipeline, **opts)
+ end
+
+ def create_bridge(trait, **opts)
+ create(:ci_bridge, trait, pipeline: pipeline, **opts)
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/logger_spec.rb b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
index f31361431f2..3af0ebe7484 100644
--- a/spec/lib/gitlab/ci/pipeline/logger_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
loggable_data = {
'expensive_operation_duration_s' => {
'count' => 1,
+ 'sum' => a_kind_of(Numeric),
'avg' => a_kind_of(Numeric),
'max' => a_kind_of(Numeric),
'min' => a_kind_of(Numeric)
@@ -62,6 +63,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
accumulator[key] = {
'count' => count,
'avg' => a_kind_of(Numeric),
+ 'sum' => a_kind_of(Numeric),
'max' => a_kind_of(Numeric),
'min' => a_kind_of(Numeric)
}
@@ -71,6 +73,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
data['expensive_operation_db_count']['max'] = db_count
data['expensive_operation_db_count']['min'] = db_count
data['expensive_operation_db_count']['avg'] = db_count
+ data['expensive_operation_db_count']['sum'] = count * db_count
end
data
@@ -131,7 +134,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
it 'records durations of observed operations' do
loggable_data = {
'pipeline_creation_duration_s' => {
- 'avg' => 30, 'count' => 1, 'max' => 30, 'min' => 30
+ 'avg' => 30, 'sum' => 30, 'count' => 1, 'max' => 30, 'min' => 30
}
}
@@ -165,10 +168,10 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
'pipeline_creation_caller' => 'source',
'pipeline_source' => pipeline.source,
'pipeline_save_duration_s' => {
- 'avg' => 60, 'count' => 1, 'max' => 60, 'min' => 60
+ 'avg' => 60, 'sum' => 60, 'count' => 1, 'max' => 60, 'min' => 60
},
'pipeline_creation_duration_s' => {
- 'avg' => 20, 'count' => 2, 'max' => 30, 'min' => 10
+ 'avg' => 20, 'sum' => 40, 'count' => 2, 'max' => 30, 'min' => 10
}
}
end
@@ -215,10 +218,10 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
'pipeline_creation_service_duration_s' => a_kind_of(Numeric),
'pipeline_creation_caller' => 'source',
'pipeline_save_duration_s' => {
- 'avg' => 60, 'count' => 1, 'max' => 60, 'min' => 60
+ 'avg' => 60, 'sum' => 60, 'count' => 1, 'max' => 60, 'min' => 60
},
'pipeline_creation_duration_s' => {
- 'avg' => 20, 'count' => 2, 'max' => 30, 'min' => 10
+ 'avg' => 20, 'sum' => 40, 'count' => 2, 'max' => 30, 'min' => 10
}
}
end
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
index e289e59b281..effa2c43418 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
@@ -191,11 +191,12 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
end
it 'includes the base report errors sorted by severity' do
- expect(existing_errors).to eq([
- blocker_degradation,
- critical_degradation,
- major_degradation
- ])
+ expect(existing_errors).to eq(
+ [
+ blocker_degradation,
+ critical_degradation,
+ major_degradation
+ ])
end
end
@@ -242,11 +243,12 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
end
it 'includes errors not found in the base report sorted by severity' do
- expect(new_errors).to eq([
- blocker_degradation,
- critical_degradation,
- minor_degradation
- ])
+ expect(new_errors).to eq(
+ [
+ blocker_degradation,
+ critical_degradation,
+ minor_degradation
+ ])
end
end
@@ -304,11 +306,12 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
end
it 'returns the base report errors not found in the head report, sorted by severity' do
- expect(resolved_errors).to eq([
- blocker_degradation,
- critical_degradation,
- minor_degradation
- ])
+ expect(resolved_errors).to eq(
+ [
+ blocker_degradation,
+ critical_degradation,
+ minor_degradation
+ ])
end
end
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
index f4b47893805..68e70525c55 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
@@ -103,15 +103,16 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReports do
end
it 'sorts degradations based on severity' do
- expect(codequality_report.degradations.values).to eq([
- blocker,
- critical,
- major,
- major_2,
- minor,
- info,
- unknown
- ])
+ expect(codequality_report.degradations.values).to eq(
+ [
+ blocker,
+ critical,
+ major,
+ major_2,
+ minor,
+ info,
+ unknown
+ ])
end
context 'with non-existence and uppercase severities' do
@@ -126,12 +127,13 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReports do
end
it 'sorts unknown last' do
- expect(other_report.degradations.values).to eq([
- blocker,
- uppercase_major,
- minor,
- non_existent
- ])
+ expect(other_report.degradations.values).to eq(
+ [
+ blocker,
+ uppercase_major,
+ minor,
+ non_existent
+ ])
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/sbom/source_spec.rb b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
index cb30bd721dd..343c0d8c15c 100644
--- a/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
+++ b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
@@ -12,8 +12,7 @@ RSpec.describe Gitlab::Ci::Reports::Sbom::Source do
'source_file' => { 'path' => 'package.json' },
'package_manager' => { 'name' => 'npm' },
'language' => { 'name' => 'JavaScript' }
- },
- fingerprint: '4dbcb747e6f0fb3ed4f48d96b777f1d64acdf43e459fdfefad404e55c004a188'
+ }
}
end
@@ -22,8 +21,7 @@ RSpec.describe Gitlab::Ci::Reports::Sbom::Source do
it 'has correct attributes' do
expect(subject).to have_attributes(
source_type: attributes[:type],
- data: attributes[:data],
- fingerprint: attributes[:fingerprint]
+ data: attributes[:data]
)
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb
index ab0efb90901..d7f967f1c55 100644
--- a/spec/lib/gitlab/ci/reports/security/report_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb
@@ -140,6 +140,24 @@ RSpec.describe Gitlab::Ci::Reports::Security::Report do
it { is_expected.to eq(scanner_1) }
end
+ describe '#primary_identifiers' do
+ it 'returns matching identifiers' do
+ scanner_with_identifiers = create(
+ :ci_reports_security_scanner,
+ external_id: 'external_id_1',
+ primary_identifiers: [create(:ci_reports_security_identifier, external_id: 'other_id', name: 'other_scanner')]
+ )
+ scanner_without_identifiers = create(
+ :ci_reports_security_scanner,
+ external_id: 'external_id_2')
+
+ report.add_scanner(scanner_with_identifiers)
+ report.add_scanner(scanner_without_identifiers)
+
+ expect(report.primary_identifiers).to eq(scanner_with_identifiers.primary_identifiers)
+ end
+ end
+
describe '#add_error' do
context 'when the message is not given' do
it 'adds a new error to report with the generic error message' do
diff --git a/spec/lib/gitlab/ci/secure_files/cer_spec.rb b/spec/lib/gitlab/ci/secure_files/cer_spec.rb
new file mode 100644
index 00000000000..6b9cd0e3bfc
--- /dev/null
+++ b/spec/lib/gitlab/ci/secure_files/cer_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::SecureFiles::Cer do
+ context 'when the supplied certificate cannot be parsed' do
+ let(:invalid_certificate) { described_class.new('xyzabc') }
+
+ describe '#certificate_data' do
+ it 'assigns the error message and returns nil' do
+ expect(invalid_certificate.certificate_data).to be nil
+ expect(invalid_certificate.error).to eq('not enough data')
+ end
+ end
+
+ describe '#metadata' do
+ it 'returns an empty hash' do
+ expect(invalid_certificate.metadata).to eq({})
+ end
+ end
+
+ describe '#expires_at' do
+ it 'returns nil' do
+ expect(invalid_certificate.metadata[:expires_at]).to be_nil
+ end
+ end
+ end
+
+ context 'when the supplied certificate can be parsed' do
+ let(:sample_file) { fixture_file('ci_secure_files/sample.cer') }
+ let(:subject) { described_class.new(sample_file) }
+
+ describe '#certificate_data' do
+ it 'returns an OpenSSL::X509::Certificate object' do
+ expect(subject.certificate_data.class).to be(OpenSSL::X509::Certificate)
+ end
+ end
+
+ describe '#metadata' do
+ it 'returns a hash with the expected keys' do
+ expect(subject.metadata.keys).to match_array([:issuer, :subject, :id, :expires_at])
+ end
+ end
+
+ describe '#id' do
+ it 'returns the certificate serial number' do
+ expect(subject.metadata[:id]).to eq('33669367788748363528491290218354043267')
+ end
+ end
+
+ describe '#expires_at' do
+ it 'returns the certificate expiration timestamp' do
+ expect(subject.metadata[:expires_at]).to eq('2022-04-26 19:20:40 UTC')
+ end
+ end
+
+ describe '#issuer' do
+ it 'calls parse on X509Name' do
+ expect(subject.metadata[:issuer]["O"]).to eq('Apple Inc.')
+ end
+ end
+
+ describe '#subject' do
+ it 'calls parse on X509Name' do
+ expect(subject.metadata[:subject]["OU"]).to eq('N7SYAN8PX8')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/secure_files/mobile_provision_spec.rb b/spec/lib/gitlab/ci/secure_files/mobile_provision_spec.rb
new file mode 100644
index 00000000000..fb382174c64
--- /dev/null
+++ b/spec/lib/gitlab/ci/secure_files/mobile_provision_spec.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::SecureFiles::MobileProvision do
+ context 'when the supplied profile cannot be parsed' do
+ context 'when the supplied certificate cannot be parsed' do
+ let(:invalid_profile) { described_class.new('xyzabc') }
+
+ describe '#decoded_plist' do
+ it 'assigns the error message and returns nil' do
+ expect(invalid_profile.decoded_plist).to be nil
+ expect(invalid_profile.error).to eq('Could not parse the PKCS7: not enough data')
+ end
+ end
+
+ describe '#properties' do
+ it 'returns nil' do
+ expect(invalid_profile.properties).to be_nil
+ end
+ end
+
+ describe '#metadata' do
+ it 'returns an empty hash' do
+ expect(invalid_profile.metadata).to eq({})
+ end
+ end
+
+ describe '#expires_at' do
+ it 'returns nil' do
+ expect(invalid_profile.metadata[:expires_at]).to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when the supplied profile can be parsed' do
+ let(:sample_file) { fixture_file('ci_secure_files/sample.mobileprovision') }
+ let(:subject) { described_class.new(sample_file) }
+
+ describe '#decoded_plist' do
+ it 'returns an XML string' do
+ expect(subject.decoded_plist.class).to be(String)
+ expect(subject.decoded_plist.starts_with?('<?xml version="1.0"')).to be true
+ end
+ end
+
+ describe '#properties' do
+ it 'returns the property list of the decoded plist provided' do
+ expect(subject.properties.class).to be(Hash)
+ expect(subject.properties.keys).to match_array(%w[AppIDName ApplicationIdentifierPrefix CreationDate
+ Platform IsXcodeManaged DeveloperCertificates
+ DER-Encoded-Profile PPQCheck Entitlements ExpirationDate
+ Name ProvisionedDevices TeamIdentifier TeamName
+ TimeToLive UUID Version])
+ end
+
+ it 'returns nil if the property list fails to be parsed from the decoded plist' do
+ allow(subject).to receive(:decoded_plist).and_return('foo/bar')
+ expect(subject.properties).to be nil
+ expect(subject.error).to start_with('invalid XML')
+ end
+ end
+
+ describe '#metadata' do
+ it 'returns a hash with the expected keys' do
+ expect(subject.metadata.keys).to match_array([:id, :expires_at, :app_id, :app_id_prefix, :app_name,
+ :certificate_ids, :devices, :entitlements, :platforms,
+ :team_id, :team_name, :xcode_managed])
+ end
+ end
+
+ describe '#id' do
+ it 'returns the profile UUID' do
+ expect(subject.metadata[:id]).to eq('6b9fcce1-b9a9-4b37-b2ce-ec4da2044abf')
+ end
+ end
+
+ describe '#expires_at' do
+ it 'returns the expiration timestamp of the profile' do
+ expect(subject.metadata[:expires_at].utc).to eq('2023-08-01 23:15:13 UTC')
+ end
+ end
+
+ describe '#platforms' do
+ it 'returns the platforms assigned to the profile' do
+ expect(subject.metadata[:platforms]).to match_array(['iOS'])
+ end
+ end
+
+ describe '#team_name' do
+ it 'returns the team name in the profile' do
+ expect(subject.metadata[:team_name]).to eq('Darby Frey')
+ end
+ end
+
+ describe '#team_id' do
+ it 'returns the team ids in the profile' do
+ expect(subject.metadata[:team_id]).to match_array(['N7SYAN8PX8'])
+ end
+ end
+
+ describe '#app_name' do
+ it 'returns the app name in the profile' do
+ expect(subject.metadata[:app_name]).to eq('iOS Demo')
+ end
+ end
+
+ describe '#app_id' do
+ it 'returns the app id in the profile' do
+ expect(subject.metadata[:app_id]).to eq('match Development com.gitlab.ios-demo')
+ end
+ end
+
+ describe '#app_id_prefix' do
+ it 'returns the app id prefixes in the profile' do
+ expect(subject.metadata[:app_id_prefix]).to match_array(['N7SYAN8PX8'])
+ end
+ end
+
+ describe '#xcode_managed' do
+ it 'returns the xcode_managed property in the profile' do
+ expect(subject.metadata[:xcode_managed]).to be false
+ end
+ end
+
+ describe '#entitlements' do
+ it 'returns the entitlements in the profile' do
+ expect(subject.metadata[:entitlements].keys).to match_array(['application-identifier',
+ 'com.apple.developer.game-center',
+ 'com.apple.developer.team-identifier',
+ 'get-task-allow',
+ 'keychain-access-groups'])
+ end
+ end
+
+ describe '#devices' do
+ it 'returns the devices attached to the profile' do
+ expect(subject.metadata[:devices]).to match_array(["00008101-001454860C10001E"])
+ end
+ end
+
+ describe '#certificate_ids' do
+ it 'returns the certificate ids attached to the profile' do
+ expect(subject.metadata[:certificate_ids]).to match_array(["23380136242930206312716563638445789376"])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/secure_files/p12_spec.rb b/spec/lib/gitlab/ci/secure_files/p12_spec.rb
new file mode 100644
index 00000000000..beabf4b4856
--- /dev/null
+++ b/spec/lib/gitlab/ci/secure_files/p12_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::SecureFiles::P12 do
+ context 'when the supplied certificate cannot be parsed' do
+ let(:invalid_certificate) { described_class.new('xyzabc') }
+
+ describe '#certificate_data' do
+ it 'assigns the error message and returns nil' do
+ expect(invalid_certificate.certificate_data).to be nil
+ expect(invalid_certificate.error).to eq('PKCS12_parse: mac verify failure')
+ end
+ end
+
+ describe '#metadata' do
+ it 'returns an empty hash' do
+ expect(invalid_certificate.metadata).to eq({})
+ end
+ end
+
+ describe '#expires_at' do
+ it 'returns nil' do
+ expect(invalid_certificate.metadata[:expires_at]).to be_nil
+ end
+ end
+ end
+
+ context 'when the supplied certificate can be parsed, but the password is invalid' do
+ let(:sample_file) { fixture_file('ci_secure_files/sample.p12') }
+ let(:subject) { described_class.new(sample_file, 'foo') }
+
+ describe '#certificate_data' do
+ it 'assigns the error message and returns nil' do
+ expect(subject.certificate_data).to be nil
+ expect(subject.error).to eq('PKCS12_parse: mac verify failure')
+ end
+ end
+ end
+
+ context 'when the supplied certificate can be parsed' do
+ let(:sample_file) { fixture_file('ci_secure_files/sample.p12') }
+ let(:subject) { described_class.new(sample_file) }
+
+ describe '#certificate_data' do
+ it 'returns an OpenSSL::X509::Certificate object' do
+ expect(subject.certificate_data.class).to be(OpenSSL::X509::Certificate)
+ end
+ end
+
+ describe '#metadata' do
+ it 'returns a hash with the expected keys' do
+ expect(subject.metadata.keys).to match_array([:issuer, :subject, :id, :expires_at])
+ end
+ end
+
+ describe '#id' do
+ it 'returns the certificate serial number' do
+ expect(subject.metadata[:id]).to eq('75949910542696343243264405377658443914')
+ end
+ end
+
+ describe '#expires_at' do
+ it 'returns the certificate expiration timestamp' do
+ expect(subject.metadata[:expires_at]).to eq('2022-09-21 14:56:00 UTC')
+ end
+ end
+
+ describe '#issuer' do
+ it 'calls parse on X509Name' do
+ expect(subject.metadata[:issuer]["O"]).to eq('Apple Inc.')
+ end
+ end
+
+ describe '#subject' do
+ it 'calls parse on X509Name' do
+ expect(subject.metadata[:subject]["OU"]).to eq('N7SYAN8PX8')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/secure_files/x509_name_spec.rb b/spec/lib/gitlab/ci/secure_files/x509_name_spec.rb
new file mode 100644
index 00000000000..3a523924c5b
--- /dev/null
+++ b/spec/lib/gitlab/ci/secure_files/x509_name_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::SecureFiles::X509Name do
+ describe '.parse' do
+ it 'parses an X509Name object into a hash format' do
+ sample = OpenSSL::X509::Name.new([
+ ['C', 'Test Country'],
+ ['O', 'Test Org Name'],
+ ['OU', 'Test Org Unit'],
+ ['CN', 'Test Common Name'],
+ ['UID', 'Test UID']
+ ])
+
+ parsed_sample = described_class.parse(sample)
+
+ expect(parsed_sample["C"]).to eq('Test Country')
+ expect(parsed_sample["O"]).to eq('Test Org Name')
+ expect(parsed_sample["OU"]).to eq('Test Org Unit')
+ expect(parsed_sample["CN"]).to eq('Test Common Name')
+ expect(parsed_sample["UID"]).to eq('Test UID')
+ end
+
+ it 'returns an empty hash when an error occurs' do
+ parsed_sample = described_class.parse('unexpectedinput')
+ expect(parsed_sample).to eq({})
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 3043c8c5467..321a47c0634 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_defa
trace.being_watched!
result = Gitlab::Redis::SharedState.with do |redis|
- redis.exists(cache_key)
+ redis.exists?(cache_key)
end
expect(result).to eq(true)
diff --git a/spec/lib/gitlab/ci/variables/builder/group_spec.rb b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
index 72487588cde..c3743ebd2d7 100644
--- a/spec/lib/gitlab/ci/variables/builder/group_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/group_spec.rb
@@ -132,11 +132,12 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Group do
end
it 'orders the variables from least to most matched' do
- variables_collection = Gitlab::Ci::Variables::Collection.new([
- variable,
- partially_matched_variable,
- perfectly_matched_variable
- ]).to_runner_variables
+ variables_collection = Gitlab::Ci::Variables::Collection.new(
+ [
+ variable,
+ partially_matched_variable,
+ perfectly_matched_variable
+ ]).to_runner_variables
expect(subject.to_runner_variables).to eq(variables_collection)
end
diff --git a/spec/lib/gitlab/ci/variables/builder/project_spec.rb b/spec/lib/gitlab/ci/variables/builder/project_spec.rb
index b64b6ea98e2..c1cefc425f5 100644
--- a/spec/lib/gitlab/ci/variables/builder/project_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder/project_spec.rb
@@ -132,11 +132,12 @@ RSpec.describe Gitlab::Ci::Variables::Builder::Project do
end
it 'puts variables matching environment scope more in the end' do
- variables_collection = Gitlab::Ci::Variables::Collection.new([
- variable,
- partially_matched_variable,
- perfectly_matched_variable
- ]).to_runner_variables
+ variables_collection = Gitlab::Ci::Variables::Collection.new(
+ [
+ variable,
+ partially_matched_variable,
+ perfectly_matched_variable
+ ]).to_runner_variables
expect(subject.to_runner_variables).to eq(variables_collection)
end
diff --git a/spec/lib/gitlab/ci/variables/builder/release_spec.rb b/spec/lib/gitlab/ci/variables/builder/release_spec.rb
new file mode 100644
index 00000000000..85b1659d07b
--- /dev/null
+++ b/spec/lib/gitlab/ci/variables/builder/release_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Variables::Builder::Release do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:release) { create(:release, project: project) }
+
+ let(:builder) { described_class.new(release) }
+
+ describe '#variables' do
+ let(:description_variable) do
+ {
+ key: 'CI_RELEASE_DESCRIPTION',
+ value: release.description,
+ public: true,
+ masked: false,
+ raw: true
+ }
+ end
+
+ subject do
+ builder.variables
+ end
+
+ context 'when the release is present' do
+ let(:description_item) { item(description_variable) }
+
+ it 'contains all the variables' do
+ is_expected.to contain_exactly(description_item)
+ end
+
+ context 'for large description' do
+ before do
+ release.update_attribute(:description, "Test Description ..." * 5000)
+ end
+
+ it 'truncates' do
+ expect(subject['CI_RELEASE_DESCRIPTION'].value.length).to eq(1024)
+ end
+ end
+
+ context 'when description is nil' do
+ before do
+ release.update_attribute(:description, nil)
+ end
+
+ it 'returns without error' do
+ builder = subject
+
+ expect(builder).to match_array([])
+ expect(builder.errors).to be_nil
+ end
+ end
+ end
+
+ context 'when the release is not present' do
+ let(:release) { nil }
+
+ it 'contains no variables' do
+ is_expected.to match_array([])
+ end
+ end
+ end
+
+ def item(variable)
+ ::Gitlab::Ci::Variables::Collection::Item.fabricate(variable)
+ end
+end
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 4833ccf9093..52ba85d2df1 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:job) do
create(:ci_build,
+ name: 'rspec:test 1',
pipeline: pipeline,
user: user,
yaml_variables: [{ key: 'YAML_VARIABLE', value: 'value' }]
@@ -24,13 +25,15 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache do
let(:predefined_variables) do
[
{ key: 'CI_JOB_NAME',
- value: job.name },
+ value: 'rspec:test 1' },
+ { key: 'CI_JOB_NAME_SLUG',
+ value: 'rspec-test-1' },
{ key: 'CI_JOB_STAGE',
value: job.stage_name },
{ key: 'CI_NODE_TOTAL',
value: '1' },
{ key: 'CI_BUILD_NAME',
- value: job.name },
+ value: 'rspec:test 1' },
{ key: 'CI_BUILD_STAGE',
value: job.stage_name },
{ key: 'CI',
@@ -171,6 +174,7 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache do
allow(builder).to receive(:secret_project_variables) { [var('L', 12), var('M', 12)] }
allow(pipeline).to receive(:variables) { [var('M', 13), var('N', 13)] }
allow(pipeline).to receive(:pipeline_schedule) { double(job_variables: [var('N', 14), var('O', 14)]) }
+ allow(builder).to receive(:release_variables) { [var('P', 15), var('Q', 15)] }
end
it 'returns variables in order depending on resource hierarchy' do
@@ -187,7 +191,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache do
var('K', 11), var('L', 11),
var('L', 12), var('M', 12),
var('M', 13), var('N', 13),
- var('N', 14), var('O', 14)])
+ var('N', 14), var('O', 14),
+ var('P', 15), var('Q', 15)])
end
it 'overrides duplicate keys depending on resource hierarchy' do
@@ -199,7 +204,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache do
'I' => '9', 'J' => '10',
'K' => '11', 'L' => '12',
'M' => '13', 'N' => '14',
- 'O' => '14')
+ 'O' => '14', 'P' => '15',
+ 'Q' => '15')
end
end
@@ -216,6 +222,27 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache do
.to include(a_hash_including(key: schedule_variable.key, value: schedule_variable.value))
end
end
+
+ context 'with release variables' do
+ let(:release_description_key) { 'CI_RELEASE_DESCRIPTION' }
+
+ let_it_be(:tag) { project.repository.tags.first }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, tag: true, ref: tag.name) }
+ let_it_be(:release) { create(:release, tag: tag.name, project: project) }
+
+ it 'includes release variables' do
+ expect(subject.to_hash).to include(release_description_key => release.description)
+ end
+
+ context 'when there is no release' do
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, tag: false, ref: 'master') }
+ let(:release) { nil }
+
+ it 'does not include release variables' do
+ expect(subject.to_hash).not_to have_key(release_description_key)
+ end
+ end
+ end
end
describe '#user_variables' do
@@ -261,10 +288,11 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache do
end
it 'includes #deployment_variables and merges the KUBECONFIG values', :aggregate_failures do
- expect(builder).to receive(:deployment_variables).and_return([
- { key: 'KUBECONFIG', value: 'deployment-kubeconfig' },
- { key: 'OTHER', value: 'some value' }
- ])
+ expect(builder).to receive(:deployment_variables).and_return(
+ [
+ { key: 'KUBECONFIG', value: 'deployment-kubeconfig' },
+ { key: 'OTHER', value: 'some value' }
+ ])
expect(template).to receive(:merge_yaml).with('deployment-kubeconfig')
expect(subject['KUBECONFIG'].value).to eq('example-kubeconfig')
expect(subject['OTHER'].value).to eq('some value')
diff --git a/spec/lib/gitlab/ci/variables/collection/sort_spec.rb b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
index 57171e5be69..432225c53f0 100644
--- a/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
@@ -192,13 +192,14 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Sort do
end
it 'preserves relative order of overridden variables' do
- is_expected.to eq([
- { 'TOP_LEVEL_GROUP_NAME' => 'top-level-group' },
- { 'SUBGROUP_VAR' => '$TOP_LEVEL_GROUP_NAME' },
- { 'SUB_GROUP_NAME' => 'vars-in-vars-subgroup' },
- { 'SUBGROUP_VAR' => '$SUB_GROUP_NAME' },
- { 'PROJECT_VAR' => '$SUBGROUP_VAR' }
- ])
+ is_expected.to eq(
+ [
+ { 'TOP_LEVEL_GROUP_NAME' => 'top-level-group' },
+ { 'SUBGROUP_VAR' => '$TOP_LEVEL_GROUP_NAME' },
+ { 'SUB_GROUP_NAME' => 'vars-in-vars-subgroup' },
+ { 'SUBGROUP_VAR' => '$SUB_GROUP_NAME' },
+ { 'PROJECT_VAR' => '$SUBGROUP_VAR' }
+ ])
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb
index 8ac03301322..7d4a1eef70b 100644
--- a/spec/lib/gitlab/ci/variables/collection_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection_spec.rb
@@ -571,5 +571,42 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
end
end
end
+
+ context 'with the file_variable_is_referenced_in_another_variable logging' do
+ let(:collection) do
+ Gitlab::Ci::Variables::Collection.new
+ .append(key: 'VAR1', value: 'test-1')
+ .append(key: 'VAR2', value: '$VAR1')
+ .append(key: 'VAR3', value: '$VAR1', raw: true)
+ .append(key: 'FILEVAR4', value: 'file-test-4', file: true)
+ .append(key: 'VAR5', value: '$FILEVAR4')
+ .append(key: 'VAR6', value: '$FILEVAR4', raw: true)
+ end
+
+ subject(:sort_and_expand_all) { collection.sort_and_expand_all(project: project) }
+
+ context 'when a project is not passed' do
+ let(:project) {}
+
+ it 'does not log anything' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+
+ sort_and_expand_all
+ end
+ end
+
+ context 'when a project is passed' do
+ let(:project) { create(:project) }
+
+ it 'logs file_variable_is_referenced_in_another_variable once for VAR5' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ event: 'file_variable_is_referenced_in_another_variable',
+ project_id: project.id
+ ).once
+
+ sort_and_expand_all
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
index f7a0905d9da..7f203168706 100644
--- a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
@@ -71,10 +71,11 @@ module Gitlab
subject(:yaml_variables_for) { result.yaml_variables_for(job_name) }
it 'returns calculated variables with root and job variables' do
- is_expected.to match_array([
- { key: 'VAR1', value: 'value 11' },
- { key: 'VAR2', value: 'value 2' }
- ])
+ is_expected.to match_array(
+ [
+ { key: 'VAR1', value: 'value 11' },
+ { key: 'VAR2', value: 'value 2' }
+ ])
end
context 'when an absent job is sent' do
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index cc327f5b5f1..ebf8422489e 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -15,8 +15,10 @@ module Gitlab
end
end
- describe '#build_attributes' do
- subject { described_class.new(config, user: nil).execute.build_attributes(:rspec) }
+ describe '#builds' do
+ subject(:builds) { described_class.new(config, user: nil).execute.builds }
+
+ let(:rspec_build) { builds.find { |build| build[:name] == 'rspec' } }
describe 'attributes list' do
let(:config) do
@@ -30,7 +32,7 @@ module Gitlab
end
it 'returns valid build attributes' do
- expect(subject).to eq({
+ expect(builds).to eq([{
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -45,7 +47,7 @@ module Gitlab
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
- })
+ }])
end
end
@@ -63,7 +65,7 @@ module Gitlab
end
it 'returns valid build attributes' do
- expect(subject).to eq({
+ expect(builds).to eq([{
stage: 'test',
stage_idx: 2,
name: 'rspec',
@@ -77,7 +79,7 @@ module Gitlab
job_variables: [],
root_variables_inheritance: true,
scheduling_type: :stage
- })
+ }])
end
end
@@ -89,21 +91,22 @@ module Gitlab
end
it 'includes coverage regexp in build attributes' do
- expect(subject)
+ expect(rspec_build)
.to include(coverage_regex: 'Code coverage: \d+\.\d+')
end
end
end
describe 'tags entry with default values' do
- it 'applies default values' do
- config = YAML.dump({ default: { tags: %w[A B] },
- rspec: { script: "rspec" } })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ let(:config) do
+ YAML.dump(
+ default: { tags: %w[A B] },
+ rspec: { script: "rspec" }
+ )
+ end
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first).to eq({
+ it 'applies default values' do
+ expect(rspec_build).to eq({
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -125,7 +128,7 @@ module Gitlab
YAML.dump(rspec: { script: 'rspec', interruptible: true })
end
- it { expect(subject[:interruptible]).to be_truthy }
+ it { expect(rspec_build[:interruptible]).to be_truthy }
end
describe 'interruptible job with default value' do
@@ -133,7 +136,7 @@ module Gitlab
YAML.dump(rspec: { script: 'rspec' })
end
- it { expect(subject).not_to have_key(:interruptible) }
+ it { expect(rspec_build).not_to have_key(:interruptible) }
end
describe 'uninterruptible job' do
@@ -141,7 +144,7 @@ module Gitlab
YAML.dump(rspec: { script: 'rspec', interruptible: false })
end
- it { expect(subject[:interruptible]).to be_falsy }
+ it { expect(rspec_build[:interruptible]).to be_falsy }
end
it "returns interruptible when overridden for job" do
@@ -149,9 +152,10 @@ module Gitlab
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ builds = config_processor.builds.select { |b| b[:stage] == "test" }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first).to eq({
+ expect(builds.size).to eq(1)
+ expect(builds.first).to eq({
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -174,7 +178,7 @@ module Gitlab
end
it 'includes retry count in build options attribute' do
- expect(subject[:options]).to include(retry: { max: 1 })
+ expect(rspec_build[:options]).to include(retry: { max: 1 })
end
end
@@ -184,7 +188,7 @@ module Gitlab
end
it 'does not persist retry count in the database' do
- expect(subject[:options]).not_to have_key(:retry)
+ expect(rspec_build[:options]).not_to have_key(:retry)
end
end
@@ -195,7 +199,7 @@ module Gitlab
end
it 'does use the default value' do
- expect(subject[:options]).to include(retry: { max: 1 })
+ expect(rspec_build[:options]).to include(retry: { max: 1 })
end
end
@@ -206,7 +210,7 @@ module Gitlab
end
it 'does use the job value' do
- expect(subject[:options]).to include(retry: { max: 2 })
+ expect(rspec_build[:options]).to include(retry: { max: 2 })
end
end
end
@@ -221,7 +225,7 @@ module Gitlab
end
it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
+ expect(rspec_build[:allow_failure]).to be false
end
end
@@ -232,7 +236,7 @@ module Gitlab
end
it 'is allowed to fail' do
- expect(subject[:allow_failure]).to be true
+ expect(rspec_build[:allow_failure]).to be true
end
end
@@ -244,11 +248,11 @@ module Gitlab
end
it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
+ expect(rspec_build[:allow_failure]).to be false
end
it 'saves allow_failure_criteria into options' do
- expect(subject[:options]).to match(
+ expect(rspec_build[:options]).to match(
a_hash_including(allow_failure_criteria: { exit_codes: [1] }))
end
end
@@ -262,7 +266,7 @@ module Gitlab
end
it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
+ expect(rspec_build[:allow_failure]).to be false
end
end
@@ -272,7 +276,7 @@ module Gitlab
end
it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
+ expect(rspec_build[:allow_failure]).to be false
end
end
@@ -283,11 +287,11 @@ module Gitlab
end
it 'is not allowed to fail' do
- expect(subject[:allow_failure]).to be false
+ expect(rspec_build[:allow_failure]).to be false
end
it 'saves allow_failure_criteria into options' do
- expect(subject[:options]).to match(
+ expect(rspec_build[:options]).to match(
a_hash_including(allow_failure_criteria: { exit_codes: [1] }))
end
end
@@ -305,8 +309,8 @@ module Gitlab
end
it 'has the attributes' do
- expect(subject[:when]).to eq 'delayed'
- expect(subject[:options][:start_in]).to eq '1 day'
+ expect(rspec_build[:when]).to eq 'delayed'
+ expect(rspec_build[:options][:start_in]).to eq '1 day'
end
end
end
@@ -321,7 +325,7 @@ module Gitlab
end
it 'has the attributes' do
- expect(subject[:resource_group_key]).to eq 'iOS'
+ expect(rspec_build[:resource_group_key]).to eq 'iOS'
end
end
end
@@ -337,7 +341,7 @@ module Gitlab
end
it 'has the attributes' do
- expect(subject[:options]).to eq(
+ expect(rspec_build[:options]).to eq(
trigger: { project: 'namespace/project', branch: 'main' }
)
end
@@ -353,7 +357,7 @@ module Gitlab
end
it 'has the attributes' do
- expect(subject[:options]).to eq(
+ expect(rspec_build[:options]).to eq(
trigger: { project: 'namespace/project', forward: { pipeline_variables: true } }
)
end
@@ -510,6 +514,35 @@ module Gitlab
expect(subject.root_variables).to eq([])
end
end
+
+ context 'with name' do
+ let(:config) do
+ <<-EOYML
+ workflow:
+ name: 'Pipeline name'
+
+ hello:
+ script: echo world
+ EOYML
+ end
+
+ it 'parses the workflow:name as workflow_name' do
+ expect(subject.workflow_name).to eq('Pipeline name')
+ end
+ end
+
+ context 'with no name' do
+ let(:config) do
+ <<-EOYML
+ hello:
+ script: echo world
+ EOYML
+ end
+
+ it 'parses the workflow:name' do
+ expect(subject.workflow_name).to be_nil
+ end
+ end
end
describe '#warnings' do
@@ -682,7 +715,7 @@ module Gitlab
let(:config_data) { YAML.dump(config) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data).execute }
- subject { config_processor.stage_builds_attributes('test').first }
+ subject(:test_build) { config_processor.builds.find { |build| build[:name] == 'test' } }
describe "before_script" do
context "in global context" do
@@ -850,9 +883,9 @@ module Gitlab
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first).to eq({
+ expect(rspec_build).to eq({
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -884,9 +917,9 @@ module Gitlab
script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first).to eq({
+ expect(rspec_build).to eq({
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -916,9 +949,9 @@ module Gitlab
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first).to eq({
+ expect(rspec_build).to eq({
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -944,9 +977,9 @@ module Gitlab
rspec: { image: "image:1.0", services: ["postgresql", "docker:dind"], script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first).to eq({
+ expect(rspec_build).to eq({
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -981,7 +1014,7 @@ module Gitlab
it { is_expected.to be_valid }
it "returns with image" do
- expect(processor.stage_builds_attributes("test")).to contain_exactly({
+ expect(processor.builds).to contain_exactly({
stage: "test",
stage_idx: 2,
name: "test",
@@ -1014,7 +1047,7 @@ module Gitlab
it { is_expected.to be_valid }
it "returns with service" do
- expect(processor.stage_builds_attributes("test")).to contain_exactly({
+ expect(processor.builds).to contain_exactly({
stage: "test",
stage_idx: 2,
name: "test",
@@ -1033,8 +1066,7 @@ module Gitlab
end
end
- # Change this to a `describe` block when removing the FF ci_variables_refactoring_to_variable
- shared_examples 'Variables' do
+ describe 'Variables' do
subject(:execute) { described_class.new(config).execute }
let(:build) { execute.builds.first }
@@ -1163,18 +1195,6 @@ module Gitlab
end
end
- context 'when ci_variables_refactoring_to_variable is enabled' do
- it_behaves_like 'Variables'
- end
-
- context 'when ci_variables_refactoring_to_variable is disabled' do
- before do
- stub_feature_flags(ci_variables_refactoring_to_variable: false)
- end
-
- it_behaves_like 'Variables'
- end
-
context 'when using `extends`' do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
@@ -1375,7 +1395,7 @@ module Gitlab
})
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
- builds = config_processor.stage_builds_attributes("test")
+ builds = config_processor.builds
expect(builds.size).to eq(1)
expect(builds.first[:when]).to eq(when_state)
@@ -1391,7 +1411,7 @@ module Gitlab
end
it 'creates one build and sets when:' do
- builds = subject.stage_builds_attributes("test")
+ builds = processor.builds
expect(builds.size).to eq(1)
expect(builds.first[:when]).to eq('delayed')
@@ -1419,7 +1439,7 @@ module Gitlab
end
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
- let(:builds) { config_processor.stage_builds_attributes('test') }
+ let(:builds) { config_processor.builds }
context 'when job is parallelized' do
let(:parallel) { 5 }
@@ -1535,15 +1555,16 @@ module Gitlab
})
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq([
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
- ])
+ expect(rspec_build[:cache]).to eq(
+ [
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ key: 'key',
+ policy: 'pull-push',
+ when: 'on_success'
+ ])
end
it "returns cache when defined in default context" do
@@ -1558,32 +1579,34 @@ module Gitlab
})
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq([
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- ])
+ expect(rspec_build[:cache]).to eq(
+ [
+ paths: ["logs/", "binaries/"],
+ untracked: true,
+ key: { files: ['file'] },
+ policy: 'pull-push',
+ when: 'on_success'
+ ])
end
it 'returns cache key/s when defined in a job' do
- config = YAML.dump({
- rspec: {
- cache: [
- { paths: ['binaries/'], untracked: true, key: 'keya' },
- { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' }
- ],
- script: 'rspec'
- }
- })
+ config = YAML.dump(
+ {
+ rspec: {
+ cache: [
+ { paths: ['binaries/'], untracked: true, key: 'keya' },
+ { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' }
+ ],
+ script: 'rspec'
+ }
+ })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
+ expect(rspec_build[:cache]).to eq(
[
{
paths: ['binaries/'],
@@ -1616,15 +1639,16 @@ module Gitlab
)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq([
- paths: ['binaries/'],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- ])
+ expect(rspec_build[:cache]).to eq(
+ [
+ paths: ['binaries/'],
+ untracked: true,
+ key: { files: ['file'] },
+ policy: 'pull-push',
+ when: 'on_success'
+ ])
end
it 'returns cache files with prefix' do
@@ -1640,61 +1664,65 @@ module Gitlab
)
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq([
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'], prefix: 'prefix' },
- policy: 'pull-push',
- when: 'on_success'
- ])
+ expect(rspec_build[:cache]).to eq(
+ [
+ paths: ['logs/', 'binaries/'],
+ untracked: true,
+ key: { files: ['file'], prefix: 'prefix' },
+ policy: 'pull-push',
+ when: 'on_success'
+ ])
end
it "overwrite cache when defined for a job and globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
- rspec: {
- script: "rspec",
- cache: { paths: ["test/"], untracked: false, key: 'local' }
- }
- })
+ config = YAML.dump(
+ {
+ cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
+ rspec: {
+ script: "rspec",
+ cache: { paths: ["test/"], untracked: false, key: 'local' }
+ }
+ })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq([
- paths: ["test/"],
- untracked: false,
- key: 'local',
- policy: 'pull-push',
- when: 'on_success'
- ])
+ expect(rspec_build[:cache]).to eq(
+ [
+ paths: ["test/"],
+ untracked: false,
+ key: 'local',
+ policy: 'pull-push',
+ when: 'on_success'
+ ])
end
end
describe "Artifacts" do
it "returns artifacts when defined" do
- config = YAML.dump({
- image: "image:1.0",
- services: ["mysql"],
- before_script: ["pwd"],
- rspec: {
- artifacts: {
- paths: ["logs/", "binaries/"],
- expose_as: "Exposed artifacts",
- untracked: true,
- name: "custom_name",
- expire_in: "7d"
- },
- script: "rspec"
- }
- })
+ config = YAML.dump(
+ {
+ image: "image:1.0",
+ services: ["mysql"],
+ before_script: ["pwd"],
+ rspec: {
+ artifacts: {
+ paths: ["logs/", "binaries/"],
+ expose_as: "Exposed artifacts",
+ untracked: true,
+ name: "custom_name",
+ expire_in: "7d"
+ },
+ script: "rspec"
+ }
+ })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
+ rspec_build = config_processor.builds.find { |build| build[:name] == 'rspec' }
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first).to eq({
+ expect(rspec_build).to eq({
stage: "test",
stage_idx: 2,
name: "rspec",
@@ -1729,7 +1757,7 @@ module Gitlab
})
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
- builds = config_processor.stage_builds_attributes("test")
+ builds = config_processor.builds
expect(builds.size).to eq(1)
expect(builds.first[:options][:artifacts][:expire_in]).to eq('never')
@@ -1745,7 +1773,7 @@ module Gitlab
})
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
- builds = config_processor.stage_builds_attributes("test")
+ builds = config_processor.builds
expect(builds.size).to eq(1)
expect(builds.first[:options][:artifacts][:when]).to eq(when_state)
@@ -1778,7 +1806,7 @@ module Gitlab
- my/test/something
YAML
- attributes = Gitlab::Ci::YamlProcessor.new(config).execute.build_attributes('test')
+ attributes = Gitlab::Ci::YamlProcessor.new(config).execute.builds.find { |build| build[:name] == 'test' }
expect(attributes.dig(*%i[options artifacts exclude])).to eq(%w[my/test/something])
end
@@ -1819,7 +1847,7 @@ module Gitlab
end
it "returns release info" do
- expect(processor.stage_builds_attributes('release').first[:options])
+ expect(processor.builds.first[:options])
.to eq(config[:release].except(:stage, :only))
end
end
@@ -1833,7 +1861,7 @@ module Gitlab
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
- let(:builds) { subject.stage_builds_attributes('deploy') }
+ let(:builds) { subject.builds }
context 'when a production environment is specified' do
let(:environment) { 'production' }
@@ -1943,7 +1971,7 @@ module Gitlab
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
- let(:builds) { subject.stage_builds_attributes('deploy') }
+ let(:builds) { subject.builds }
context 'when no timeout was provided' do
it 'does not include job_timeout' do
@@ -2370,8 +2398,8 @@ module Gitlab
it 'returns a valid configuration and sets artifacts: true by default' do
expect(subject).to be_valid
- rspec = subject.build_attributes(:rspec)
- expect(rspec.dig(:options, :cross_dependencies)).to eq(
+ rspec_build = subject.builds.find { |build| build[:name] == 'rspec' }
+ expect(rspec_build.dig(:options, :cross_dependencies)).to eq(
[{ pipeline: '$THE_PIPELINE_ID', job: 'dependency-job', artifacts: true }]
)
end
@@ -2391,8 +2419,8 @@ module Gitlab
it 'returns a valid configuration and sets artifacts: true by default' do
expect(subject).to be_valid
- rspec = subject.build_attributes(:rspec)
- expect(rspec.dig(:options, :cross_dependencies)).to eq(
+ rspec_build = subject.builds.find { |build| build[:name] == 'rspec' }
+ expect(rspec_build.dig(:options, :cross_dependencies)).to eq(
[{ pipeline: '123', job: 'dependency-job', artifacts: true }]
)
end
@@ -2422,7 +2450,7 @@ module Gitlab
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
- subject { config_processor.stage_builds_attributes("test") }
+ subject { config_processor.builds }
shared_examples 'hidden_job_handling' do
it "doesn't create jobs that start with dot" do
@@ -2470,7 +2498,7 @@ module Gitlab
describe "YAML Alias/Anchor" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
- subject { config_processor.stage_builds_attributes("build") }
+ subject { config_processor.builds }
shared_examples 'job_templates_handling' do
it "is correctly supported for jobs" do
diff --git a/spec/lib/gitlab/config/entry/validators_spec.rb b/spec/lib/gitlab/config/entry/validators_spec.rb
index cbc09aac586..0458bcd6354 100644
--- a/spec/lib/gitlab/config/entry/validators_spec.rb
+++ b/spec/lib/gitlab/config/entry/validators_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Config::Entry::Validators do
expect(instance.valid?).to be(valid_result)
unless valid_result
- expect(instance.errors.messages_for(:config)).to include /please use only one the following keys: foo, bar/
+ expect(instance.errors.messages_for(:config)).to include /please use only one of the following keys: foo, bar/
end
end
end
diff --git a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
index 933b6d6be9e..9af6aed2b02 100644
--- a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
+++ b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb
@@ -6,36 +6,97 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do
describe '#check' do
subject { described_class.check }
- context 'when database meets minimum supported version' do
+ let(:old_database_version) { 8 }
+ let(:old_database) { instance_double(Gitlab::Database::Reflection) }
+ let(:new_database) { instance_double(Gitlab::Database::Reflection) }
+
+ before do
+ allow(Gitlab::Database::Reflection).to receive(:new).and_return(new_database)
+ allow(old_database).to receive(:postgresql_minimum_supported_version?).and_return(false)
+ allow(old_database).to receive(:version).and_return(old_database_version)
+ allow(new_database).to receive(:postgresql_minimum_supported_version?).and_return(true)
+ end
+
+ context 'with a single database' do
before do
- allow(ApplicationRecord.database).to receive(:postgresql_minimum_supported_version?).and_return(true)
+ skip_if_multiple_databases_are_setup
+ end
+
+ context 'when database meets minimum supported version' do
+ before do
+ allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(new_database)
+ end
+
+ it { is_expected.to be_empty }
end
- it { is_expected.to be_empty }
+ context 'when database does not meet minimum supported version' do
+ before do
+ allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(old_database)
+ end
+
+ it 'reports deprecated database notice' do
+ is_expected.to contain_exactly(notice_deprecated_database(old_database_version))
+ end
+ end
end
- context 'when database does not meet minimum supported version' do
+ context 'with multiple databases' do
before do
- allow(ApplicationRecord.database).to receive(:postgresql_minimum_supported_version?).and_return(false)
+ skip_if_multiple_databases_not_setup
end
- let(:notice_deprecated_database) do
- {
- type: 'warning',
- message: _('You are using PostgreSQL %{pg_version_current}, but PostgreSQL ' \
- '%{pg_version_minimum} is required for this version of GitLab. ' \
- 'Please upgrade your environment to a supported PostgreSQL version, ' \
- 'see %{pg_requirements_url} for details.') % {
- pg_version_current: ApplicationRecord.database.version,
- pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION,
- pg_requirements_url: '<a href="https://docs.gitlab.com/ee/install/requirements.html#database">database requirements</a>'
- }
- }
+ context 'when both databases meet minimum supported version' do
+ before do
+ allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(new_database)
+ allow(Gitlab::Database::Reflection).to receive(:new).with(Ci::ApplicationRecord).and_return(new_database)
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when one of the databases does not meet minimum supported version' do
+ it 'reports deprecated database notice if the main database is using an old version' do
+ allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(old_database)
+ allow(Gitlab::Database::Reflection).to receive(:new).with(Ci::ApplicationRecord).and_return(new_database)
+ is_expected.to contain_exactly(notice_deprecated_database(old_database_version))
+ end
+
+ it 'reports deprecated database notice if the ci database is using an old version' do
+ allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(new_database)
+ allow(Gitlab::Database::Reflection).to receive(:new).with(Ci::ApplicationRecord).and_return(old_database)
+ is_expected.to contain_exactly(notice_deprecated_database(old_database_version))
+ end
end
- it 'reports deprecated database notice' do
- is_expected.to contain_exactly(notice_deprecated_database)
+ context 'when both databases do not meet minimum supported version' do
+ before do
+ allow(Gitlab::Database::Reflection).to receive(:new).with(ActiveRecord::Base).and_return(old_database)
+ allow(Gitlab::Database::Reflection).to receive(:new).with(Ci::ApplicationRecord).and_return(old_database)
+ end
+
+ it 'reports deprecated database notice' do
+ is_expected.to match_array [
+ notice_deprecated_database(old_database_version),
+ notice_deprecated_database(old_database_version)
+ ]
+ end
end
end
end
+
+ def notice_deprecated_database(database_version)
+ {
+ type: 'warning',
+ message: _('You are using PostgreSQL %{pg_version_current}, but PostgreSQL ' \
+ '%{pg_version_minimum} is required for this version of GitLab. ' \
+ 'Please upgrade your environment to a supported PostgreSQL version, ' \
+ 'see %{pg_requirements_url} for details.') % \
+ {
+ pg_version_current: database_version,
+ pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION,
+ pg_requirements_url: Gitlab::ConfigChecker::ExternalDatabaseChecker::PG_REQUIREMENTS_LINK
+ }
+ }
+ end
end
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index aac4936b20e..1fa6eee9813 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -141,19 +141,20 @@ RSpec.describe Gitlab::Conflict::File do
let(:raw_conflict_content) { index.merge_file('files/ruby/popen.rb')[:data] }
it 'assign conflict types and adds match line to the end of the section' do
- expect(diff_line_types).to eq([
- 'match',
- nil, nil, nil,
- "conflict_marker_our",
- "conflict_our",
- "conflict_marker",
- "conflict_their",
- "conflict_their",
- "conflict_their",
- "conflict_marker_their",
- nil, nil, nil,
- "match"
- ])
+ expect(diff_line_types).to eq(
+ [
+ 'match',
+ nil, nil, nil,
+ "conflict_marker_our",
+ "conflict_our",
+ "conflict_marker",
+ "conflict_their",
+ "conflict_their",
+ "conflict_their",
+ "conflict_marker_their",
+ nil, nil, nil,
+ "match"
+ ])
end
end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index 86a1539a836..46a12d8c6f6 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(attributes[:sha]).to eq(pipeline.sha)
expect(attributes[:tag]).to eq(pipeline.tag)
expect(attributes[:id]).to eq(pipeline.id)
+ expect(attributes[:iid]).to eq(pipeline.iid)
expect(attributes[:source]).to eq(pipeline.source)
expect(attributes[:status]).to eq(pipeline.status)
expect(attributes[:detailed_status]).to eq('passed')
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 3daed2508a2..1ac9cbae036 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#execute!' do
context 'when an invalid transition is applied' do
- %i[finished finalizing].each do |state|
+ %i[finalizing finished].each do |state|
it 'raises an exception' do
batched_migration = create(:batched_background_migration, state)
@@ -103,6 +103,48 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
+ describe '#finish!' do
+ context 'when an invalid transition is applied' do
+ it 'raises an exception' do
+ batched_migration = create(:batched_background_migration, :failed)
+
+ expect { batched_migration.finish! }.to raise_error(StateMachines::InvalidTransition, /Cannot transition status/)
+ end
+ end
+
+ context 'when a valid transition is applied' do
+ %i[active paused finished finalizing].each do |state|
+ it 'moves to finished' do
+ batched_migration = create(:batched_background_migration, state)
+
+ expect(batched_migration.finish!).to be_truthy
+ end
+ end
+ end
+ end
+
+ describe '#failure!' do
+ context 'when an invalid transition is applied' do
+ %i[paused finished].each do |state|
+ it 'raises an exception' do
+ batched_migration = create(:batched_background_migration, state)
+
+ expect { batched_migration.failure! }.to raise_error(StateMachines::InvalidTransition, /Cannot transition status/)
+ end
+ end
+ end
+
+ context 'when a valid transition is applied' do
+ %i[failed finalizing active].each do |state|
+ it 'moves to failed' do
+ batched_migration = create(:batched_background_migration, state)
+
+ expect(batched_migration.failure!).to be_truthy
+ end
+ end
+ end
+ end
+
describe '.valid_status' do
valid_status = [:paused, :active, :finished, :failed, :finalizing]
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index 2a6eb8f779d..75b543bee85 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -93,12 +93,13 @@ RSpec.describe Gitlab::Database::EachDatabase do
end
it 'yields each model with SharedModel connected to each database connection' do
- expect_yielded_models([model1, model2], [
- { model: model1, connection: ActiveRecord::Base.connection, name: 'main' },
- { model: model1, connection: Ci::ApplicationRecord.connection, name: 'ci' },
- { model: model2, connection: ActiveRecord::Base.connection, name: 'main' },
- { model: model2, connection: Ci::ApplicationRecord.connection, name: 'ci' }
- ])
+ expect_yielded_models([model1, model2],
+ [
+ { model: model1, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: model1, connection: Ci::ApplicationRecord.connection, name: 'ci' },
+ { model: model2, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: model2, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ])
end
context 'when the model limits connection names' do
@@ -108,10 +109,11 @@ RSpec.describe Gitlab::Database::EachDatabase do
end
it 'only yields the model with SharedModel connected to the limited connections' do
- expect_yielded_models([model1, model2], [
- { model: model1, connection: ActiveRecord::Base.connection, name: 'main' },
- { model: model2, connection: Ci::ApplicationRecord.connection, name: 'ci' }
- ])
+ expect_yielded_models([model1, model2],
+ [
+ { model: model1, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: model2, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ])
end
end
end
@@ -132,10 +134,11 @@ RSpec.describe Gitlab::Database::EachDatabase do
end
it 'yields each model after connecting SharedModel' do
- expect_yielded_models([main_model, ci_model], [
- { model: main_model, connection: main_connection, name: 'main' },
- { model: ci_model, connection: ci_connection, name: 'ci' }
- ])
+ expect_yielded_models([main_model, ci_model],
+ [
+ { model: main_model, connection: main_connection, name: 'main' },
+ { model: ci_model, connection: ci_connection, name: 'ci' }
+ ])
end
end
@@ -154,21 +157,23 @@ RSpec.describe Gitlab::Database::EachDatabase do
context 'when a single name is passed in' do
it 'yields models only connected to the given database' do
- expect_yielded_models([main_model, ci_model, shared_model], [
- { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
- { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
- ], only_on: 'ci')
+ expect_yielded_models([main_model, ci_model, shared_model],
+ [
+ { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
+ { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ], only_on: 'ci')
end
end
context 'when a list of names are passed in' do
it 'yields models only connected to the given databases' do
- expect_yielded_models([main_model, ci_model, shared_model], [
- { model: main_model, connection: ActiveRecord::Base.connection, name: 'main' },
- { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
- { model: shared_model, connection: ActiveRecord::Base.connection, name: 'main' },
- { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
- ], only_on: %i[main ci])
+ expect_yielded_models([main_model, ci_model, shared_model],
+ [
+ { model: main_model, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: ci_model, connection: Ci::ApplicationRecord.connection, name: 'ci' },
+ { model: shared_model, connection: ActiveRecord::Base.connection, name: 'main' },
+ { model: shared_model, connection: Ci::ApplicationRecord.connection, name: 'ci' }
+ ], only_on: %i[main ci])
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 9c09253b24c..997c7a31cba 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -210,10 +210,25 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
it 'uses a retry with exponential backoffs' do
- expect(lb).to receive(:retry_with_backoff).and_yield
+ expect(lb).to receive(:retry_with_backoff).and_yield(0)
lb.read_write { 10 }
end
+
+ it 'does not raise NoMethodError error when primary_only?' do
+ connection = ActiveRecord::Base.connection_pool.connection
+ expected_error = Gitlab::Database::LoadBalancing::CONNECTION_ERRORS.first
+
+ allow(lb).to receive(:primary_only?).and_return(true)
+
+ expect do
+ lb.read_write do
+ connection.transaction do
+ raise expected_error
+ end
+ end
+ end.to raise_error(expected_error)
+ end
end
describe '#host' do
@@ -330,6 +345,19 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
expect { lb.retry_with_backoff { raise } }.to raise_error(RuntimeError)
end
+
+ it 'yields the current retry iteration' do
+ allow(lb).to receive(:connection_error?).and_return(true)
+ expect(lb).to receive(:release_primary_connection).exactly(3).times
+ iterations = []
+
+ # time: 0 so that we don't sleep and slow down the test
+ # rubocop: disable Style/Semicolon
+ expect { lb.retry_with_backoff(attempts: 3, time: 0) { |i| iterations << i; raise } }.to raise_error(RuntimeError)
+ # rubocop: enable Style/Semicolon
+
+ expect(iterations).to eq([1, 2, 3])
+ end
end
describe '#connection_error?' do
diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
index a1c141af537..713bff5feea 100644
--- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
@@ -9,10 +9,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 42]]) }
let(:multiple_sticking_objects) do
Set.new([
- [ActiveRecord::Base.sticking, :user, 42],
- [ActiveRecord::Base.sticking, :runner, '123456789'],
- [ActiveRecord::Base.sticking, :runner, '1234']
- ])
+ [ActiveRecord::Base.sticking, :user, 42],
+ [ActiveRecord::Base.sticking, :runner, '123456789'],
+ [ActiveRecord::Base.sticking, :runner, '1234']
+ ])
end
after do
@@ -182,11 +182,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
it 'returns the sticking object' do
env = { described_class::STICK_OBJECT => multiple_sticking_objects }
- expect(middleware.sticking_namespaces(env)).to eq([
- [ActiveRecord::Base.sticking, :user, 42],
- [ActiveRecord::Base.sticking, :runner, '123456789'],
- [ActiveRecord::Base.sticking, :runner, '1234']
- ])
+ expect(middleware.sticking_namespaces(env)).to eq(
+ [
+ [ActiveRecord::Base.sticking, :user, 42],
+ [ActiveRecord::Base.sticking, :runner, '123456789'],
+ [ActiveRecord::Base.sticking, :runner, '1234']
+ ])
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index 8053bd57bba..88007de53d3 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
let(:middleware) { described_class.new }
let(:worker) { worker_class.new }
let(:location) { '0/D525E3A8' }
- let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } }
+ let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_s => location } }
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } }
before do
@@ -315,6 +315,46 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_
expect(middleware.send(:databases_in_sync?, locations))
.to eq(false)
end
+
+ context 'when locations have string keys' do
+ it 'returns false when the load balancers are not in sync' do
+ locations = {}
+
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ locations[lb.name.to_s] = 'foo'
+
+ allow(lb)
+ .to receive(:select_up_to_date_host)
+ .with('foo')
+ .and_return(false)
+ end
+
+ expect(middleware.send(:databases_in_sync?, locations))
+ .to eq(false)
+ end
+
+ context 'when "indifferent_wal_location_keys" FF is off' do
+ before do
+ stub_feature_flags(indifferent_wal_location_keys: false)
+ end
+
+ it 'returns true when the load balancers are not in sync' do
+ locations = {}
+
+ Gitlab::Database::LoadBalancing.each_load_balancer do |lb|
+ locations[lb.name.to_s] = 'foo'
+
+ allow(lb)
+ .to receive(:select_up_to_date_host)
+ .with('foo')
+ .and_return(false)
+ end
+
+ expect(middleware.send(:databases_in_sync?, locations))
+ .to eq(true)
+ end
+ end
+ end
end
def process_job(job)
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index 2ffb2c32c32..1e316c55786 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -41,10 +41,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
sticking.stick_or_unstick_request(env, :user, 42)
sticking.stick_or_unstick_request(env, :runner, '123456789')
- expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq([
- [sticking, :user, 42],
- [sticking, :runner, '123456789']
- ])
+ expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq(
+ [
+ [sticking, :user, 42],
+ [sticking, :runner,
+ '123456789']
+ ])
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
index 30e5fbbd803..6026d979bcf 100644
--- a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis, :delete do
+ include StubENV
let(:model) { ApplicationRecord }
let(:db_host) { model.connection_pool.db_config.host }
@@ -19,6 +20,10 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
model.connection.execute(<<~SQL)
CREATE TABLE IF NOT EXISTS #{test_table_name} (id SERIAL PRIMARY KEY, value INTEGER)
SQL
+
+ # The load balancer sleeps between attempts to retry a query.
+ # Mocking the sleep call significantly reduces the runtime of this spec file.
+ allow(model.connection.load_balancer).to receive(:sleep)
end
after do
@@ -46,36 +51,62 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis
conn.execute("INSERT INTO #{test_table_name} (value) VALUES (2)")
end
- it 'logs a warning when violating transaction semantics with writes' do
- conn = model.connection
+ context 'with the PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION environment variable not set' do
+ it 'logs a warning when violating transaction semantics with writes' do
+ conn = model.connection
+
+ expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
+
+ conn.transaction do
+ expect(conn).to be_transaction_open
+
+ execute(conn)
- expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
+ expect(conn).not_to be_transaction_open
+ end
- conn.transaction do
- expect(conn).to be_transaction_open
+ values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
+ expect(values).to contain_exactly(2) # Does not include 1 because the transaction was aborted and leaked
+ end
+
+ it 'does not log a warning when no transaction is open to be leaked' do
+ conn = model.connection
+
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .not_to receive(:warn).with(hash_including(event: :transaction_leak))
+
+ expect(conn).not_to be_transaction_open
execute(conn)
expect(conn).not_to be_transaction_open
- end
- values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
- expect(values).to contain_exactly(2) # Does not include 1 because the transaction was aborted and leaked
+ values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
+ expect(values).to contain_exactly(1, 2) # Includes both rows because there was no transaction to roll back
+ end
end
- it 'does not log a warning when no transaction is open to be leaked' do
- conn = model.connection
-
- expect(::Gitlab::Database::LoadBalancing::Logger)
- .not_to receive(:warn).with(hash_including(event: :transaction_leak))
+ context 'with the PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION environment variable set' do
+ before do
+ stub_env('PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION' => '1')
+ end
- expect(conn).not_to be_transaction_open
+ it 'raises an exception when a retry would occur during a transaction' do
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .not_to receive(:warn).with(hash_including(event: :transaction_leak))
- execute(conn)
+ expect do
+ model.transaction do
+ execute(model.connection)
+ end
+ end.to raise_error(ActiveRecord::StatementInvalid) { |e| expect(e.cause).to be_a(PG::ConnectionBad) }
+ end
- expect(conn).not_to be_transaction_open
+ it 'retries when not in a transaction' do
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .not_to receive(:warn).with(hash_including(event: :transaction_leak))
- values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
- expect(values).to contain_exactly(1, 2) # Includes both rows because there was no transaction to roll back
+ expect { execute(model.connection) }.not_to raise_error
+ end
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index d73b478ee7c..bcdd5646994 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -757,6 +757,58 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, reverse_lock_order: true)
end
end
+
+ context 'when creating foreign key for a group of columns' do
+ it 'references the custom target columns when provided', :aggregate_failures do
+ expect(model).to receive(:with_lock_retries).and_yield
+ expect(model).to receive(:execute).with(
+ "ALTER TABLE projects\n" \
+ "ADD CONSTRAINT fk_multiple_columns\n" \
+ "FOREIGN KEY \(partition_number, user_id\)\n" \
+ "REFERENCES users \(partition_number, id\)\n" \
+ "ON DELETE CASCADE\n" \
+ "NOT VALID;\n"
+ )
+
+ model.add_concurrent_foreign_key(
+ :projects,
+ :users,
+ column: [:partition_number, :user_id],
+ target_column: [:partition_number, :id],
+ validate: false,
+ name: :fk_multiple_columns
+ )
+ end
+
+ context 'when foreign key is already defined' do
+ before do
+ expect(model).to receive(:foreign_key_exists?).with(
+ :projects,
+ :users,
+ {
+ column: [:partition_number, :user_id],
+ name: :fk_multiple_columns,
+ on_delete: :cascade,
+ primary_key: [:partition_number, :id]
+ }
+ ).and_return(true)
+ end
+
+ it 'does not create foreign key', :aggregate_failures do
+ expect(model).not_to receive(:with_lock_retries).and_yield
+ expect(model).not_to receive(:execute).with(/FOREIGN KEY/)
+
+ model.add_concurrent_foreign_key(
+ :projects,
+ :users,
+ column: [:partition_number, :user_id],
+ target_column: [:partition_number, :id],
+ validate: false,
+ name: :fk_multiple_columns
+ )
+ end
+ end
+ end
end
end
@@ -813,6 +865,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(name).to be_an_instance_of(String)
expect(name.length).to eq(13)
end
+
+ context 'when using multiple columns' do
+ it 'returns the name of the foreign key', :aggregate_failures do
+ result = model.concurrent_foreign_key_name(:table_name, [:partition_number, :id])
+
+ expect(result).to be_an_instance_of(String)
+ expect(result.length).to eq(13)
+ end
+ end
end
describe '#foreign_key_exists?' do
@@ -887,6 +948,62 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'compares by target table if no column given' do
expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey
end
+
+ context 'with foreign key using multiple columns' do
+ before do
+ key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
+ :projects, :users,
+ {
+ column: [:partition_number, :id],
+ name: :fk_projects_users_partition_number_id,
+ on_delete: :cascade,
+ primary_key: [:partition_number, :id]
+ }
+ )
+ allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
+ end
+
+ it 'finds existing foreign keys by columns' do
+ expect(model.foreign_key_exists?(:projects, :users, column: [:partition_number, :id])).to be_truthy
+ end
+
+ it 'finds existing foreign keys by name' do
+ expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id)).to be_truthy
+ end
+
+ it 'finds existing foreign_keys by name and column' do
+ expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id])).to be_truthy
+ end
+
+ it 'finds existing foreign_keys by name, column and on_delete' do
+ expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id], on_delete: :cascade)).to be_truthy
+ end
+
+ it 'finds existing foreign keys by target table only' do
+ expect(model.foreign_key_exists?(:projects, :users)).to be_truthy
+ end
+
+ it 'compares by column name if given' do
+ expect(model.foreign_key_exists?(:projects, :users, column: :id)).to be_falsey
+ end
+
+ it 'compares by target column name if given' do
+ expect(model.foreign_key_exists?(:projects, :users, primary_key: :user_id)).to be_falsey
+ expect(model.foreign_key_exists?(:projects, :users, primary_key: [:partition_number, :id])).to be_truthy
+ end
+
+ it 'compares by foreign key name if given' do
+ expect(model.foreign_key_exists?(:projects, :users, name: :non_existent_foreign_key_name)).to be_falsey
+ end
+
+ it 'compares by foreign key name and column if given' do
+ expect(model.foreign_key_exists?(:projects, :users, name: :non_existent_foreign_key_name, column: [:partition_number, :id])).to be_falsey
+ end
+
+ it 'compares by foreign key name, column and on_delete if given' do
+ expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id], on_delete: :nullify)).to be_falsey
+ end
+ end
end
describe '#disable_statement_timeout' do
@@ -3359,6 +3476,73 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#drop_constraint' do
+ it "executes the statement to drop the constraint" do
+ expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" CASCADE\n")
+
+ model.drop_constraint(:test_table, :constraint_name, cascade: true)
+ end
+
+ context 'when cascade option is false' do
+ it "executes the statement to drop the constraint without cascade" do
+ expect(model).to receive(:execute).with("ALTER TABLE \"test_table\" DROP CONSTRAINT \"constraint_name\" \n")
+
+ model.drop_constraint(:test_table, :constraint_name, cascade: false)
+ end
+ end
+ end
+
+ describe '#add_primary_key_using_index' do
+ it "executes the statement to add the primary key" do
+ expect(model).to receive(:execute).with /ALTER TABLE "test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/
+
+ model.add_primary_key_using_index(:test_table, :old_name, :new_name)
+ end
+ end
+
+ context 'when changing the primary key of a given table' do
+ before do
+ model.create_table(:test_table, primary_key: :id) do |t|
+ t.integer :partition_number, default: 1
+ end
+
+ model.add_index(:test_table, :id, unique: true, name: :old_index_name)
+ model.add_index(:test_table, [:id, :partition_number], unique: true, name: :new_index_name)
+ end
+
+ describe '#swap_primary_key' do
+ it 'executes statements to swap primary key', :aggregate_failures do
+ expect(model).to receive(:with_lock_retries).with(raise_on_exhaustion: true).ordered.and_yield
+ expect(model).to receive(:execute).with(/ALTER TABLE "test_table" DROP CONSTRAINT "test_table_pkey" CASCADE/).and_call_original
+ expect(model).to receive(:execute).with(/ALTER TABLE "test_table" ADD CONSTRAINT "test_table_pkey" PRIMARY KEY USING INDEX "new_index_name"/).and_call_original
+
+ model.swap_primary_key(:test_table, :test_table_pkey, :new_index_name)
+ end
+
+ context 'when new index does not exist' do
+ before do
+ model.remove_index(:test_table, column: [:id, :partition_number])
+ end
+
+ it 'raises ActiveRecord::StatementInvalid' do
+ expect do
+ model.swap_primary_key(:test_table, :test_table_pkey, :new_index_name)
+ end.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+ end
+
+ describe '#unswap_primary_key' do
+ it 'executes statements to unswap primary key' do
+ expect(model).to receive(:with_lock_retries).with(raise_on_exhaustion: true).ordered.and_yield
+ expect(model).to receive(:execute).with(/ALTER TABLE "test_table" DROP CONSTRAINT "test_table_pkey" CASCADE/).ordered.and_call_original
+ expect(model).to receive(:execute).with(/ALTER TABLE "test_table" ADD CONSTRAINT "test_table_pkey" PRIMARY KEY USING INDEX "old_index_name"/).ordered.and_call_original
+
+ model.unswap_primary_key(:test_table, :test_table_pkey, :old_index_name)
+ end
+ end
+ end
+
describe '#drop_sequence' do
it "executes the statement to drop the sequence" do
expect(model).to receive(:execute).with /ALTER TABLE "test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "test_table_id_seq"/
diff --git a/spec/lib/gitlab/database/migrations/base_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/base_background_runner_spec.rb
index 34c83c42056..c2dc260b2ff 100644
--- a/spec/lib/gitlab/database/migrations/base_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/base_background_runner_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::BaseBackgroundRunner, :freeze_time do
+ let(:connection) { ApplicationRecord.connection }
+
let(:result_dir) { Dir.mktmpdir }
after do
@@ -10,7 +12,7 @@ RSpec.describe Gitlab::Database::Migrations::BaseBackgroundRunner, :freeze_time
end
context 'subclassing' do
- subject { described_class.new(result_dir: result_dir) }
+ subject { described_class.new(result_dir: result_dir, connection: connection) }
it 'requires that jobs_by_migration_name be implemented' do
expect { subject.jobs_by_migration_name }.to raise_error(NotImplementedError)
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index a37247ba0c6..f364ebfa522 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::Database::Migrations::Runner do
+RSpec.describe Gitlab::Database::Migrations::Runner, :reestablished_active_record_base do
include Database::MultipleDatabases
- let(:result_dir) { Pathname.new(Dir.mktmpdir) }
+ let(:base_result_dir) { Pathname.new(Dir.mktmpdir) }
let(:migration_runs) { [] } # This list gets populated as the runner tries to run migrations
@@ -26,11 +26,14 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
end
before do
- stub_const('Gitlab::Database::Migrations::Runner::BASE_RESULT_DIR', result_dir)
+ skip_if_multiple_databases_not_setup unless database == :main
+
+ stub_const('Gitlab::Database::Migrations::Runner::BASE_RESULT_DIR', base_result_dir)
allow(ActiveRecord::Migrator).to receive(:new) do |dir, _all_migrations, _schema_migration_class, version_to_migrate|
migrator = double(ActiveRecord::Migrator)
expect(migrator).to receive(:run) do
- migration_runs << double('migrator', dir: dir, version_to_migrate: version_to_migrate)
+ config_for_migration_run = ActiveRecord::Base.connection_db_config
+ migration_runs << double('migrator', dir: dir, version_to_migrate: version_to_migrate, database: config_for_migration_run.name)
end
migrator
end
@@ -39,133 +42,153 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
migrations = applied_migrations_other_branches + applied_migrations_this_branch + pending_migrations
ctx = double(ActiveRecord::MigrationContext, get_all_versions: all_versions, migrations: migrations, schema_migration: ActiveRecord::SchemaMigration)
- allow(described_class).to receive(:migration_context).and_return(ctx)
+ allow(ActiveRecord::Base.connection).to receive(:migration_context).and_return(ctx)
names_this_branch = (applied_migrations_this_branch + pending_migrations).map { |m| "db/migrate/#{m.version}_#{m.name}.rb" }
allow(described_class).to receive(:migration_file_names_this_branch).and_return(names_this_branch)
end
after do
- FileUtils.rm_rf(result_dir)
+ FileUtils.rm_rf(base_result_dir)
end
- it 'creates the results dir when one does not exist' do
- FileUtils.rm_rf(result_dir)
-
- expect do
- described_class.new(direction: :up, migrations: [], result_dir: result_dir).run
- end.to change { Dir.exist?(result_dir) }.from(false).to(true)
+ where(:case_name, :database, :result_dir, :legacy_mode, :expected_schema_version) do
+ [
+ ['main database', :main, lazy { base_result_dir.join('main') }, false, described_class::SCHEMA_VERSION],
+ ['main database (legacy mode)', :main, lazy { base_result_dir }, true, 3],
+ ['ci database', :ci, lazy { base_result_dir.join('ci') }, false, described_class::SCHEMA_VERSION]
+ ]
end
- describe '.up' do
- context 'result directory' do
- it 'uses the /up subdirectory' do
- expect(described_class.up.result_dir).to eq(result_dir.join('up'))
- end
+ with_them do
+ it 'creates the results dir when one does not exist' do
+ FileUtils.rm_rf(result_dir)
+
+ expect do
+ described_class.new(direction: :up, migrations: [], database: database).run
+ end.to change { Dir.exist?(result_dir) }.from(false).to(true)
end
- context 'migrations to run' do
- subject(:up) { described_class.up }
+ describe '.up' do
+ context 'result directory' do
+ it 'uses the /up subdirectory' do
+ expect(described_class.up(database: database, legacy_mode: legacy_mode).result_dir).to eq(result_dir.join('up'))
+ end
+ end
+
+ context 'migrations to run' do
+ subject(:up) { described_class.up(database: database, legacy_mode: legacy_mode) }
- it 'is the list of pending migrations' do
- expect(up.migrations).to eq(pending_migrations)
+ it 'is the list of pending migrations' do
+ expect(up.migrations).to eq(pending_migrations)
+ end
end
- end
- context 'running migrations' do
- subject(:up) { described_class.up }
+ context 'running migrations' do
+ subject(:up) { described_class.up(database: database, legacy_mode: legacy_mode) }
- it 'runs the unapplied migrations in version order', :aggregate_failures do
- up.run
+ it 'runs the unapplied migrations in version order', :aggregate_failures do
+ up.run
- expect(migration_runs.map(&:dir)).to match_array([:up, :up])
- expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version))
- end
+ expect(migration_runs.map(&:dir)).to match_array([:up, :up])
+ expect(migration_runs.map(&:version_to_migrate)).to eq(pending_migrations.map(&:version))
+ end
- it 'writes a metadata file with the current schema version' do
- up.run
+ it 'writes a metadata file with the current schema version and database name' do
+ up.run
- metadata_file = result_dir.join('up', described_class::METADATA_FILENAME)
- expect(metadata_file.exist?).to be_truthy
- metadata = Gitlab::Json.parse(File.read(metadata_file))
- expect(metadata).to match('version' => described_class::SCHEMA_VERSION)
+ metadata_file = result_dir.join('up', described_class::METADATA_FILENAME)
+ expect(metadata_file.exist?).to be_truthy
+ metadata = Gitlab::Json.parse(File.read(metadata_file))
+ expect(metadata).to match('version' => expected_schema_version, 'database' => database.to_s)
+ end
+
+ it 'runs the unapplied migrations on the correct database' do
+ up.run
+
+ expect(migration_runs.map(&:database).uniq).to contain_exactly(database.to_s)
+ end
end
end
- end
- describe '.down' do
- subject(:down) { described_class.down }
+ describe '.down' do
+ subject(:down) { described_class.down(database: database, legacy_mode: legacy_mode) }
- context 'result directory' do
- it 'is the /down subdirectory' do
- expect(down.result_dir).to eq(result_dir.join('down'))
+ context 'result directory' do
+ it 'is the /down subdirectory' do
+ expect(down.result_dir).to eq(result_dir.join('down'))
+ end
end
- end
- context 'migrations to run' do
- it 'is the list of migrations that are up and on this branch' do
- expect(down.migrations).to eq(applied_migrations_this_branch)
+ context 'migrations to run' do
+ it 'is the list of migrations that are up and on this branch' do
+ expect(down.migrations).to eq(applied_migrations_this_branch)
+ end
end
- end
- context 'running migrations' do
- it 'runs the applied migrations for the current branch in reverse order', :aggregate_failures do
- down.run
+ context 'running migrations' do
+ it 'runs the applied migrations for the current branch in reverse order', :aggregate_failures do
+ down.run
- expect(migration_runs.map(&:dir)).to match_array([:down, :down])
- expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version))
+ expect(migration_runs.map(&:dir)).to match_array([:down, :down])
+ expect(migration_runs.map(&:version_to_migrate)).to eq(applied_migrations_this_branch.reverse.map(&:version))
+ end
end
- end
-
- it 'writes a metadata file with the current schema version' do
- down.run
- metadata_file = result_dir.join('down', described_class::METADATA_FILENAME)
- expect(metadata_file.exist?).to be_truthy
- metadata = Gitlab::Json.parse(File.read(metadata_file))
- expect(metadata).to match('version' => described_class::SCHEMA_VERSION)
- end
- end
+ it 'writes a metadata file with the current schema version' do
+ down.run
- describe '.background_migrations' do
- it 'is a TestBackgroundRunner' do
- expect(described_class.background_migrations).to be_a(Gitlab::Database::Migrations::TestBackgroundRunner)
+ metadata_file = result_dir.join('down', described_class::METADATA_FILENAME)
+ expect(metadata_file.exist?).to be_truthy
+ metadata = Gitlab::Json.parse(File.read(metadata_file))
+ expect(metadata).to match('version' => expected_schema_version, 'database' => database.to_s)
+ end
end
- it 'is configured with a result dir of /background_migrations' do
- runner = described_class.background_migrations
+ describe '.background_migrations' do
+ it 'is a TestBackgroundRunner' do
+ expect(described_class.background_migrations).to be_a(Gitlab::Database::Migrations::TestBackgroundRunner)
+ end
- expect(runner.result_dir).to eq(described_class::BASE_RESULT_DIR.join( 'background_migrations'))
- end
- end
+ it 'is configured with a result dir of /background_migrations' do
+ runner = described_class.background_migrations
- describe '.batched_background_migrations' do
- it 'is a TestBatchedBackgroundRunner' do
- expect(described_class.batched_background_migrations(for_database: 'main')).to be_a(Gitlab::Database::Migrations::TestBatchedBackgroundRunner)
+ expect(runner.result_dir).to eq(described_class::BASE_RESULT_DIR.join( 'background_migrations'))
+ end
end
- context 'choosing the database to test against' do
- it 'chooses the main database' do
- runner = described_class.batched_background_migrations(for_database: 'main')
+ describe '.batched_background_migrations' do
+ it 'is a TestBatchedBackgroundRunner' do
+ expect(described_class.batched_background_migrations(for_database: database)).to be_a(Gitlab::Database::Migrations::TestBatchedBackgroundRunner)
+ end
- chosen_connection_name = Gitlab::Database.db_config_name(runner.connection)
+ context 'choosing the database to test against' do
+ it 'chooses the provided database' do
+ runner = described_class.batched_background_migrations(for_database: database)
- expect(chosen_connection_name).to eq('main')
- end
+ chosen_connection_name = Gitlab::Database.db_config_name(runner.connection)
- it 'chooses the ci database' do
- skip_if_multiple_databases_not_setup
+ expect(chosen_connection_name).to eq(database.to_s)
+ end
- runner = described_class.batched_background_migrations(for_database: 'ci')
+ it 'throws an error with an invalid name' do
+ expect { described_class.batched_background_migrations(for_database: 'not_a_database') }
+ .to raise_error(/not a valid database name/)
+ end
- chosen_connection_name = Gitlab::Database.db_config_name(runner.connection)
+ it 'includes the database name in the result dir' do
+ runner = described_class.batched_background_migrations(for_database: database)
- expect(chosen_connection_name).to eq('ci')
+ expect(runner.result_dir).to eq(base_result_dir.join(database.to_s, 'background_migrations'))
+ end
end
- it 'throws an error with an invalid name' do
- expect { described_class.batched_background_migrations(for_database: 'not_a_database') }
- .to raise_error(/not a valid database name/)
+ context 'legacy mode' do
+ it 'does not include the database name in the path' do
+ runner = described_class.batched_background_migrations(for_database: database, legacy_mode: true)
+
+ expect(runner.result_dir).to eq(base_result_dir.join('background_migrations'))
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
index 3ac483c8ab7..07226f3d025 100644
--- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
@@ -6,106 +6,156 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
include Gitlab::Database::MigrationHelpers
include Database::MigrationTestingHelpers
- let(:result_dir) { Dir.mktmpdir }
-
- after do
- FileUtils.rm_rf(result_dir)
+ def queue_migration(
+ job_class_name,
+ batch_table_name,
+ batch_column_name,
+ *job_arguments,
+ job_interval:,
+ batch_size: Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::BATCH_SIZE,
+ sub_batch_size: Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::SUB_BATCH_SIZE
+ )
+
+ batch_max_value = define_batchable_model(batch_table_name, connection: connection).maximum(batch_column_name)
+
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ Gitlab::Database::BackgroundMigration::BatchedMigration.create!(
+ job_class_name: job_class_name,
+ table_name: batch_table_name,
+ column_name: batch_column_name,
+ job_arguments: job_arguments,
+ interval: job_interval,
+ min_value: Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::BATCH_MIN_VALUE,
+ max_value: batch_max_value,
+ batch_class_name: Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::BATCH_CLASS_NAME,
+ batch_size: batch_size,
+ sub_batch_size: sub_batch_size,
+ status_event: :execute,
+ max_batch_size: nil,
+ gitlab_schema: gitlab_schema
+ )
+ end
end
- let(:migration) do
- ActiveRecord::Migration.new.extend(Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers)
+ where(:case_name, :base_model, :gitlab_schema) do
+ [
+ ['main database', ApplicationRecord, :gitlab_main],
+ ['ci database', Ci::ApplicationRecord, :gitlab_ci]
+ ]
end
- let(:connection) { ApplicationRecord.connection }
+ with_them do
+ let(:result_dir) { Dir.mktmpdir }
- let(:table_name) { "_test_column_copying" }
+ after do
+ FileUtils.rm_rf(result_dir)
+ end
- before do
- connection.execute(<<~SQL)
- CREATE TABLE #{table_name} (
- id bigint primary key not null,
- data bigint default 0
- );
+ let(:connection) { base_model.connection }
- insert into #{table_name} (id) select i from generate_series(1, 1000) g(i);
- SQL
+ let(:table_name) { "_test_column_copying" }
- allow(migration).to receive(:transaction_open?).and_return(false)
- end
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id bigint primary key not null,
+ data bigint default 0
+ );
- context 'running a real background migration' do
- it 'runs sampled jobs from the batched background migration' do
- migration.queue_batched_background_migration('CopyColumnUsingBackgroundMigrationJob',
- table_name, :id,
- :id, :data,
- batch_size: 100,
- job_interval: 5.minutes) # job_interval is skipped when testing
-
- # Expect that running sampling for this migration processes some of the rows. Sampling doesn't run
- # over every row in the table, so this does not completely migrate the table.
- expect { described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 1.minute) }
- .to change { define_batchable_model(table_name).where('id IS DISTINCT FROM data').count }
- .by_at_most(-1)
+ insert into #{table_name} (id) select i from generate_series(1, 1000) g(i);
+ SQL
end
- end
- context 'with jobs to run' do
- let(:migration_name) { 'TestBackgroundMigration' }
+ context 'running a real background migration' do
+ before do
+ queue_migration('CopyColumnUsingBackgroundMigrationJob',
+ table_name, :id,
+ :id, :data,
+ batch_size: 100,
+ job_interval: 5.minutes) # job_interval is skipped when testing
+ end
- it 'samples jobs' do
- calls = []
- define_background_migration(migration_name) do |*args|
- calls << args
+ subject(:sample_migration) do
+ described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 1.minute)
end
- migration.queue_batched_background_migration(migration_name, table_name, :id,
- job_interval: 5.minutes,
- batch_size: 100)
+ it 'runs sampled jobs from the batched background migration' do
+ # Expect that running sampling for this migration processes some of the rows. Sampling doesn't run
+ # over every row in the table, so this does not completely migrate the table.
+ expect { subject }.to change {
+ define_batchable_model(table_name, connection: connection)
+ .where('id IS DISTINCT FROM data').count
+ }.by_at_most(-1)
+ end
- described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 3.minutes)
+ it 'uses the correct connection to instrument the background migration' do
+ expect_next_instance_of(Gitlab::Database::Migrations::Instrumentation) do |instrumentation|
+ expect(instrumentation).to receive(:observe).with(hash_including(connection: connection))
+ .at_least(:once).and_call_original
+ end
- expect(calls).not_to be_empty
+ subject
+ end
end
- context 'with multiple jobs to run' do
- it 'runs all jobs created within the last 3 hours' do
- old_migration = define_background_migration(migration_name)
- migration.queue_batched_background_migration(migration_name, table_name, :id,
- job_interval: 5.minutes,
- batch_size: 100)
-
- travel 4.hours
-
- new_migration = define_background_migration('NewMigration') { travel 1.second }
- migration.queue_batched_background_migration('NewMigration', table_name, :id,
- job_interval: 5.minutes,
- batch_size: 10,
- sub_batch_size: 5)
-
- other_new_migration = define_background_migration('NewMigration2') { travel 2.seconds }
- migration.queue_batched_background_migration('NewMigration2', table_name, :id,
- job_interval: 5.minutes,
- batch_size: 10,
- sub_batch_size: 5)
-
- expect_migration_runs(new_migration => 3, other_new_migration => 2, old_migration => 0) do
- described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 5.seconds)
+ context 'with jobs to run' do
+ let(:migration_name) { 'TestBackgroundMigration' }
+
+ it 'samples jobs' do
+ calls = []
+ define_background_migration(migration_name) do |*args|
+ calls << args
+ end
+
+ queue_migration(migration_name, table_name, :id,
+ job_interval: 5.minutes,
+ batch_size: 100)
+
+ described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 3.minutes)
+
+ expect(calls).not_to be_empty
+ end
+
+ context 'with multiple jobs to run' do
+ it 'runs all jobs created within the last 3 hours' do
+ old_migration = define_background_migration(migration_name)
+ queue_migration(migration_name, table_name, :id,
+ job_interval: 5.minutes,
+ batch_size: 100)
+
+ travel 4.hours
+
+ new_migration = define_background_migration('NewMigration') { travel 1.second }
+ queue_migration('NewMigration', table_name, :id,
+ job_interval: 5.minutes,
+ batch_size: 10,
+ sub_batch_size: 5)
+
+ other_new_migration = define_background_migration('NewMigration2') { travel 2.seconds }
+ queue_migration('NewMigration2', table_name, :id,
+ job_interval: 5.minutes,
+ batch_size: 10,
+ sub_batch_size: 5)
+
+ expect_migration_runs(new_migration => 3, other_new_migration => 2, old_migration => 0) do
+ described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 5.seconds)
+ end
end
end
end
- end
- context 'choosing uniform batches to run' do
- subject { described_class.new(result_dir: result_dir, connection: connection) }
+ context 'choosing uniform batches to run' do
+ subject { described_class.new(result_dir: result_dir, connection: connection) }
- describe '#uniform_fractions' do
- it 'generates evenly distributed sequences of fractions' do
- received = subject.uniform_fractions.take(9)
- expected = [0, 1, 1.0 / 2, 1.0 / 4, 3.0 / 4, 1.0 / 8, 3.0 / 8, 5.0 / 8, 7.0 / 8]
+ describe '#uniform_fractions' do
+ it 'generates evenly distributed sequences of fractions' do
+ received = subject.uniform_fractions.take(9)
+ expected = [0, 1, 1.0 / 2, 1.0 / 4, 3.0 / 4, 1.0 / 8, 3.0 / 8, 5.0 / 8, 7.0 / 8]
- # All the fraction numerators are small integers, and all denominators are powers of 2, so these
- # fit perfectly into floating point numbers with zero loss of precision
- expect(received).to eq(expected)
+ # All the fraction numerators are small integers, and all denominators are powers of 2, so these
+ # fit perfectly into floating point numbers with zero loss of precision
+ expect(received).to eq(expected)
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
index 8a35d8149ad..b39b273bba9 100644
--- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
+++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
@@ -53,15 +53,16 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns do
describe '#execute' do
it 'returns a list of class names and columns pairs' do
travel_to(REMOVE_DATE) do
- expect(subject.execute).to eq([
- ['Testing::A', {
- 'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
- 'also_unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-02-01'), '12.1')
- }],
- ['Testing::B', {
- 'other' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0')
- }]
- ])
+ expect(subject.execute).to eq(
+ [
+ ['Testing::A', {
+ 'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
+ 'also_unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-02-01'), '12.1')
+ }],
+ ['Testing::B', {
+ 'other' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0')
+ }]
+ ])
end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb b/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb
index af7d751a404..0e804b4feac 100644
--- a/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/convert_table_to_first_list_partition_spec.rb
@@ -153,6 +153,21 @@ RSpec.describe Gitlab::Database::Partitioning::ConvertTableToFirstListPartition
expect(parent_model.pluck(:id)).to match_array([1, 2, 3])
end
+ context 'when the existing table is owned by a different user' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE USER other_user SUPERUSER;
+ ALTER TABLE #{table_name} OWNER TO other_user;
+ SQL
+ end
+
+ let(:current_user) { model.connection.select_value('select current_user') }
+
+ it 'partitions without error' do
+ expect { partition }.not_to raise_error
+ end
+ end
+
context 'when an error occurs during the conversion' do
def fail_first_time
# We can't directly use a boolean here, as we need something that will be passed by-reference to the proc
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index 67d80d71e2a..50115a6f3dd 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -29,10 +29,11 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
end
it 'detects both partitions' do
- expect(subject).to eq([
- Gitlab::Database::Partitioning::TimePartition.new(table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'),
- Gitlab::Database::Partitioning::TimePartition.new(table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005')
- ])
+ expect(subject).to eq(
+ [
+ Gitlab::Database::Partitioning::TimePartition.new(table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'),
+ Gitlab::Database::Partitioning::TimePartition.new(table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005')
+ ])
end
end
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 07c2c6606d8..550f254c4da 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -36,14 +36,15 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
describe '#current_partitions' do
it 'detects both partitions' do
- expect(strategy.current_partitions).to eq([
- Gitlab::Database::Partitioning::SingleNumericListPartition.new(
- table_name, 1, partition_name: '_test_partitioned_test_1'
- ),
- Gitlab::Database::Partitioning::SingleNumericListPartition.new(
- table_name, 2, partition_name: '_test_partitioned_test_2'
- )
- ])
+ expect(strategy.current_partitions).to eq(
+ [
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(
+ table_name, 1, partition_name: '_test_partitioned_test_1'
+ ),
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(
+ table_name, 2, partition_name: '_test_partitioned_test_2'
+ )
+ ])
end
end
diff --git a/spec/lib/gitlab/database/partitioning/time_partition_spec.rb b/spec/lib/gitlab/database/partitioning/time_partition_spec.rb
index 700202d81c5..5a17e8d20cf 100644
--- a/spec/lib/gitlab/database/partitioning/time_partition_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/time_partition_spec.rb
@@ -156,12 +156,13 @@ RSpec.describe Gitlab::Database::Partitioning::TimePartition do
described_class.new(table, '2020-03-01', '2020-04-01')
]
- expect(partitions.sort).to eq([
- described_class.new(table, nil, '2020-02-01'),
- described_class.new(table, '2020-02-01', '2020-03-01'),
- described_class.new(table, '2020-03-01', '2020-04-01'),
- described_class.new(table, '2020-04-01', '2020-05-01')
- ])
+ expect(partitions.sort).to eq(
+ [
+ described_class.new(table, nil, '2020-02-01'),
+ described_class.new(table, '2020-02-01', '2020-03-01'),
+ described_class.new(table, '2020-03-01', '2020-04-01'),
+ described_class.new(table, '2020-04-01', '2020-05-01')
+ ])
end
it 'returns nil for partitions of different tables' do
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index 94cdbfb2328..db5ca890155 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -130,12 +130,14 @@ RSpec.describe Gitlab::Database::Partitioning do
context 'when no partitioned models are given' do
it 'manages partitions for each registered model' do
described_class.register_models([models.first])
- described_class.register_tables([
- {
- table_name: table_names.last,
- partitioned_column: :created_at, strategy: :monthly
- }
- ])
+ described_class.register_tables(
+ [
+ {
+ table_name: table_names.last,
+ partitioned_column: :created_at,
+ strategy: :monthly
+ }
+ ])
expect { described_class.sync_partitions }
.to change { find_partitions(table_names.first).size }.from(0)
diff --git a/spec/lib/gitlab/database/reflection_spec.rb b/spec/lib/gitlab/database/reflection_spec.rb
index efc5bd1c1e1..389e93364c8 100644
--- a/spec/lib/gitlab/database/reflection_spec.rb
+++ b/spec/lib/gitlab/database/reflection_spec.rb
@@ -314,6 +314,12 @@ RSpec.describe Gitlab::Database::Reflection do
expect(database.flavor).to eq('Azure Database for PostgreSQL - Single Server')
end
+ it 'recognizes AlloyDB for PostgreSQL' do
+ stub_statements("SELECT name FROM pg_settings WHERE name LIKE 'alloydb%'")
+
+ expect(database.flavor).to eq('AlloyDB for PostgreSQL')
+ end
+
it 'returns nil if can not recognize the flavor' do
expect(database.flavor).to be_nil
end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
index e222a29c6a1..ac2de43b7c6 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb
@@ -98,7 +98,9 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
it 'moves a project for a namespace' do
create(:project, :repository, :legacy_storage, namespace: namespace, path: 'hello-project')
- expected_path = File.join(TestEnv.repos_path, 'bye-group', 'hello-project.git')
+ expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, 'bye-group', 'hello-project.git')
+ end
subject.move_repositories(namespace, 'hello-group', 'bye-group')
@@ -109,7 +111,9 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
child_namespace = create(:group, name: 'sub-group', parent: namespace)
create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
- expected_path = File.join(TestEnv.repos_path, 'hello-group', 'renamed-sub-group', 'hello-project.git')
+ expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, 'hello-group', 'renamed-sub-group', 'hello-project.git')
+ end
subject.move_repositories(child_namespace, 'hello-group/sub-group', 'hello-group/renamed-sub-group')
@@ -119,7 +123,9 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
it 'moves a parent namespace with subdirectories' do
child_namespace = create(:group, name: 'sub-group', parent: namespace)
create(:project, :repository, :legacy_storage, namespace: child_namespace, path: 'hello-project')
- expected_path = File.join(TestEnv.repos_path, 'renamed-group', 'sub-group', 'hello-project.git')
+ expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, 'renamed-group', 'sub-group', 'hello-project.git')
+ end
subject.move_repositories(child_namespace, 'hello-group', 'renamed-group')
@@ -170,7 +176,9 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
describe '#rename_namespace_dependencies' do
it "moves the repository for a project in the namespace" do
create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project")
- expected_repo = File.join(TestEnv.repos_path, "the-path0", "the-path-project.git")
+ expected_repo = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, "the-path0", "the-path-project.git")
+ end
subject.rename_namespace_dependencies(namespace, 'the-path', 'the-path0')
@@ -268,7 +276,9 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespa
project.create_repository
subject.rename_namespace(namespace)
- expected_path = File.join(TestEnv.repos_path, 'the-path', 'a-project.git')
+ expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, 'the-path', 'a-project.git')
+ end
expect(subject).to receive(:rename_namespace_dependencies)
.with(
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
index 50071e3e22b..6292f0246f7 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb
@@ -126,7 +126,9 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProject
let(:project) { create(:project, :repository, :legacy_storage, path: 'the-path', namespace: known_parent) }
it 'moves the repository for a project' do
- expected_path = File.join(TestEnv.repos_path, 'known-parent', 'new-repo.git')
+ expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, 'known-parent', 'new-repo.git')
+ end
subject.move_repository(project, 'known-parent/the-path', 'known-parent/new-repo')
@@ -155,7 +157,9 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProject
project.create_repository
subject.rename_project(project)
- expected_path = File.join(TestEnv.repos_path, 'known-parent', 'the-path.git')
+ expected_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, 'known-parent', 'the-path.git')
+ end
expect(subject).to receive(:move_project_folders)
.with(
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
index b7b66494390..cfee70ed208 100644
--- a/spec/lib/gitlab/database/similarity_score_spec.rb
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -78,10 +78,11 @@ RSpec.describe Gitlab::Database::SimilarityScore do
describe 'score multiplier' do
let(:order_expression) do
- Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [
- { column: Arel.sql('path'), multiplier: 1 },
- { column: Arel.sql('name'), multiplier: 0.8 }
- ]).to_sql
+ Gitlab::Database::SimilarityScore.build_expression(search: search, rules:
+ [
+ { column: Arel.sql('path'), multiplier: 1 },
+ { column: Arel.sql('name'), multiplier: 0.8 }
+ ]).to_sql
end
let(:search) { 'different' }
@@ -93,10 +94,11 @@ RSpec.describe Gitlab::Database::SimilarityScore do
describe 'annotation' do
it 'annotates the generated SQL expression' do
- expression = Gitlab::Database::SimilarityScore.build_expression(search: 'test', rules: [
- { column: Arel.sql('path'), multiplier: 1 },
- { column: Arel.sql('name'), multiplier: 0.8 }
- ])
+ expression = Gitlab::Database::SimilarityScore.build_expression(search: 'test', rules:
+ [
+ { column: Arel.sql('path'), multiplier: 1 },
+ { column: Arel.sql('name'), multiplier: 0.8 }
+ ])
expect(Gitlab::Database::SimilarityScore).to be_order_by_similarity(expression)
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index c893bca9e62..eb42734d044 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -32,21 +32,6 @@ RSpec.describe Gitlab::Database do
end
describe '.has_config?' do
- context 'two tier database config' do
- before do
- allow(Gitlab::Application).to receive_message_chain(:config, :database_configuration, :[]).with(Rails.env)
- .and_return({ "adapter" => "postgresql", "database" => "gitlabhq_test" })
- end
-
- it 'returns false for primary' do
- expect(described_class.has_config?(:primary)).to eq(false)
- end
-
- it 'returns false for ci' do
- expect(described_class.has_config?(:ci)).to eq(false)
- end
- end
-
context 'three tier database config' do
before do
allow(Gitlab::Application).to receive_message_chain(:config, :database_configuration, :[]).with(Rails.env)
diff --git a/spec/lib/gitlab/diff/char_diff_spec.rb b/spec/lib/gitlab/diff/char_diff_spec.rb
index d38008c16f2..ca0ed6e840d 100644
--- a/spec/lib/gitlab/diff/char_diff_spec.rb
+++ b/spec/lib/gitlab/diff/char_diff_spec.rb
@@ -20,22 +20,24 @@ RSpec.describe Gitlab::Diff::CharDiff do
it 'treats nil values as blank strings' do
changes = subject.generate_diff
- expect(changes).to eq([
- [:insert, "Hello \n World"]
- ])
+ expect(changes).to eq(
+ [
+ [:insert, "Hello \n World"]
+ ])
end
end
it 'generates an array of changes' do
changes = subject.generate_diff
- expect(changes).to eq([
- [:equal, "Hel"],
- [:insert, "l"],
- [:equal, "o \n Worl"],
- [:delete, "l"],
- [:equal, "d"]
- ])
+ expect(changes).to eq(
+ [
+ [:equal, "Hel"],
+ [:insert, "l"],
+ [:equal, "o \n Worl"],
+ [:delete, "l"],
+ [:equal, "d"]
+ ])
end
end
diff --git a/spec/lib/gitlab/diff/file_collection_sorter_spec.rb b/spec/lib/gitlab/diff/file_collection_sorter_spec.rb
index ca9c156c1ad..3f0b0ad5775 100644
--- a/spec/lib/gitlab/diff/file_collection_sorter_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection_sorter_spec.rb
@@ -33,27 +33,28 @@ RSpec.describe Gitlab::Diff::FileCollectionSorter do
let(:sorted_files_paths) { subject.sort.map { |file| file.new_path.presence || file.old_path } }
it 'returns list sorted directory first' do
- expect(sorted_files_paths).to eq([
- '.dir/test',
- '1-folder/nested/A-file.ext',
- '1-folder/nested/M-file.ext',
- '1-folder/nested/Z-file.ext',
- '1-folder/A-file.ext',
- '1-folder/M-file.ext',
- '1-folder/README',
- '1-folder/README',
- '1-folder/Z-file.ext',
- '2-folder/nested/A-file.ext',
- '2-folder/A-file.ext',
- '2-folder/M-file.ext',
- '2-folder/Z-file.ext',
- '.file',
- 'A-file.ext',
- 'M-file.ext',
- 'README',
- 'README',
- 'Z-file.ext'
- ])
+ expect(sorted_files_paths).to eq(
+ [
+ '.dir/test',
+ '1-folder/nested/A-file.ext',
+ '1-folder/nested/M-file.ext',
+ '1-folder/nested/Z-file.ext',
+ '1-folder/A-file.ext',
+ '1-folder/M-file.ext',
+ '1-folder/README',
+ '1-folder/README',
+ '1-folder/Z-file.ext',
+ '2-folder/nested/A-file.ext',
+ '2-folder/A-file.ext',
+ '2-folder/M-file.ext',
+ '2-folder/Z-file.ext',
+ '.file',
+ 'A-file.ext',
+ 'M-file.ext',
+ 'README',
+ 'README',
+ 'Z-file.ext'
+ ])
end
end
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 28557aab830..d623a390dc8 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -309,12 +309,13 @@ RSpec.describe Gitlab::Diff::File do
let(:diffs) { commit.diffs }
before do
- info_dir_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(project.repository.path_to_repo, 'info')
- end
-
- FileUtils.mkdir(info_dir_path) unless File.exist?(info_dir_path)
- File.write(File.join(info_dir_path, 'attributes'), "*.md -diff\n")
+ project.repository.commit_files(
+ project.creator,
+ branch_name: 'master',
+ message: 'Add attributes',
+ actions: [{ action: :update, file_path: '.gitattributes', content: "*.md -diff\n" }]
+ )
+ project.repository.copy_gitattributes('master')
end
it "returns true for files that do not have attributes" do
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 53e74748234..33e9360ee01 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -109,58 +109,36 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
end
shared_examples 'caches missing entries' do
- where(:expiration_period, :renewable_expiration_ff, :short_renewable_expiration_ff) do
- [
- [1.day, false, true],
- [1.day, false, false],
- [1.hour, true, true],
- [8.hours, true, false]
- ]
- end
-
- with_them do
- before do
- stub_feature_flags(
- highlight_diffs_renewable_expiration: renewable_expiration_ff,
- highlight_diffs_short_renewable_expiration: short_renewable_expiration_ff
- )
- end
+ it 'filters the key/value list of entries to be cached for each invocation' do
+ expect(cache).to receive(:write_to_redis_hash)
+ .with(hash_including(*paths))
+ .once
+ .and_call_original
- it 'filters the key/value list of entries to be caches for each invocation' do
- expect(cache).to receive(:write_to_redis_hash)
- .with(hash_including(*paths))
- .once
- .and_call_original
-
- 2.times { cache.write_if_empty }
- end
+ 2.times { cache.write_if_empty }
+ end
- it 'reads from cache once' do
- expect(cache).to receive(:read_cache).once.and_call_original
+ it 'reads from cache once' do
+ expect(cache).to receive(:read_cache).once.and_call_original
- cache.write_if_empty
- end
+ cache.write_if_empty
+ end
- it 'refreshes TTL of the key on read' do
- cache.write_if_empty
+ it 'refreshes TTL of the key on read' do
+ cache.write_if_empty
- time_until_expire = 30.minutes
+ time_until_expire = 30.minutes
- Gitlab::Redis::Cache.with do |redis|
- # Emulate that a key is going to expire soon
- redis.expire(cache.key, time_until_expire)
+ Gitlab::Redis::Cache.with do |redis|
+ # Emulate that a key is going to expire soon
+ redis.expire(cache.key, time_until_expire)
- expect(redis.ttl(cache.key)).to be <= time_until_expire
+ expect(redis.ttl(cache.key)).to be <= time_until_expire
- cache.send(:read_cache)
+ cache.send(:read_cache)
- if renewable_expiration_ff
- expect(redis.ttl(cache.key)).to be > time_until_expire
- expect(redis.ttl(cache.key)).to be_within(1.minute).of(expiration_period)
- else
- expect(redis.ttl(cache.key)).to be <= time_until_expire
- end
- end
+ expect(redis.ttl(cache.key)).to be > time_until_expire
+ expect(redis.ttl(cache.key)).to be_within(1.minute).of(described_class::EXPIRATION)
end
end
end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 690396d4dbc..c62e3071fc1 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -114,18 +114,6 @@ RSpec.describe Gitlab::EncodingHelper do
expect(ext_class.encode_utf8_with_escaping!(input)).to eq(expected)
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(escape_gitaly_refs: false)
- end
-
- it 'uses #encode! method' do
- expect(ext_class).to receive(:encode!).with('String')
-
- ext_class.encode_utf8_with_escaping!('String')
- end
- end
end
describe '#encode_utf8' do
diff --git a/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb b/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb
index 3d23249d00d..73ebee49169 100644
--- a/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb
+++ b/spec/lib/gitlab/error_tracking/stack_trace_highlight_decorator_spec.rb
@@ -53,9 +53,9 @@ RSpec.describe Gitlab::ErrorTracking::StackTraceHighlightDecorator do
'lineNo' => 3,
'filename' => 'hello_world.php',
'context' => [
- [1, '<span id="LC1" class="line" lang="hack"><span class="c1">// PHP/Hack example</span></span>'],
- [2, '<span id="LC1" class="line" lang="hack"><span class="cp">&lt;?php</span></span>'],
- [3, '<span id="LC1" class="line" lang="hack"><span class="k">echo</span> <span class="s1">\'Hello, World!\'</span><span class="p">;</span></span>']
+ [1, '<span id="LC1" class="line" lang="hack"><span class="c1">// PHP/Hack example</span></span>'],
+ [2, '<span id="LC1" class="line" lang="hack"><span class="cp">&lt;?php</span></span>'],
+ [3, '<span id="LC1" class="line" lang="hack"><span class="k">echo</span> <span class="s1">\'Hello, World!\'</span><span class="p">;</span></span>']
]
},
{
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
deleted file mode 100644
index 799884d7a74..00000000000
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ /dev/null
@@ -1,675 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
- include TrackingHelpers
-
- before do
- stub_const('Gitlab::Experimentation::EXPERIMENTS', {
- test_experiment: {
- tracking_category: 'Team',
- rollout_strategy: rollout_strategy
- },
- my_experiment: {
- tracking_category: 'Team'
- }
- }
- )
-
- allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
-
- Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
- end
-
- let(:enabled_percentage) { 10 }
- let(:rollout_strategy) { nil }
- let(:is_gitlab_com) { true }
-
- controller(ApplicationController) do
- include Gitlab::Experimentation::ControllerConcern
-
- def index
- head :ok
- end
- end
-
- describe '#set_experimentation_subject_id_cookie' do
- let(:do_not_track) { nil }
- let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
- let(:cookie_value) { nil }
-
- before do
- stub_do_not_track(do_not_track) if do_not_track.present?
- request.cookies[:experimentation_subject_id] = cookie_value if cookie_value
-
- get :index
- end
-
- context 'cookie is present' do
- let(:cookie_value) { 'test' }
-
- it 'does not change the cookie' do
- expect(cookies[:experimentation_subject_id]).to eq 'test'
- end
- end
-
- context 'cookie is not present' do
- it 'sets a permanent signed cookie' do
- expect(cookie).to be_present
- end
-
- context 'DNT: 0' do
- let(:do_not_track) { '0' }
-
- it 'sets a permanent signed cookie' do
- expect(cookie).to be_present
- end
- end
-
- context 'DNT: 1' do
- let(:do_not_track) { '1' }
-
- it 'does nothing' do
- expect(cookie).not_to be_present
- end
- end
- end
-
- context 'when not on gitlab.com' do
- let(:is_gitlab_com) { false }
-
- context 'when cookie was set' do
- let(:cookie_value) { 'test' }
-
- it 'cookie gets deleted' do
- expect(cookie).not_to be_present
- end
- end
-
- context 'when no cookie was set before' do
- it 'does nothing' do
- expect(cookie).not_to be_present
- end
- end
- end
- end
-
- describe '#push_frontend_experiment' do
- it 'pushes an experiment to the frontend' do
- gon = class_double('Gon')
- stub_experiment_for_subject(my_experiment: true)
- allow(controller).to receive(:gon).and_return(gon)
-
- expect(gon).to receive(:push).with({ experiments: { 'myExperiment' => true } }, true)
-
- controller.push_frontend_experiment(:my_experiment)
- end
- end
-
- describe '#experiment_enabled?' do
- def check_experiment(exp_key = :test_experiment, subject = nil)
- controller.experiment_enabled?(exp_key, subject: subject)
- end
-
- subject { check_experiment }
-
- context 'cookie is not present' do
- it { is_expected.to eq(false) }
- end
-
- context 'cookie is present' do
- before do
- cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
- get :index
- end
-
- it 'calls Gitlab::Experimentation.in_experiment_group? with the name of the experiment and the calculated experimentation_subject_index based on the uuid' do
- expect(Gitlab::Experimentation).to receive(:in_experiment_group?).with(:test_experiment, subject: 'abcd-1234')
-
- check_experiment(:test_experiment)
- end
-
- context 'when subject is given' do
- let(:rollout_strategy) { :user }
- let(:user) { build(:user) }
-
- it 'uses the subject' do
- expect(Gitlab::Experimentation).to receive(:in_experiment_group?).with(:test_experiment, subject: user)
-
- check_experiment(:test_experiment, user)
- end
- end
- end
-
- context 'do not track' do
- before do
- allow(Gitlab::Experimentation).to receive(:in_experiment_group?) { true }
- end
-
- context 'when do not track is disabled' do
- before do
- controller.request.headers['DNT'] = '0'
- end
-
- it { is_expected.to eq(true) }
- end
-
- context 'when do not track is enabled' do
- before do
- controller.request.headers['DNT'] = '1'
- end
-
- it { is_expected.to eq(false) }
- end
- end
-
- context 'URL parameter to force enable experiment' do
- it 'returns true unconditionally' do
- get :index, params: { force_experiment: :test_experiment }
-
- is_expected.to eq(true)
- end
- end
-
- context 'Cookie parameter to force enable experiment' do
- it 'returns true unconditionally' do
- cookies[:force_experiment] = 'test_experiment,another_experiment'
- get :index
-
- expect(check_experiment(:test_experiment)).to eq(true)
- expect(check_experiment(:another_experiment)).to eq(true)
- end
- end
- end
-
- describe '#track_experiment_event', :snowplow do
- let(:user) { build(:user) }
-
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_subject(test_experiment: true)
- end
-
- it 'tracks the event with the right parameters' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'experimental_group',
- value: 1,
- user: user
- )
- end
- end
-
- context 'the user is part of the control group' do
- before do
- stub_experiment_for_subject(test_experiment: false)
- end
-
- it 'tracks the event with the right parameters' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1,
- user: user
- )
- end
- end
-
- context 'do not track is disabled' do
- before do
- stub_do_not_track('0')
- end
-
- it 'does track the event' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1,
- user: user
- )
- end
- end
-
- context 'do not track enabled' do
- before do
- stub_do_not_track('1')
- end
-
- it 'does not track the event' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_no_snowplow_event
- end
- end
-
- context 'subject is provided' do
- before do
- stub_experiment_for_subject(test_experiment: false)
- end
-
- it "provides the subject's hashed global_id as label" do
- experiment_subject = double(:subject, to_global_id: 'abc')
- allow(Gitlab::Experimentation).to receive(:valid_subject_for_rollout_strategy?).and_return(true)
-
- controller.track_experiment_event(:test_experiment, 'start', 1, subject: experiment_subject)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1,
- label: Digest::SHA256.hexdigest('abc'),
- user: user
- )
- end
-
- it "provides the subject's hashed string representation as label" do
- experiment_subject = 'somestring'
-
- controller.track_experiment_event(:test_experiment, 'start', 1, subject: experiment_subject)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1,
- label: Digest::SHA256.hexdigest('somestring'),
- user: user
- )
- end
- end
-
- context 'no subject is provided but cookie is set' do
- before do
- get :index
- stub_experiment_for_subject(test_experiment: false)
- end
-
- it 'uses the experimentation_subject_id as fallback' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1,
- label: cookies.permanent.signed[:experimentation_subject_id],
- user: user
- )
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- end
-
- it 'does not track the event' do
- controller.track_experiment_event(:test_experiment, 'start')
-
- expect_no_snowplow_event
- end
- end
- end
-
- describe '#frontend_experimentation_tracking_data' do
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_subject(test_experiment: true)
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'experimental_group',
- value: 'team_id'
- }
- )
- end
- end
-
- context 'the user is part of the control group' do
- before do
- stub_experiment_for_subject(test_experiment: false)
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 'team_id'
- }
- )
- end
-
- it 'does not send nil value to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group'
- }
- )
- end
- end
-
- context 'do not track disabled' do
- before do
- stub_do_not_track('0')
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
-
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group'
- }
- )
- end
- end
-
- context 'do not track enabled' do
- before do
- stub_do_not_track('1')
- end
-
- it 'does not push data to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
-
- expect(Gon.method_defined?(:tracking_data)).to eq(false)
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- end
-
- it 'does not push data to gon' do
- expect(Gon.method_defined?(:tracking_data)).to eq(false)
- controller.track_experiment_event(:test_experiment, 'start')
- end
- end
- end
-
- describe '#record_experiment_user' do
- let(:user) { build(:user) }
- let(:context) { { a: 42 } }
-
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_subject(test_experiment: true)
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context)
-
- controller.record_experiment_user(:test_experiment, context)
- end
-
- context 'with a cookie based rollout strategy' do
- it 'calls tracking_group with a nil subject' do
- expect(controller).to receive(:tracking_group).with(:test_experiment, nil, subject: nil).and_return(:experimental)
- allow(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context)
-
- controller.record_experiment_user(:test_experiment, context)
- end
- end
-
- context 'with a user based rollout strategy' do
- let(:rollout_strategy) { :user }
-
- it 'calls tracking_group with a user subject' do
- expect(controller).to receive(:tracking_group).with(:test_experiment, nil, subject: user).and_return(:experimental)
- allow(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user, context)
-
- controller.record_experiment_user(:test_experiment, context)
- end
- end
- end
-
- context 'the user is part of the control group' do
- before do
- stub_experiment_for_subject(test_experiment: false)
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user, context)
-
- controller.record_experiment_user(:test_experiment, context)
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment, context)
- end
- end
-
- context 'when there is no current_user' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment, context)
- end
- end
-
- context 'do not track' do
- before do
- stub_experiment(test_experiment: true)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- context 'is disabled' do
- before do
- stub_do_not_track('0')
- stub_experiment_for_subject(test_experiment: false)
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user, context)
-
- controller.record_experiment_user(:test_experiment, context)
- end
- end
-
- context 'is enabled' do
- before do
- stub_do_not_track('1')
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment, context)
- end
- end
- end
- end
-
- describe '#record_experiment_group' do
- let(:group) { 'a group object' }
- let(:experiment_key) { :some_experiment_key }
- let(:dnt_enabled) { false }
- let(:experiment_active) { true }
- let(:rollout_strategy) { :whatever }
- let(:variant) { 'variant' }
-
- before do
- allow(controller).to receive(:dnt_enabled?).and_return(dnt_enabled)
- allow(::Gitlab::Experimentation).to receive(:active?).and_return(experiment_active)
- allow(::Gitlab::Experimentation).to receive(:rollout_strategy).and_return(rollout_strategy)
- allow(controller).to receive(:tracking_group).and_return(variant)
- allow(::Experiment).to receive(:add_group)
- end
-
- subject(:record_experiment_group) { controller.record_experiment_group(experiment_key, group) }
-
- shared_examples 'exits early without recording' do
- it 'returns early without recording the group as an ExperimentSubject' do
- expect(::Experiment).not_to receive(:add_group)
- record_experiment_group
- end
- end
-
- shared_examples 'calls tracking_group' do |using_cookie_rollout|
- it "calls tracking_group with #{using_cookie_rollout ? 'a nil' : 'the group as the'} subject" do
- expect(controller).to receive(:tracking_group).with(experiment_key, nil, subject: using_cookie_rollout ? nil : group).and_return(variant)
- record_experiment_group
- end
- end
-
- shared_examples 'records the group' do
- it 'records the group' do
- expect(::Experiment).to receive(:add_group).with(experiment_key, group: group, variant: variant)
- record_experiment_group
- end
- end
-
- context 'when DNT is enabled' do
- let(:dnt_enabled) { true }
-
- include_examples 'exits early without recording'
- end
-
- context 'when the experiment is not active' do
- let(:experiment_active) { false }
-
- include_examples 'exits early without recording'
- end
-
- context 'when a nil group is given' do
- let(:group) { nil }
-
- include_examples 'exits early without recording'
- end
-
- context 'when the experiment uses a cookie-based rollout strategy' do
- let(:rollout_strategy) { :cookie }
-
- include_examples 'calls tracking_group', true
- include_examples 'records the group'
- end
-
- context 'when the experiment uses a non-cookie-based rollout strategy' do
- let(:rollout_strategy) { :group }
-
- include_examples 'calls tracking_group', false
- include_examples 'records the group'
- end
- end
-
- describe '#record_experiment_conversion_event' do
- let(:user) { build(:user) }
-
- before do
- allow(controller).to receive(:dnt_enabled?).and_return(false)
- allow(controller).to receive(:current_user).and_return(user)
- stub_experiment(test_experiment: true)
- end
-
- subject(:record_conversion_event) do
- controller.record_experiment_conversion_event(:test_experiment)
- end
-
- it 'records the conversion event for the experiment & user' do
- expect(::Experiment).to receive(:record_conversion_event).with(:test_experiment, user, {})
- record_conversion_event
- end
-
- shared_examples 'does not record the conversion event' do
- it 'does not record the conversion event' do
- expect(::Experiment).not_to receive(:record_conversion_event)
- record_conversion_event
- end
- end
-
- context 'when DNT is enabled' do
- before do
- allow(controller).to receive(:dnt_enabled?).and_return(true)
- end
-
- include_examples 'does not record the conversion event'
- end
-
- context 'when there is no current user' do
- before do
- allow(controller).to receive(:current_user).and_return(nil)
- end
-
- include_examples 'does not record the conversion event'
- end
-
- context 'when the experiment is not enabled' do
- before do
- stub_experiment(test_experiment: false)
- end
-
- include_examples 'does not record the conversion event'
- end
- end
-
- describe '#experiment_tracking_category_and_group' do
- let_it_be(:experiment_key) { :test_something }
-
- subject { controller.experiment_tracking_category_and_group(experiment_key) }
-
- it 'returns a string with the experiment tracking category & group joined with a ":"' do
- expect(controller).to receive(:tracking_category).with(experiment_key).and_return('Experiment::Category')
- expect(controller).to receive(:tracking_group).with(experiment_key, '_group', subject: nil).and_return('experimental_group')
-
- expect(subject).to eq('Experiment::Category:experimental_group')
- end
- end
-end
diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb
deleted file mode 100644
index a5cc69b9538..00000000000
--- a/spec/lib/gitlab/experimentation/experiment_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Experimentation::Experiment do
- using RSpec::Parameterized::TableSyntax
-
- let(:percentage) { 50 }
- let(:params) do
- {
- tracking_category: 'Category1',
- rollout_strategy: nil
- }
- end
-
- before do
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- allow(Feature).to receive(:log_feature_flag_states?).and_return(false)
- feature = double('FeatureFlag', percentage_of_time_value: percentage, enabled?: true)
- allow(Feature).to receive(:get).with(:experiment_key_experiment_percentage).and_return(feature)
- end
-
- subject(:experiment) { described_class.new(:experiment_key, **params) }
-
- describe '#active?' do
- before do
- allow(Gitlab).to receive(:com?).and_return(on_gitlab_com)
- end
-
- subject { experiment.active? }
-
- where(:on_gitlab_com, :percentage, :is_active) do
- true | 0 | false
- true | 10 | true
- false | 0 | false
- false | 10 | false
- end
-
- with_them do
- it { is_expected.to eq(is_active) }
- end
- end
-
- describe '#enabled_for_index?' do
- subject { experiment.enabled_for_index?(index) }
-
- where(:index, :percentage, :is_enabled) do
- 50 | 40 | false
- 40 | 50 | true
- nil | 50 | false
- end
-
- with_them do
- it { is_expected.to eq(is_enabled) }
- end
- end
-end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
deleted file mode 100644
index c482874b725..00000000000
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Experimentation do
- using RSpec::Parameterized::TableSyntax
-
- before do
- stub_const('Gitlab::Experimentation::EXPERIMENTS', {
- test_experiment: {
- tracking_category: 'Team'
- },
- tabular_experiment: {
- tracking_category: 'Team',
- rollout_strategy: rollout_strategy
- }
- })
-
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- let(:enabled_percentage) { 10 }
- let(:rollout_strategy) { nil }
-
- describe '.get_experiment' do
- subject { described_class.get_experiment(:test_experiment) }
-
- context 'returns experiment' do
- it { is_expected.to be_instance_of(Gitlab::Experimentation::Experiment) }
- end
-
- context 'experiment is not defined' do
- subject { described_class.get_experiment(:missing_experiment) }
-
- it { is_expected.to be_nil }
- end
- end
-
- describe '.active?' do
- subject { described_class.active?(:test_experiment) }
-
- context 'feature toggle is enabled' do
- it { is_expected.to eq(true) }
- end
-
- describe 'experiment is not defined' do
- it 'returns false' do
- expect(described_class.active?(:missing_experiment)).to eq(false)
- end
- end
-
- describe 'experiment is disabled' do
- let(:enabled_percentage) { 0 }
-
- it { is_expected.to eq(false) }
- end
- end
-
- describe '.in_experiment_group?' do
- let(:enabled_percentage) { 50 }
- let(:experiment_subject) { 'z' } # Zlib.crc32('test_experimentz') % 100 = 33
-
- subject { described_class.in_experiment_group?(:test_experiment, subject: experiment_subject) }
-
- context 'when experiment is active' do
- context 'when subject is part of the experiment' do
- it { is_expected.to eq(true) }
- end
-
- context 'when subject is not part of the experiment' do
- let(:experiment_subject) { 'a' } # Zlib.crc32('test_experimenta') % 100 = 61
-
- it { is_expected.to eq(false) }
- end
-
- context 'when subject has a global_id' do
- let(:experiment_subject) { double(:subject, to_global_id: 'z') }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when subject is nil' do
- let(:experiment_subject) { nil }
-
- it { is_expected.to eq(false) }
- end
-
- context 'when subject is an empty string' do
- let(:experiment_subject) { '' }
-
- it { is_expected.to eq(false) }
- end
- end
-
- context 'when experiment is not active' do
- before do
- allow(described_class).to receive(:active?).and_return(false)
- end
-
- it { is_expected.to eq(false) }
- end
- end
-
- describe '.log_invalid_rollout' do
- subject { described_class.log_invalid_rollout(:test_experiment, 1) }
-
- before do
- allow(described_class).to receive(:valid_subject_for_rollout_strategy?).and_return(valid)
- end
-
- context 'subject is not valid for experiment' do
- let(:valid) { false }
-
- it 'logs a warning message' do
- expect_next_instance_of(Gitlab::ExperimentationLogger) do |logger|
- expect(logger)
- .to receive(:warn)
- .with(
- message: 'Subject must conform to the rollout strategy',
- experiment_key: :test_experiment,
- subject: 'Integer',
- rollout_strategy: :cookie
- )
- end
-
- subject
- end
- end
-
- context 'subject is valid for experiment' do
- let(:valid) { true }
-
- it 'does not log a warning message' do
- expect(Gitlab::ExperimentationLogger).not_to receive(:build)
-
- subject
- end
- end
- end
-
- describe '.valid_subject_for_rollout_strategy?' do
- subject { described_class.valid_subject_for_rollout_strategy?(:tabular_experiment, experiment_subject) }
-
- where(:rollout_strategy, :experiment_subject, :result) do
- :cookie | nil | true
- nil | nil | true
- :cookie | 'string' | true
- nil | User.new | false
- :user | User.new | true
- :group | User.new | false
- :group | Group.new | true
- end
-
- with_them do
- it { is_expected.to be(result) }
- end
- end
-end
diff --git a/spec/lib/gitlab/git/keep_around_spec.rb b/spec/lib/gitlab/git/keep_around_spec.rb
index 44c3caf3f8d..d6359d55646 100644
--- a/spec/lib/gitlab/git/keep_around_spec.rb
+++ b/spec/lib/gitlab/git/keep_around_spec.rb
@@ -18,23 +18,14 @@ RSpec.describe Gitlab::Git::KeepAround do
expect(service.kept_around?(sample_commit.id)).to be_truthy
end
- it "attempting to call keep around on truncated ref does not fail" do
- service.execute([sample_commit.id])
- ref = service.send(:keep_around_ref_name, sample_commit.id)
-
- path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(repository.path, ref)
- end
- # Corrupt the reference
- File.truncate(path, 0)
+ it "does not fail if writing the ref fails" do
+ expect(repository.raw).to receive(:write_ref).and_raise(Gitlab::Git::CommandError)
expect(service.kept_around?(sample_commit.id)).to be_falsey
service.execute([sample_commit.id])
expect(service.kept_around?(sample_commit.id)).to be_falsey
-
- File.delete(path)
end
context 'for multiple SHAs' do
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 9a87911b6e8..f3d3fd2034c 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -461,7 +461,11 @@ RSpec.describe Gitlab::Git::Repository do
end
it 'raises an error if it failed' do
- expect { repository.delete_refs('refs\heads\fix') }.to raise_error(Gitlab::Git::Repository::GitError)
+ # TODO: Once https://gitlab.com/gitlab-org/gitaly/-/merge_requests/4921
+ # is merged, remove the assertion for Gitlab::Git::Repository::GitError
+ expect { repository.delete_refs('refs\heads\fix') }.to raise_error do |e|
+ expect(e).to be_a(Gitlab::Git::Repository::GitError).or be_a(Gitlab::Git::InvalidRefFormatError)
+ end
end
end
@@ -483,6 +487,12 @@ RSpec.describe Gitlab::Git::Repository do
it 'displays that branch' do
expect(repository.branch_names_contains_sha(head_id)).to include('master', new_branch, utf8_branch)
end
+
+ context 'when limit is provided' do
+ it 'displays limited number of branches' do
+ expect(repository.branch_names_contains_sha(head_id, limit: 1)).to match_array(['2-mb-file'])
+ end
+ end
end
describe "#refs_hash" do
@@ -668,11 +678,11 @@ RSpec.describe Gitlab::Git::Repository do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.to receive(:find_remote_root_ref).and_call_original
- expect(repository.find_remote_root_ref(SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to eq 'master'
+ expect(repository.find_remote_root_ref(TestEnv.factory_repo_path.to_s)).to eq 'master'
end
it 'returns UTF-8' do
- expect(repository.find_remote_root_ref(SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to be_utf8
+ expect(repository.find_remote_root_ref(TestEnv.factory_repo_path.to_s)).to be_utf8
end
it 'returns nil when remote name is nil' do
@@ -690,7 +700,7 @@ RSpec.describe Gitlab::Git::Repository do
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RemoteService, :find_remote_root_ref do
- subject { repository.find_remote_root_ref(SeedHelper::GITLAB_GIT_TEST_REPO_URL) }
+ subject { repository.find_remote_root_ref(TestEnv.factory_repo_path.to_s) }
end
end
@@ -1769,12 +1779,13 @@ RSpec.describe Gitlab::Git::Repository do
it 'returns exactly the expected results' do
languages = repository.languages(TestEnv::BRANCH_SHA['master'])
- expect(languages).to match_array([
- { value: a_value_within(0.1).of(66.7), label: "Ruby", color: "#701516", highlight: "#701516" },
- { value: a_value_within(0.1).of(22.96), label: "JavaScript", color: "#f1e05a", highlight: "#f1e05a" },
- { value: a_value_within(0.1).of(7.9), label: "HTML", color: "#e34c26", highlight: "#e34c26" },
- { value: a_value_within(0.1).of(2.51), label: "CoffeeScript", color: "#244776", highlight: "#244776" }
- ])
+ expect(languages).to match_array(
+ [
+ { value: a_value_within(0.1).of(66.7), label: "Ruby", color: "#701516", highlight: "#701516" },
+ { value: a_value_within(0.1).of(22.96), label: "JavaScript", color: "#f1e05a", highlight: "#f1e05a" },
+ { value: a_value_within(0.1).of(7.9), label: "HTML", color: "#e34c26", highlight: "#e34c26" },
+ { value: a_value_within(0.1).of(2.51), label: "CoffeeScript", color: "#244776", highlight: "#244776" }
+ ])
end
it "uses the repository's HEAD when no ref is passed" do
@@ -1784,22 +1795,48 @@ RSpec.describe Gitlab::Git::Repository do
end
end
- describe '#license_short_name' do
- subject { repository.license_short_name }
+ describe '#license' do
+ where(from_gitaly: [true, false])
+ with_them do
+ subject(:license) { repository.license(from_gitaly) }
- context 'when no license file can be found' do
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository.raw_repository }
+ context 'when no license file can be found' do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw_repository }
- before do
- project.repository.delete_file(project.owner, 'LICENSE', message: 'remove license', branch_name: 'master')
+ before do
+ project.repository.delete_file(project.owner, 'LICENSE', message: 'remove license', branch_name: 'master')
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when an mit license is found' do
+ it { is_expected.to have_attributes(key: 'mit') }
end
- it { is_expected.to be_nil }
+ context 'when license is not recognized' do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw_repository }
+
+ before do
+ project.repository.update_file(
+ project.owner,
+ 'LICENSE',
+ 'This software is licensed under the Dummy license.',
+ message: 'Update license',
+ branch_name: 'master')
+ end
+
+ it { is_expected.to have_attributes(key: 'other', nickname: 'LICENSE') }
+ end
end
- context 'when an mit license is found' do
- it { is_expected.to eq('mit') }
+ it 'does not crash when license is invalid' do
+ expect(Licensee::License).to receive(:new)
+ .and_raise(Licensee::InvalidLicense)
+
+ expect(repository.license(false)).to be_nil
end
end
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 747611a59e6..524b373a5b7 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -213,7 +213,8 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged do
end
def create_gitaly_metadata_file
- File.open(File.join(SEED_STORAGE_PATH, '.gitaly-metadata'), 'w+') do |f|
+ metadata_filename = File.join(TestEnv.repos_path, '.gitaly-metadata')
+ File.open(metadata_filename, 'w+') do |f|
gitaly_metadata = {
"gitaly_filesystem_id" => SecureRandom.uuid
}
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
deleted file mode 100644
index 05c7ac149e4..00000000000
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ /dev/null
@@ -1,134 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Git::Wiki do
- using RSpec::Parameterized::TableSyntax
-
- let(:project) { create(:project) }
- let(:user) { project.first_owner }
- let(:project_wiki) { ProjectWiki.new(project, user) }
- let(:repository) { project_wiki.repository }
- let(:default_branch) { described_class.default_ref(project) }
-
- subject(:wiki) { project_wiki.wiki }
-
- before do
- repository.create_if_not_exists(project_wiki.default_branch)
- end
-
- describe '#pages' do
- before do
- create_page('page1', 'content')
- create_page('page2', 'content2')
- end
-
- after do
- destroy_page('page1')
- destroy_page('page2')
- end
-
- it 'returns all the pages' do
- expect(subject.list_pages.count).to eq(2)
- expect(subject.list_pages.first.title).to eq 'page1'
- expect(subject.list_pages.last.title).to eq 'page2'
- end
-
- it 'returns only one page' do
- pages = subject.list_pages(limit: 1)
-
- expect(pages.count).to eq(1)
- expect(pages.first.title).to eq 'page1'
- end
- end
-
- describe '#page' do
- before do
- create_page('page1', 'content')
- create_page('foo/page1', 'content foo/page1')
- end
-
- after do
- destroy_page('page1')
- destroy_page('foo/page1')
- end
-
- it 'returns the right page' do
- page = subject.page(title: 'page1', dir: '')
- expect(page.url_path).to eq 'page1'
- expect(page.raw_data).to eq 'content'
-
- page = subject.page(title: 'page1', dir: 'foo')
- expect(page.url_path).to eq 'foo/page1'
- expect(page.raw_data).to eq 'content foo/page1'
- end
-
- it 'returns nil for invalid arguments' do
- expect(subject.page(title: '')).to be_nil
- expect(subject.page(title: 'foo', version: ':')).to be_nil
- end
-
- it 'does not return content if load_content param is set to false' do
- page = subject.page(title: 'page1', dir: '', load_content: false)
-
- expect(page.url_path).to eq 'page1'
- expect(page.raw_data).to be_empty
- end
- end
-
- describe '#preview_slug' do
- where(:title, :file_extension, :format, :expected_slug) do
- 'The Best Thing' | :md | :markdown | 'The-Best-Thing'
- 'The Best Thing' | :md | :md | 'The-Best-Thing'
- 'The Best Thing' | :txt | :txt | 'The-Best-Thing'
- 'A Subject/Title Here' | :txt | :txt | 'A-Subject/Title-Here'
- 'A subject' | :txt | :txt | 'A-subject'
- 'A 1/B 2/C 3' | :txt | :txt | 'A-1/B-2/C-3'
- 'subject/title' | :txt | :txt | 'subject/title'
- 'subject/title.md' | :txt | :txt | 'subject/title.md'
- 'foo<bar>+baz' | :txt | :txt | 'foo-bar--baz'
- 'foo%2Fbar' | :txt | :txt | 'foo%2Fbar'
- '' | :md | :markdown | '.md'
- '' | :md | :md | '.md'
- '' | :txt | :txt | '.txt'
- end
-
- with_them do
- subject { wiki.preview_slug(title, format) }
-
- let(:gitaly_slug) { wiki.list_pages.first }
-
- it { is_expected.to eq(expected_slug) }
-
- it 'matches the slug generated by gitaly' do
- skip('Gitaly cannot generate a slug for an empty title') unless title.present?
-
- create_page(title, 'content', file_extension)
-
- gitaly_slug = wiki.list_pages.first.url_path
-
- is_expected.to eq(gitaly_slug)
- end
- end
- end
-
- def create_page(name, content, extension = :md)
- repository.create_file(
- user, ::Wiki.sluggified_full_path(name, extension.to_s), content,
- branch_name: default_branch,
- message: "created page #{name}",
- author_email: user.email,
- author_name: user.name
- )
- end
-
- def destroy_page(name, extension = :md)
- repository.delete_file(
- user, ::Wiki.sluggified_full_path(name, extension.to_s),
- branch_name: described_class.default_ref(project),
- message: "delete page #{name}",
- author_email: user.email,
- author_name: user.name
- )
- end
-end
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index a7036a4f20a..0d069d36e48 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccessSnippet do
include ProjectHelpers
+ include UserHelpers
include TermsHelper
include AdminModeHelper
include_context 'ProjectPolicyTable context'
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 8577cad1011..7e3a1bf61bc 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -4,11 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccess, :aggregate_failures do
include TermsHelper
- include GitHelpers
include AdminModeHelper
let(:user) { create(:user) }
-
let(:actor) { user }
let(:project) { create(:project, :repository) }
let(:repository_path) { "#{project.full_path}.git" }
@@ -139,27 +137,18 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures do
end
end
- # For backwards compatibility
+ # legacy behavior that is blocked/deprecated
context 'when actor is :ci' do
let(:actor) { :ci }
let(:authentication_abilities) { build_authentication_abilities }
- it 'allows pull access' do
- expect { pull_access_check }.not_to raise_error
+ it 'disallows pull access' do
+ expect { pull_access_check }.to raise_error(Gitlab::GitAccess::NotFoundError)
end
it 'does not block pushes with "not found"' do
expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload])
end
-
- it 'logs' do
- expect(Gitlab::AppJsonLogger).to receive(:info).with(
- message: 'Actor was :ci',
- project_id: project.id
- ).once
-
- pull_access_check
- end
end
context 'when actor is DeployToken' do
@@ -741,18 +730,7 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures do
describe 'generic CI (build without a user)' do
let(:actor) { :ci }
- context 'pull code' do
- it { expect { pull_access_check }.not_to raise_error }
-
- it 'logs' do
- expect(Gitlab::AppJsonLogger).to receive(:info).with(
- message: 'Actor was :ci',
- project_id: project.id
- ).once
-
- pull_access_check
- end
- end
+ specify { expect { pull_access_check }.to raise_error Gitlab::GitAccess::NotFoundError }
end
end
end
@@ -810,18 +788,29 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures do
def merge_into_protected_branch
@protected_branch_merge_commit ||= begin
project.repository.add_branch(user, unprotected_branch, 'feature')
- rugged = rugged_repo(project.repository)
- target_branch = rugged.rev_parse('feature')
+ target_branch = TestEnv::BRANCH_SHA['feature']
source_branch = project.repository.create_file(
user,
'filename',
'This is the file content',
message: 'This is a good commit message',
branch_name: unprotected_branch)
- author = { email: "email@example.com", time: Time.now, name: "Example Git User" }
-
- merge_index = rugged.merge_commits(target_branch, source_branch)
- Rugged::Commit.create(rugged, author: author, committer: author, message: "commit message", parents: [target_branch, source_branch], tree: merge_index.write_tree(rugged))
+ merge_id = project.repository.raw.merge_to_ref(
+ user,
+ branch: target_branch,
+ first_parent_ref: target_branch,
+ source_sha: source_branch,
+ target_ref: 'refs/merge-requests/test',
+ message: 'commit message'
+ )
+
+ # We are trying to simulate what the repository would look like
+ # during the pre-receive hook, before the actual ref is
+ # written/created. Repository#new_commits relies on there being no
+ # ref pointing to the merge commit.
+ project.repository.delete_refs('refs/merge-requests/test')
+
+ merge_id
end
end
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index f869c66337e..d02b4492216 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -174,20 +174,22 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
expect(service)
.to receive(:list_blobs)
.with(gitaly_request_with_params(expected_params), kind_of(Hash))
- .and_return([
- Gitaly::ListBlobsResponse.new(blobs: [
- Gitaly::ListBlobsResponse::Blob.new(oid: "012345", size: 8, data: "0x01"),
- Gitaly::ListBlobsResponse::Blob.new(data: "23")
- ]),
- Gitaly::ListBlobsResponse.new(blobs: [
- Gitaly::ListBlobsResponse::Blob.new(data: "45"),
- Gitaly::ListBlobsResponse::Blob.new(oid: "56", size: 4, data: "0x5"),
- Gitaly::ListBlobsResponse::Blob.new(data: "6")
- ]),
- Gitaly::ListBlobsResponse.new(blobs: [
- Gitaly::ListBlobsResponse::Blob.new(oid: "78", size: 4, data: "0x78")
+ .and_return(
+ [
+ Gitaly::ListBlobsResponse.new(
+ blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(oid: "012345", size: 8, data: "0x01"),
+ Gitaly::ListBlobsResponse::Blob.new(data: "23")
+ ]),
+ Gitaly::ListBlobsResponse.new(
+ blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(data: "45"),
+ Gitaly::ListBlobsResponse::Blob.new(oid: "56", size: 4, data: "0x5"),
+ Gitaly::ListBlobsResponse::Blob.new(data: "6")
+ ]),
+ Gitaly::ListBlobsResponse.new(
+ blobs: [Gitaly::ListBlobsResponse::Blob.new(oid: "78", size: 4, data: "0x78")])
])
- ])
end
blobs = subject.to_a
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index b7c21516c77..5ce88b06241 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::RefService do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:storage_name) { project.repository_storage }
let(:relative_path) { project.disk_path + '.git' }
let(:repository) { project.repository }
@@ -179,13 +180,22 @@ RSpec.describe Gitlab::GitalyClient::RefService do
)
)
end
+
local_branches = target_commits.each_with_index.map do |gitaly_commit, i|
Gitaly::Branch.new(name: "#{remote_name}/#{i}", target_commit: gitaly_commit)
end
- response = [
- Gitaly::FindLocalBranchesResponse.new(branches: branches[0, 2], local_branches: local_branches[0, 2]),
- Gitaly::FindLocalBranchesResponse.new(branches: branches[2, 2], local_branches: local_branches[2, 2])
- ]
+
+ response = if set_local_branches
+ [
+ Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[0, 2]),
+ Gitaly::FindLocalBranchesResponse.new(local_branches: local_branches[2, 2])
+ ]
+ else
+ [
+ Gitaly::FindLocalBranchesResponse.new(branches: branches[0, 2]),
+ Gitaly::FindLocalBranchesResponse.new(branches: branches[2, 2])
+ ]
+ end
expect_any_instance_of(Gitaly::RefService::Stub)
.to receive(:find_local_branches)
@@ -220,18 +230,14 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
end
- context 'when feature flag :gitaly_simplify_find_local_branches_response is enabled' do
- before do
- stub_feature_flags(gitaly_simplify_find_local_branches_response: true)
- end
+ context 'when local_branches variable is not set' do
+ let(:set_local_branches) { false }
it_behaves_like 'common examples'
end
- context 'when feature flag :gitaly_simplify_find_local_branches_response is disabled' do
- before do
- stub_feature_flags(gitaly_simplify_find_local_branches_response: false)
- end
+ context 'when local_branches variable is set' do
+ let(:set_local_branches) { true }
it_behaves_like 'common examples'
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 63d32cb906f..58ace05b0d3 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -308,7 +308,7 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
describe '#replicate' do
- let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project') }
+ let(:source_repository) { Gitlab::Git::Repository.new('default', 'repo/path', '', 'group/project') }
it 'sends a replicate_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
@@ -343,4 +343,18 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
expect(client.full_path).to eq(path)
end
end
+
+ describe "#find_license" do
+ it 'sends a find_license request with medium timeout' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:find_license) do |_service, _request, headers|
+ expect(headers[:deadline]).to be_between(
+ Gitlab::GitalyClient.fast_timeout.seconds.from_now.to_f,
+ Gitlab::GitalyClient.medium_timeout.seconds.from_now.to_f
+ )
+ end
+
+ client.find_license
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
deleted file mode 100644
index 8a169acb69c..00000000000
--- a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GitalyClient::WikiService do
- let(:project) { create(:project) }
- let(:storage_name) { project.repository_storage }
- let(:relative_path) { project.disk_path + '.git' }
- let(:client) { described_class.new(project.repository) }
- let(:commit) { create(:gitaly_commit) }
- let(:page_version) { Gitaly::WikiPageVersion.new(format: 'markdown', commit: commit) }
- let(:page_info) { { title: 'My Page', raw_data: 'a', version: page_version } }
-
- describe '#find_page' do
- let(:response) do
- [
- Gitaly::WikiFindPageResponse.new(page: Gitaly::WikiPage.new(page_info)),
- Gitaly::WikiFindPageResponse.new(page: Gitaly::WikiPage.new(raw_data: 'b'))
- ]
- end
-
- let(:wiki_page) { subject.first }
- let(:wiki_page_version) { subject.last }
-
- subject { client.find_page(title: 'My Page', version: 'master', dir: '') }
-
- it 'sends a wiki_find_page message' do
- expect_any_instance_of(Gitaly::WikiService::Stub)
- .to receive(:wiki_find_page)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return([].each)
-
- subject
- end
-
- it 'concatenates the raw data and returns a pair of WikiPage and WikiPageVersion' do
- expect_any_instance_of(Gitaly::WikiService::Stub)
- .to receive(:wiki_find_page)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(response.each)
-
- expect(wiki_page.title).to eq('My Page')
- expect(wiki_page.raw_data).to eq('ab')
- expect(wiki_page_version.format).to eq('markdown')
-
- expect(wiki_page.title).to be_utf8
- expect(wiki_page.path).to be_utf8
- expect(wiki_page.name).to be_utf8
- end
- end
-
- describe '#load_all_pages' do
- let(:page_2_info) { { title: 'My Page 2', raw_data: 'c', version: page_version } }
- let(:response) do
- [
- Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(page_info)),
- Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(raw_data: 'b')),
- Gitaly::WikiGetAllPagesResponse.new(end_of_page: true),
- Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(page_2_info)),
- Gitaly::WikiGetAllPagesResponse.new(page: Gitaly::WikiPage.new(raw_data: 'd')),
- Gitaly::WikiGetAllPagesResponse.new(end_of_page: true)
- ]
- end
-
- let(:wiki_page_1) { subject[0].first }
- let(:wiki_page_1_version) { subject[0].last }
- let(:wiki_page_2) { subject[1].first }
- let(:wiki_page_2_version) { subject[1].last }
-
- subject { client.load_all_pages }
-
- it 'sends a wiki_get_all_pages message' do
- expect_any_instance_of(Gitaly::WikiService::Stub)
- .to receive(:wiki_get_all_pages)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return([].each)
-
- subject
- end
-
- it 'sends a limit of 0 to wiki_get_all_pages' do
- expect_any_instance_of(Gitaly::WikiService::Stub)
- .to receive(:wiki_get_all_pages)
- .with(gitaly_request_with_params(limit: 0), kind_of(Hash))
- .and_return([].each)
-
- subject
- end
-
- it 'concatenates the raw data and returns a pair of WikiPage and WikiPageVersion for each page' do
- expect_any_instance_of(Gitaly::WikiService::Stub)
- .to receive(:wiki_get_all_pages)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(response.each)
-
- expect(subject.size).to be(2)
- expect(wiki_page_1.title).to eq('My Page')
- expect(wiki_page_1.raw_data).to eq('ab')
- expect(wiki_page_1_version.format).to eq('markdown')
- expect(wiki_page_2.title).to eq('My Page 2')
- expect(wiki_page_2.raw_data).to eq('cd')
- expect(wiki_page_2_version.format).to eq('markdown')
- end
-
- context 'with limits' do
- subject { client.load_all_pages(limit: 1) }
-
- it 'sends a request with the limit' do
- expect_any_instance_of(Gitaly::WikiService::Stub)
- .to receive(:wiki_get_all_pages)
- .with(gitaly_request_with_params(limit: 1), kind_of(Hash))
- .and_return([].each)
-
- subject
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index c88bb6de859..3361b039a27 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -148,7 +148,25 @@ RSpec.describe Gitlab::GithubImport::Client do
.to receive(:branch_protection).with('org/repo', 'bar')
expect(client).to receive(:with_rate_limit).and_yield
- client.branch_protection('org/repo', 'bar')
+ branch_protection = client.branch_protection('org/repo', 'bar')
+
+ expect(branch_protection).to be_a(Hash)
+ end
+ end
+
+ describe '#each_object' do
+ it 'converts each object into a hash' do
+ client = described_class.new('foo')
+
+ stub_request(:get, 'https://api.github.com/rate_limit')
+ .to_return(status: 200, headers: { 'X-RateLimit-Limit' => 5000, 'X-RateLimit-Remaining' => 5000 })
+
+ stub_request(:get, 'https://api.github.com/repos/foo/bar/releases?per_page=100')
+ .to_return(status: 200, body: [{ id: 1 }].to_json, headers: { 'Content-Type' => 'application/json' })
+
+ client.each_object(:releases, 'foo/bar') do |release|
+ expect(release).to eq({ id: 1 })
+ end
end
end
@@ -575,11 +593,11 @@ RSpec.describe Gitlab::GithubImport::Client do
describe 'search' do
let(:client) { described_class.new('foo') }
- let(:user) { double(:user, login: 'user') }
- let(:org1) { double(:org, login: 'org1') }
- let(:org2) { double(:org, login: 'org2') }
- let(:repo1) { double(:repo, full_name: 'repo1') }
- let(:repo2) { double(:repo, full_name: 'repo2') }
+ let(:user) { { login: 'user' } }
+ let(:org1) { { login: 'org1' } }
+ let(:org2) { { login: 'org2' } }
+ let(:repo1) { { full_name: 'repo1' } }
+ let(:repo2) { { full_name: 'repo2' } }
before do
allow(client)
diff --git a/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
new file mode 100644
index 00000000000..5e60be44621
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/attachments/base_importer_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::BaseImporter do
+ subject(:importer) { importer_class.new(project, client) }
+
+ let(:project) { instance_double(Project, id: 1) }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+ let(:importer_class) do
+ Class.new(described_class) do
+ private
+
+ def collection_method
+ 'test'
+ end
+ end
+ end
+
+ describe '#each_object_to_import' do
+ context 'with not implemented #collection interface' do
+ it 'raises NotImplementedError' do
+ expect { importer.each_object_to_import }
+ .to raise_error(Gitlab::GithubImport::Exceptions::NotImplementedError, '#collection')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
new file mode 100644
index 00000000000..85bc67376d3
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/attachments/issues_importer_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::IssuesImporter do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project) }
+
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ describe '#sequential_import', :clean_gitlab_redis_cache do
+ let_it_be(:issue_1) { create(:issue, project: project) }
+ let_it_be(:issue_2) { create(:issue, project: project) }
+
+ let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
+ let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+
+ it 'imports each project issue attachments' do
+ expect_next_instances_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+
+ context 'when issue is already processed' do
+ it "doesn't import this issue attachments" do
+ importer.mark_as_imported(issue_1)
+
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+ end
+ end
+
+ describe '#sidekiq_worker_class' do
+ it { expect(importer.sidekiq_worker_class).to eq(Gitlab::GithubImport::Attachments::ImportIssueWorker) }
+ end
+
+ describe '#collection_method' do
+ it { expect(importer.collection_method).to eq(:issue_attachments) }
+ end
+
+ describe '#object_type' do
+ it { expect(importer.object_type).to eq(:issue_attachment) }
+ end
+
+ describe '#id_for_already_imported_cache' do
+ let(:issue) { build_stubbed(:issue) }
+
+ it { expect(importer.id_for_already_imported_cache(issue)).to eq(issue.id) }
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
new file mode 100644
index 00000000000..e4718c2d17c
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/attachments/merge_requests_importer_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporter do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project) }
+
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ describe '#sequential_import', :clean_gitlab_redis_cache do
+ let_it_be(:merge_request_1) { create(:merge_request, source_project: project, target_branch: 'feature1') }
+ let_it_be(:merge_request_2) { create(:merge_request, source_project: project, target_branch: 'feature2') }
+
+ let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
+ let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+
+ it 'imports each project merge request attachments' do
+ expect_next_instances_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter, 2, false, *importer_attrs
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+
+ context 'when merge request is already processed' do
+ it "doesn't import this merge request attachments" do
+ importer.mark_as_imported(merge_request_1)
+
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter, *importer_attrs
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ importer.sequential_import
+ end
+ end
+ end
+
+ describe '#sidekiq_worker_class' do
+ it { expect(importer.sidekiq_worker_class).to eq(Gitlab::GithubImport::Attachments::ImportMergeRequestWorker) }
+ end
+
+ describe '#collection_method' do
+ it { expect(importer.collection_method).to eq(:merge_request_attachments) }
+ end
+
+ describe '#object_type' do
+ it { expect(importer.object_type).to eq(:merge_request_attachment) }
+ end
+
+ describe '#id_for_already_imported_cache' do
+ let(:merge_request) { build_stubbed(:merge_request) }
+
+ it { expect(importer.id_for_already_imported_cache(merge_request)).to eq(merge_request.id) }
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
new file mode 100644
index 00000000000..7ed353e1b71
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/attachments/notes_importer_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::NotesImporter do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project) }
+
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ describe '#sequential_import', :clean_gitlab_redis_cache do
+ let_it_be(:note_1) { create(:note, project: project) }
+ let_it_be(:note_2) { create(:note, project: project) }
+ let_it_be(:system_note) { create(:note, :system, project: project) }
+
+ let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
+ let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+
+ it 'imports each project user note' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
+ .with(*importer_attrs).twice.and_return(importer_stub)
+ expect(importer_stub).to receive(:execute).twice
+
+ importer.sequential_import
+ end
+
+ context 'when note is already processed' do
+ it "doesn't import this note" do
+ importer.mark_as_imported(note_1)
+
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
+ .with(*importer_attrs).once.and_return(importer_stub)
+ expect(importer_stub).to receive(:execute).once
+
+ importer.sequential_import
+ end
+ end
+ end
+
+ describe '#sidekiq_worker_class' do
+ it { expect(importer.sidekiq_worker_class).to eq(Gitlab::GithubImport::Attachments::ImportNoteWorker) }
+ end
+
+ describe '#collection_method' do
+ it { expect(importer.collection_method).to eq(:note_attachments) }
+ end
+
+ describe '#object_type' do
+ it { expect(importer.object_type).to eq(:note_attachment) }
+ end
+
+ describe '#id_for_already_imported_cache' do
+ let(:note) { build_stubbed(:note) }
+
+ it { expect(importer.id_for_already_imported_cache(note)).to eq(note.id) }
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
new file mode 100644
index 00000000000..b989345ae09
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/attachments/releases_importer_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Attachments::ReleasesImporter do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project) }
+
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ describe '#sequential_import', :clean_gitlab_redis_cache do
+ let_it_be(:release_1) { create(:release, project: project) }
+ let_it_be(:release_2) { create(:release, project: project) }
+
+ let(:importer_stub) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
+ let(:importer_attrs) { [instance_of(Gitlab::GithubImport::Representation::NoteText), project, client] }
+
+ it 'imports each project release' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
+ .with(*importer_attrs).twice.and_return(importer_stub)
+ expect(importer_stub).to receive(:execute).twice
+
+ importer.sequential_import
+ end
+
+ context 'when note is already processed' do
+ it "doesn't import this release" do
+ importer.mark_as_imported(release_1)
+
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter).to receive(:new)
+ .with(*importer_attrs).once.and_return(importer_stub)
+ expect(importer_stub).to receive(:execute).once
+
+ importer.sequential_import
+ end
+ end
+ end
+
+ describe '#sidekiq_worker_class' do
+ it { expect(importer.sidekiq_worker_class).to eq(Gitlab::GithubImport::Attachments::ImportReleaseWorker) }
+ end
+
+ describe '#collection_method' do
+ it { expect(importer.collection_method).to eq(:release_attachments) }
+ end
+
+ describe '#object_type' do
+ it { expect(importer.object_type).to eq(:release_attachment) }
+ end
+
+ describe '#id_for_already_imported_cache' do
+ let(:release) { build_stubbed(:release) }
+
+ it { expect(importer.id_for_already_imported_cache(release)).to eq(release.id) }
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 6eb92cdeab9..a8dd6b4725d 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -7,14 +7,13 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do
let(:client) { double(:client) }
let(:github_comment) do
- double(
- :response,
+ {
html_url: 'https://github.com/foo/bar/pull/42',
path: 'README.md',
commit_id: '123abc',
original_commit_id: 'original123abc',
diff_hunk: "@@ -1 +1 @@\n-Hello\n+Hello world",
- user: double(:user, id: 4, login: 'alice'),
+ user: { id: 4, login: 'alice' },
created_at: Time.zone.now,
updated_at: Time.zone.now,
line: 23,
@@ -29,7 +28,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do
sug1
```
BODY
- )
+ }
end
describe '#parallel?' do
@@ -98,9 +97,10 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do
.to receive(:each_object_to_import)
.and_yield(github_comment)
- expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:bulk_perform_in).with(1.second, [
- [project.id, an_instance_of(Hash), an_instance_of(String)]
- ], batch_size: 1000, batch_delay: 1.minute)
+ expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:bulk_perform_in)
+ .with(1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
index 6b807bdf098..308b8185589 100644
--- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb
@@ -9,20 +9,19 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter do
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
let(:github_issue) do
- double(
- :response,
+ {
number: 42,
title: 'My Issue',
body: 'This is my issue',
- milestone: double(:milestone, number: 4),
+ milestone: { number: 4 },
state: 'open',
- assignees: [double(:user, id: 4, login: 'alice')],
- labels: [double(:label, name: 'bug')],
- user: double(:user, id: 4, login: 'alice'),
+ assignees: [{ id: 4, login: 'alice' }],
+ labels: [{ name: 'bug' }],
+ user: { id: 4, login: 'alice' },
created_at: created_at,
updated_at: updated_at,
pull_request: false
- )
+ }
end
describe '#parallel?' do
@@ -110,4 +109,24 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter do
.to eq(42)
end
end
+
+ describe '#increment_object_counter?' do
+ let(:importer) { described_class.new(project, client) }
+
+ context 'when issue is a pull request' do
+ let(:github_issue) { { pull_request: { url: 'some_url' } } }
+
+ it 'returns false' do
+ expect(importer).not_to be_increment_object_counter(github_issue)
+ end
+ end
+
+ context 'when issue is a regular issue' do
+ let(:github_issue) { {} }
+
+ it 'returns true' do
+ expect(importer).to be_increment_object_counter(github_issue)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
index ca9d3e1e21c..81d534c566f 100644
--- a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_red
describe '#build_labels' do
it 'returns an Array containnig label rows' do
- label = double(:label, name: 'bug', color: 'ffffff')
+ label = { name: 'bug', color: 'ffffff' }
expect(importer).to receive(:each_label).and_return([label])
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_red
it 'does not create labels that already exist' do
create(:label, project: project, title: 'bug')
- label = double(:label, name: 'bug', color: 'ffffff')
+ label = { name: 'bug', color: 'ffffff' }
expect(importer).to receive(:each_label).and_return([label])
expect(importer.build_labels).to be_empty
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_red
describe '#build' do
let(:label_hash) do
- importer.build(double(:label, name: 'bug', color: 'ffffff'))
+ importer.build({ name: 'bug', color: 'ffffff' })
end
it 'returns the attributes of the label as a Hash' do
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 251829b83a0..99536588718 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -118,9 +118,10 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
expect(service).to receive(:execute).and_return([lfs_download_object])
end
- expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:bulk_perform_in).with(1.second, [
- [project.id, an_instance_of(Hash), an_instance_of(String)]
- ], batch_size: 1000, batch_delay: 1.minute)
+ expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:bulk_perform_in)
+ .with(1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
index dad1efc5a8d..04d76bd1f06 100644
--- a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
@@ -11,8 +11,7 @@ RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
let(:milestone) do
- double(
- :milestone,
+ {
number: 1,
title: '1.0',
description: 'The first release',
@@ -20,12 +19,11 @@ RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab
due_on: due_on,
created_at: created_at,
updated_at: updated_at
- )
+ }
end
let(:milestone2) do
- double(
- :milestone,
+ {
number: 1,
title: '1.0',
description: 'The first release',
@@ -33,7 +31,7 @@ RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab
due_on: nil,
created_at: created_at,
updated_at: updated_at
- )
+ }
end
describe '#execute' do
diff --git a/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb
new file mode 100644
index 00000000000..7d4e3c3bcce
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/note_attachments_importer_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::NoteAttachmentsImporter do
+ subject(:importer) { described_class.new(note_text, project, client) }
+
+ let_it_be(:project) { create(:project) }
+
+ let(:note_text) { Gitlab::GithubImport::Representation::NoteText.from_db_record(record) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+
+ let(:doc_url) { 'https://github.com/nickname/public-test-repo/files/9020437/git-cheat-sheet.txt' }
+ let(:image_url) { 'https://user-images.githubusercontent.com/6833842/0cf366b61ef2.jpeg' }
+ let(:image_tag_url) { 'https://user-images.githubusercontent.com/6833842/0cf366b61ea5.jpeg' }
+ let(:text) do
+ <<-TEXT.split("\n").map(&:strip).join("\n")
+ Some text...
+
+ [special-doc](#{doc_url})
+ ![image.jpeg](#{image_url})
+ <img width=\"248\" alt=\"tag-image\" src="#{image_tag_url}">
+ TEXT
+ end
+
+ shared_examples 'updates record description' do
+ it do
+ importer.execute
+
+ record.reload
+ expect(record.description).to start_with("Some text...\n\n[special-doc](/uploads/")
+ expect(record.description).to include('![image.jpeg](/uploads/')
+ expect(record.description).to include('<img width="248" alt="tag-image" src="/uploads')
+ end
+ end
+
+ describe '#execute' do
+ let(:downloader_stub) { instance_double(Gitlab::GithubImport::AttachmentsDownloader) }
+ let(:tmp_stub_doc) { Tempfile.create('attachment_download_test.txt') }
+ let(:tmp_stub_image) { Tempfile.create('image.jpeg') }
+ let(:tmp_stub_image_tag) { Tempfile.create('image-tag.jpeg') }
+
+ before do
+ allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(doc_url)
+ .and_return(downloader_stub)
+ allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(image_url)
+ .and_return(downloader_stub)
+ allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(image_tag_url)
+ .and_return(downloader_stub)
+ allow(downloader_stub).to receive(:perform).and_return(tmp_stub_doc, tmp_stub_image, tmp_stub_image_tag)
+ allow(downloader_stub).to receive(:delete).exactly(3).times
+ end
+
+ context 'when importing release attachments' do
+ let(:record) { create(:release, project: project, description: text) }
+
+ it_behaves_like 'updates record description'
+ end
+
+ context 'when importing issue attachments' do
+ let(:record) { create(:issue, project: project, description: text) }
+
+ it_behaves_like 'updates record description'
+ end
+
+ context 'when importing merge request attachments' do
+ let(:record) { create(:merge_request, source_project: project, description: text) }
+
+ it_behaves_like 'updates record description'
+ end
+
+ context 'when importing note attachments' do
+ let(:record) { create(:note, project: project, note: text) }
+
+ it 'updates note text with new attachment urls' do
+ importer.execute
+
+ record.reload
+ expect(record.note).to start_with("Some text...\n\n[special-doc](/uploads/")
+ expect(record.note).to include('![image.jpeg](/uploads/')
+ expect(record.note).to include('<img width="248" alt="tag-image" src="/uploads')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
index 165f543525d..c60ecd85e92 100644
--- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -160,6 +160,13 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
expect(project.notes.take).to be_valid
end
+
+ # rubocop:disable RSpec/AnyInstanceOf
+ it 'skips markdown field cache callback' do
+ expect_any_instance_of(Note).not_to receive(:refresh_markdown_cache)
+ importer.execute
+ end
+ # rubocop:enable RSpec/AnyInstanceOf
end
describe '#find_noteable_id' do
diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
index 3b4fe652da8..ca4560b6a1a 100644
--- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb
@@ -7,15 +7,14 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter do
let(:client) { double(:client) }
let(:github_comment) do
- double(
- :response,
+ {
html_url: 'https://github.com/foo/bar/issues/42',
- user: double(:user, id: 4, login: 'alice'),
+ user: { id: 4, login: 'alice' },
body: 'Hello world',
created_at: Time.zone.now,
updated_at: Time.zone.now,
id: 1
- )
+ }
end
describe '#parallel?' do
@@ -84,9 +83,10 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter do
.to receive(:each_object_to_import)
.and_yield(github_comment)
- expect(Gitlab::GithubImport::ImportNoteWorker).to receive(:bulk_perform_in).with(1.second, [
- [project.id, an_instance_of(Hash), an_instance_of(String)]
- ], batch_size: 1000, batch_delay: 1.minute)
+ expect(Gitlab::GithubImport::ImportNoteWorker).to receive(:bulk_perform_in)
+ .with(1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute)
waiter = importer.parallel_import
diff --git a/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
index 6dc6db739f4..027b2ac422e 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
@@ -5,11 +5,21 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
subject(:importer) { described_class.new(github_protected_branch, project, client) }
+ let(:branch_name) { 'protection' }
let(:allow_force_pushes_on_github) { true }
+ let(:required_conversation_resolution) { false }
+ let(:required_signatures) { false }
+ let(:required_pull_request_reviews) { false }
+ let(:expected_push_access_level) { Gitlab::Access::MAINTAINER }
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+ let(:expected_allow_force_push) { true }
let(:github_protected_branch) do
Gitlab::GithubImport::Representation::ProtectedBranch.new(
- id: 'protection',
- allow_force_pushes: allow_force_pushes_on_github
+ id: branch_name,
+ allow_force_pushes: allow_force_pushes_on_github,
+ required_conversation_resolution: required_conversation_resolution,
+ required_signatures: required_signatures,
+ required_pull_request_reviews: required_pull_request_reviews
)
end
@@ -23,8 +33,8 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
let(:expected_ruleset) do
{
name: 'protection',
- push_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
- merge_access_levels_attributes: [{ access_level: Gitlab::Access::MAINTAINER }],
+ push_access_levels_attributes: [{ access_level: expected_push_access_level }],
+ merge_access_levels_attributes: [{ access_level: expected_merge_access_level }],
allow_force_push: expected_allow_force_push
}
end
@@ -47,6 +57,18 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
end
end
+ shared_examples 'does not change project attributes' do
+ it 'does not change only_allow_merge_if_all_discussions_are_resolved' do
+ expect { importer.execute }.not_to change(project, :only_allow_merge_if_all_discussions_are_resolved)
+ end
+
+ it 'does not change push_rule for the project' do
+ expect(project).not_to receive(:push_rule)
+
+ importer.execute
+ end
+ end
+
context 'when branch is protected on GitLab' do
before do
create(
@@ -87,5 +109,193 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
it_behaves_like 'create branch protection by the strictest ruleset'
end
+
+ context "when branch is default" do
+ before do
+ allow(project).to receive(:default_branch).and_return(branch_name)
+ end
+
+ context 'when required_conversation_resolution rule is enabled' do
+ let(:required_conversation_resolution) { true }
+
+ it 'changes project settings' do
+ expect { importer.execute }.to change(project, :only_allow_merge_if_all_discussions_are_resolved).to(true)
+ end
+ end
+
+ context 'when required_conversation_resolution rule is disabled' do
+ let(:required_conversation_resolution) { false }
+
+ it_behaves_like 'does not change project attributes'
+ end
+
+ context 'when required_signatures rule is enabled' do
+ let(:required_signatures) { true }
+ let(:push_rules_feature_available?) { true }
+
+ before do
+ stub_licensed_features(push_rules: push_rules_feature_available?)
+ end
+
+ context 'when the push_rules feature is available', if: Gitlab.ee? do
+ context 'when project push_rules did previously exist' do
+ before do
+ create(:push_rule, project: project)
+ end
+
+ it 'updates push_rule reject_unsigned_commits attribute' do
+ expect { importer.execute }.to change { project.reload.push_rule.reject_unsigned_commits }.to(true)
+ end
+ end
+
+ context 'when project push_rules did not previously exist' do
+ it 'creates project push_rule with the enabled reject_unsigned_commits attribute' do
+ expect { importer.execute }.to change(project, :push_rule).from(nil)
+ expect(project.push_rule.reject_unsigned_commits).to be_truthy
+ end
+ end
+ end
+
+ context 'when the push_rules feature is not available' do
+ let(:push_rules_feature_available?) { false }
+
+ it_behaves_like 'does not change project attributes'
+ end
+ end
+
+ context 'when required_signatures rule is disabled' do
+ let(:required_signatures) { false }
+
+ it_behaves_like 'does not change project attributes'
+ end
+ end
+
+ context 'when branch is not default' do
+ context 'when required_conversation_resolution rule is enabled' do
+ let(:required_conversation_resolution) { true }
+
+ it_behaves_like 'does not change project attributes'
+ end
+
+ context 'when required_conversation_resolution rule is disabled' do
+ let(:required_conversation_resolution) { false }
+
+ it_behaves_like 'does not change project attributes'
+ end
+
+ context 'when required_signatures rule is enabled' do
+ let(:required_signatures) { true }
+
+ it_behaves_like 'does not change project attributes'
+ end
+
+ context 'when required_signatures rule is disabled' do
+ let(:required_signatures) { false }
+
+ it_behaves_like 'does not change project attributes'
+ end
+ end
+
+ context 'when required_pull_request_reviews rule is enabled on GitHub' do
+ let(:required_pull_request_reviews) { true }
+ let(:expected_push_access_level) { Gitlab::Access::NO_ACCESS }
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when required_pull_request_reviews rule is disabled on GitHub' do
+ let(:required_pull_request_reviews) { false }
+
+ context 'when branch is default' do
+ before do
+ allow(project).to receive(:default_branch).and_return(branch_name)
+ end
+
+ context 'when default branch protection = Gitlab::Access::PROTECTION_DEV_CAN_PUSH' do
+ before do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_PUSH)
+ end
+
+ let(:expected_push_access_level) { Gitlab::Access::DEVELOPER }
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when default branch protection = Gitlab::Access::PROTECTION_DEV_CAN_MERGE' do
+ before do
+ stub_application_setting(default_branch_protection: Gitlab::Access::PROTECTION_DEV_CAN_MERGE)
+ end
+
+ let(:expected_push_access_level) { Gitlab::Access::MAINTAINER }
+ let(:expected_merge_access_level) { Gitlab::Access::DEVELOPER }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+
+ context 'when branch is protected on GitLab' do
+ let(:protected_branch) do
+ create(
+ :protected_branch,
+ project: project,
+ name: 'protect*',
+ allow_force_push: true
+ )
+ end
+
+ let(:push_access_level) { protected_branch.push_access_levels.first }
+ let(:merge_access_level) { protected_branch.merge_access_levels.first }
+
+ context 'when there is branch protection rule for the role' do
+ context 'when No one can merge' do
+ before do
+ merge_access_level.update_column(:access_level, Gitlab::Access::NO_ACCESS)
+ end
+
+ let(:expected_push_access_level) { push_access_level.access_level }
+ let(:expected_merge_access_level) { Gitlab::Access::NO_ACCESS }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when Maintainers and Developers can merge' do
+ before do
+ merge_access_level.update_column(:access_level, Gitlab::Access::DEVELOPER)
+ end
+
+ let(:gitlab_push_access_level) { push_access_level.access_level }
+ let(:gitlab_merge_access_level) { merge_access_level.access_level }
+ let(:expected_push_access_level) { gitlab_push_access_level }
+ let(:expected_merge_access_level) { [gitlab_merge_access_level, github_default_merge_access_level].max }
+ let(:github_default_merge_access_level) do
+ Gitlab::GithubImport::Importer::ProtectedBranchImporter::GITHUB_DEFAULT_MERGE_ACCESS_LEVEL
+ end
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+
+ context 'when there is no branch protection rule for the role' do
+ before do
+ push_access_level.update_column(:user_id, project.owner.id)
+ merge_access_level.update_column(:user_id, project.owner.id)
+ end
+
+ let(:expected_push_access_level) { ProtectedBranch::PushAccessLevel::GITLAB_DEFAULT_ACCESS_LEVEL }
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+
+ context 'when branch is neither default nor protected on GitLab' do
+ let(:expected_push_access_level) { ProtectedBranch::PushAccessLevel::GITLAB_DEFAULT_ACCESS_LEVEL }
+ let(:expected_merge_access_level) { ProtectedBranch::MergeAccessLevel::GITLAB_DEFAULT_ACCESS_LEVEL }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
index 4e9208be985..a0ced456391 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb
@@ -23,11 +23,13 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
let(:github_protection_rule) do
response = Struct.new(:name, :url, :required_signatures, :enforce_admins, :required_linear_history,
:allow_force_pushes, :allow_deletion, :block_creations, :required_conversation_resolution,
+ :required_pull_request_reviews,
keyword_init: true
)
required_signatures = Struct.new(:url, :enabled, keyword_init: true)
enforce_admins = Struct.new(:url, :enabled, keyword_init: true)
allow_option = Struct.new(:enabled, keyword_init: true)
+ required_pull_request_reviews = Struct.new(:url, :dismissal_restrictions, keyword_init: true)
response.new(
name: 'main',
url: 'https://example.com/branches/main/protection',
@@ -53,6 +55,10 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do
),
required_conversation_resolution: allow_option.new(
enabled: false
+ ),
+ required_pull_request_reviews: required_pull_request_reviews.new(
+ url: 'https://example.com/branches/main/protection/required_pull_request_reviews',
+ dismissal_restrictions: {}
)
)
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
index 016f6e5377b..f3a9bbac785 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
@@ -7,15 +7,16 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestMergedByImporter, :cle
let(:project) { merge_request.project }
let(:merged_at) { Time.new(2017, 1, 1, 12, 00).utc }
- let(:client_double) { double(user: double(id: 999, login: 'merger', email: 'merger@email.com')) }
- let(:merger_user) { double(id: 999, login: 'merger') }
+ let(:client_double) { double(user: { id: 999, login: 'merger', email: 'merger@email.com' } ) }
+ let(:merger_user) { { id: 999, login: 'merger' } }
let(:pull_request) do
- instance_double(
- Gitlab::GithubImport::Representation::PullRequest,
- iid: merge_request.iid,
- merged_at: merged_at,
- merged_by: merger_user
+ Gitlab::GithubImport::Representation::PullRequest.from_api_response(
+ {
+ number: merge_request.iid,
+ merged_at: merged_at,
+ merged_by: merger_user
+ }
)
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index a6da40f47f1..fb6024d0952 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
let_it_be(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
- let(:client_double) { double(user: double(id: 999, login: 'author', email: 'author@email.com')) }
+ let(:client_double) { double(user: { id: 999, login: 'author', email: 'author@email.com' }) }
let(:submitted_at) { Time.new(2017, 1, 1, 12, 00).utc }
subject { described_class.new(review, project, client_double) }
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index c5846fa7a87..aa92abdb110 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -8,33 +8,30 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
let(:client) { double(:client) }
let(:pull_request) do
- double(
- :response,
+ {
number: 42,
title: 'My Pull Request',
body: 'This is my pull request',
state: 'closed',
- head: double(
- :head,
+ head: {
sha: '123abc',
ref: 'my-feature',
- repo: double(:repo, id: 400),
- user: double(:user, id: 4, login: 'alice')
- ),
- base: double(
- :base,
+ repo: { id: 400 },
+ user: { id: 4, login: 'alice' }
+ },
+ base: {
sha: '456def',
ref: 'master',
- repo: double(:repo, id: 200)
- ),
- milestone: double(:milestone, number: 4),
- user: double(:user, id: 4, login: 'alice'),
- assignee: double(:user, id: 4, login: 'alice'),
- merged_by: double(:user, id: 4, login: 'alice'),
+ repo: { id: 200 }
+ },
+ milestone: { number: 4 },
+ user: { id: 4, login: 'alice' },
+ assignee: { id: 4, login: 'alice' },
+ merged_by: { id: 4, login: 'alice' },
created_at: 1.second.ago,
updated_at: 1.second.ago,
merged_at: 1.second.ago
- )
+ }
end
describe '#parallel?' do
@@ -184,12 +181,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
context 'when the pull request was updated after the last update' do
let(:pr) do
- double(
- :pr,
+ {
updated_at: Time.zone.now,
- head: double(:head, sha: '123'),
- base: double(:base, sha: '456')
- )
+ head: { sha: '123' },
+ base: { sha: '456' }
+ }
end
before do
@@ -201,7 +197,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
it 'returns true when the head SHA is not present' do
expect(importer)
.to receive(:commit_exists?)
- .with(pr.head.sha)
+ .with('123')
.and_return(false)
expect(importer.update_repository?(pr)).to eq(true)
@@ -210,12 +206,12 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
it 'returns true when the base SHA is not present' do
expect(importer)
.to receive(:commit_exists?)
- .with(pr.head.sha)
+ .with('123')
.and_return(true)
expect(importer)
.to receive(:commit_exists?)
- .with(pr.base.sha)
+ .with('456')
.and_return(false)
expect(importer.update_repository?(pr)).to eq(true)
@@ -224,12 +220,12 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
it 'returns false if both the head and base SHAs are present' do
expect(importer)
.to receive(:commit_exists?)
- .with(pr.head.sha)
+ .with('123')
.and_return(true)
expect(importer)
.to receive(:commit_exists?)
- .with(pr.base.sha)
+ .with('456')
.and_return(true)
expect(importer.update_repository?(pr)).to eq(false)
@@ -238,7 +234,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
context 'when the pull request was updated before the last update' do
it 'returns false' do
- pr = double(:pr, updated_at: 1.year.ago)
+ pr = { updated_at: 1.year.ago }
allow(project)
.to receive(:last_repository_updated_at)
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
index 0eb86feb040..5f9c73cbfff 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end
describe '#id_for_already_imported_cache' do
- it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache({ id: 1 })).to eq(1) }
end
describe '#each_object_to_import', :clean_gitlab_redis_cache do
@@ -36,15 +36,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
)
end
- let(:review) { double(id: 1) }
+ let(:review) { { id: 1 } }
it 'fetches the pull requests reviews data' do
page = double(objects: [review], number: 1)
- expect(review)
- .to receive(:merge_request_id=)
- .with(merge_request.id)
-
expect(client)
.to receive(:each_page)
.exactly(:once) # ensure to be cached on the second call
@@ -55,6 +51,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
.to yield_with_args(review)
subject.each_object_to_import {}
+
+ expect(review[:merge_request_id]).to eq(merge_request.id)
end
it 'skips cached pages' do
diff --git a/spec/lib/gitlab/github_import/importer/release_attachments_importer_spec.rb b/spec/lib/gitlab/github_import/importer/release_attachments_importer_spec.rb
deleted file mode 100644
index 4779f9c8982..00000000000
--- a/spec/lib/gitlab/github_import/importer/release_attachments_importer_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::Importer::ReleaseAttachmentsImporter do
- subject(:importer) { described_class.new(release_attachments, project, client) }
-
- let_it_be(:project) { create(:project) }
-
- let(:client) { instance_double('Gitlab::GithubImport::Client') }
- let(:release) { create(:release, project: project, description: description) }
- let(:release_attachments) do
- Gitlab::GithubImport::Representation::ReleaseAttachments
- .from_json_hash(release_db_id: release.id, description: release.description)
- end
-
- let(:doc_url) { 'https://github.com/nickname/public-test-repo/files/9020437/git-cheat-sheet.txt' }
- let(:image_url) { 'https://user-images.githubusercontent.com/6833842/0cf366b61ef2.jpeg' }
- let(:description) do
- <<-TEXT.strip
- Some text...
-
- [special-doc](#{doc_url})
- ![image.jpeg](#{image_url})
- TEXT
- end
-
- describe '#execute' do
- let(:downloader_stub) { instance_double(Gitlab::GithubImport::AttachmentsDownloader) }
- let(:tmp_stub_doc) { Tempfile.create('attachment_download_test.txt') }
- let(:tmp_stub_image) { Tempfile.create('image.jpeg') }
-
- context 'when importing doc attachment' do
- before do
- allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(doc_url)
- .and_return(downloader_stub)
- allow(Gitlab::GithubImport::AttachmentsDownloader).to receive(:new).with(image_url)
- .and_return(downloader_stub)
- allow(downloader_stub).to receive(:perform).and_return(tmp_stub_doc, tmp_stub_image)
- allow(downloader_stub).to receive(:delete).twice
-
- allow(UploadService).to receive(:new)
- .with(project, tmp_stub_doc, FileUploader).and_call_original
- allow(UploadService).to receive(:new)
- .with(project, tmp_stub_image, FileUploader).and_call_original
- end
-
- it 'updates release description with new attachment url' do
- importer.execute
-
- release.reload
- expect(release.description).to start_with("Some text...\n\n [special-doc](/uploads/")
- expect(release.description).to include('![image.jpeg](/uploads/')
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/github_import/importer/releases_attachments_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_attachments_importer_spec.rb
deleted file mode 100644
index 1aeb3462cd5..00000000000
--- a/spec/lib/gitlab/github_import/importer/releases_attachments_importer_spec.rb
+++ /dev/null
@@ -1,74 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::Importer::ReleasesAttachmentsImporter do
- subject { described_class.new(project, client) }
-
- let_it_be(:project) { create(:project) }
-
- let(:client) { instance_double(Gitlab::GithubImport::Client) }
-
- describe '#each_object_to_import', :clean_gitlab_redis_cache do
- let!(:release_1) { create(:release, project: project) }
- let!(:release_2) { create(:release, project: project) }
-
- it 'iterates each project release' do
- list = []
- subject.each_object_to_import do |object|
- list << object
- end
- expect(list).to contain_exactly(release_1, release_2)
- end
-
- context 'when release is already processed' do
- it "doesn't process this release" do
- subject.mark_as_imported(release_1)
-
- list = []
- subject.each_object_to_import do |object|
- list << object
- end
- expect(list).to contain_exactly(release_2)
- end
- end
- end
-
- describe '#representation_class' do
- it { expect(subject.representation_class).to eq(Gitlab::GithubImport::Representation::ReleaseAttachments) }
- end
-
- describe '#importer_class' do
- it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::ReleaseAttachmentsImporter) }
- end
-
- describe '#sidekiq_worker_class' do
- it { expect(subject.sidekiq_worker_class).to eq(Gitlab::GithubImport::ImportReleaseAttachmentsWorker) }
- end
-
- describe '#collection_method' do
- it { expect(subject.collection_method).to eq(:release_attachments) }
- end
-
- describe '#object_type' do
- it { expect(subject.object_type).to eq(:release_attachment) }
- end
-
- describe '#id_for_already_imported_cache' do
- let(:release) { build_stubbed(:release) }
-
- it { expect(subject.id_for_already_imported_cache(release)).to eq(release.id) }
- end
-
- describe '#object_representation' do
- let(:release) { build_stubbed(:release) }
-
- it 'returns release attachments representation' do
- representation = subject.object_representation(release)
-
- expect(representation.class).to eq subject.representation_class
- expect(representation.release_db_id).to eq release.id
- expect(representation.description).to eq release.description
- end
- end
-end
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
index b0f553dbef7..84d639a09ef 100644
--- a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -10,22 +10,21 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:released_at) { Time.new(2017, 1, 1, 12, 00) }
let(:author) do
- double(
+ {
login: 'User A',
id: 1
- )
+ }
end
let(:github_release) do
- double(
- :github_release,
+ {
tag_name: '1.0',
name: github_release_name,
body: 'This is my release',
created_at: created_at,
published_at: released_at,
author: author
- )
+ }
end
def stub_email_for_github_username(user_name = 'User A', user_email = 'user@example.com')
@@ -56,7 +55,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
end
it 'imports draft releases' do
- release_double = double(
+ release_double = {
name: 'Test',
body: 'This is description',
tag_name: '1.0',
@@ -65,7 +64,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
updated_at: created_at,
published_at: nil,
author: author
- )
+ }
expect(importer).to receive(:each_release).and_return([release_double])
@@ -101,7 +100,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
end
it 'uses a default release description if none is provided' do
- expect(github_release).to receive(:body).and_return('')
+ github_release[:body] = nil
expect(importer).to receive(:each_release).and_return([github_release])
release = importer.build_releases.first
@@ -110,10 +109,10 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
end
it 'does not create releases that have a NULL tag' do
- null_tag_release = double(
+ null_tag_release = {
name: 'NULL Test',
tag_name: nil
- )
+ }
expect(importer).to receive(:each_release).and_return([null_tag_release])
expect(importer.build_releases).to be_empty
@@ -179,13 +178,13 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
end
it 'returns ghost user when author is empty in Github release' do
- allow(github_release).to receive(:author).and_return(nil)
+ github_release[:author] = nil
expect(release_hash[:author_id]).to eq(Gitlab::GithubImport.ghost_user_id)
end
context 'when Github author is not found in Gitlab' do
- let(:author) { double(login: 'octocat', id: 1 ) }
+ let(:author) { { login: 'octocat', id: 1 } }
before do
# Stub user email which does not match a Gitlab user.
@@ -222,11 +221,11 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
describe '#description_for' do
it 'returns the description when present' do
- expect(importer.description_for(github_release)).to eq(github_release.body)
+ expect(importer.description_for(github_release)).to eq(github_release[:body])
end
it 'returns a generated description when one is not present' do
- allow(github_release).to receive(:body).and_return('')
+ github_release[:body] = nil
expect(importer.description_for(github_release)).to eq('Release for tag 1.0')
end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
index 471302cb31b..081d08edfb3 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter d
it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::DiffNoteImporter) }
it { expect(subject.collection_method).to eq(:pull_request_comments) }
it { expect(subject.object_type).to eq(:diff_note) }
- it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache({ id: 1 })).to eq(1) }
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:merge_request) do
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter d
)
end
- let(:note) { double(id: 1) }
+ let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
it 'fetches data' do
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
index 4ed01fd7e0b..dde730d46d2 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
end
describe '#id_for_already_imported_cache' do
- let(:event) { instance_double('Event', id: 1) }
+ let(:event) { { id: 1 } }
it { expect(subject.id_for_already_imported_cache(event)).to eq(1) }
end
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:issue_event) do
struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
- struct.new(id: rand(10), event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
+ struct.new(id: 1, event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
end
let(:page) do
@@ -115,9 +115,17 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
it 'imports each issue event page by page' do
counter = 0
subject.each_object_to_import do |object|
- expect(object).to eq issue_event
- expect(issue_event.issue['number']).to eq issuable.iid
- expect(issue_event.issue['pull_request']).to eq false
+ expect(object).to eq(
+ {
+ id: 1,
+ event: 'closed',
+ created_at: '2022-04-26 18:30:53 UTC',
+ issue: {
+ number: issuable.iid,
+ pull_request: false
+ }
+ }
+ )
counter += 1
end
expect(counter).to eq 1
@@ -130,9 +138,17 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
it 'imports each merge request event page by page' do
counter = 0
subject.each_object_to_import do |object|
- expect(object).to eq issue_event
- expect(issue_event.issue['number']).to eq issuable.iid
- expect(issue_event.issue['pull_request']).to eq true
+ expect(object).to eq(
+ {
+ id: 1,
+ event: 'closed',
+ created_at: '2022-04-26 18:30:53 UTC',
+ issue: {
+ number: issuable.iid,
+ pull_request: true
+ }
+ }
+ )
counter += 1
end
expect(counter).to eq 1
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
index d769f4fdcf5..e1f65546e1d 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::NoteImporter) }
it { expect(subject.collection_method).to eq(:issue_comments) }
it { expect(subject.object_type).to eq(:note) }
- it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache({ id: 1 })).to eq(1) }
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:issue) do
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
)
end
- let(:note) { double(id: 1) }
+ let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
it 'fetches data' do
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
index 1dcc466d34c..5523b97acc3 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesIm
it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::NoteImporter) }
it { expect(subject.collection_method).to eq(:issue_comments) }
it { expect(subject.object_type).to eq(:note) }
- it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache({ id: 1 })).to eq(1) }
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:merge_request) do
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesIm
)
end
- let(:note) { double(id: 1) }
+ let(:note) { { id: 1 } }
let(:page) { double(objects: [note], number: 1) }
it 'fetches data' do
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
index d550f15e8c5..d3236994cef 100644
--- a/spec/lib/gitlab/github_import/issuable_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -3,11 +3,20 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache do
- let(:project) { double(:project, id: 4, group: nil) }
- let(:issue) do
- double(:issue, issuable_type: MergeRequest, issuable_id: 1)
+ let(:project) { double(:project, id: 4, import_data: import_data) }
+ let(:single_endpoint_optional_stage) { false }
+ let(:import_data) do
+ instance_double(
+ ProjectImportData,
+ data: {
+ optional_stages: {
+ single_endpoint_notes_import: single_endpoint_optional_stage
+ }
+ }.deep_stringify_keys
+ )
end
+ let(:issue) { double(:issue, issuable_type: MergeRequest, issuable_id: 1) }
let(:finder) { described_class.new(project, issue) }
describe '#database_id' do
@@ -28,13 +37,10 @@ RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache d
end
context 'when group is present' do
- context 'when github_importer_single_endpoint_notes_import feature flag is enabled' do
- it 'reads cache value with longer timeout' do
- project = create(:project, import_url: 'http://t0ken@github.com/user/repo.git')
- group = create(:group, projects: [project])
-
- stub_feature_flags(github_importer_single_endpoint_notes_import: group)
+ context 'when settings single_endpoint_notes_import is enabled' do
+ let(:single_endpoint_optional_stage) { true }
+ it 'reads cache value with longer timeout' do
expect(Gitlab::Cache::Import::Caching)
.to receive(:read)
.with(anything, timeout: Gitlab::Cache::Import::Caching::LONGER_TIMEOUT)
@@ -43,12 +49,8 @@ RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache d
end
end
- context 'when github_importer_single_endpoint_notes_import feature flag is disabled' do
+ context 'when settings single_endpoint_notes_import is disabled' do
it 'reads cache value with default timeout' do
- project = double(:project, id: 4, group: create(:group))
-
- stub_feature_flags(github_importer_single_endpoint_notes_import: false)
-
expect(Gitlab::Cache::Import::Caching)
.to receive(:read)
.with(anything, timeout: Gitlab::Cache::Import::Caching::TIMEOUT)
@@ -68,34 +70,25 @@ RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache d
finder.cache_database_id(10)
end
- context 'when group is present' do
- context 'when github_importer_single_endpoint_notes_import feature flag is enabled' do
- it 'caches value with longer timeout' do
- project = create(:project, import_url: 'http://t0ken@github.com/user/repo.git')
- group = create(:group, projects: [project])
-
- stub_feature_flags(github_importer_single_endpoint_notes_import: group)
+ context 'when settings single_endpoint_notes_import is enabled' do
+ let(:single_endpoint_optional_stage) { true }
- expect(Gitlab::Cache::Import::Caching)
- .to receive(:write)
- .with(anything, anything, timeout: Gitlab::Cache::Import::Caching::LONGER_TIMEOUT)
+ it 'caches value with longer timeout' do
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write)
+ .with(anything, anything, timeout: Gitlab::Cache::Import::Caching::LONGER_TIMEOUT)
- described_class.new(project, issue).cache_database_id(10)
- end
+ described_class.new(project, issue).cache_database_id(10)
end
+ end
- context 'when github_importer_single_endpoint_notes_import feature flag is disabled' do
- it 'caches value with default timeout' do
- project = double(:project, id: 4, group: create(:group))
-
- stub_feature_flags(github_importer_single_endpoint_notes_import: false)
-
- expect(Gitlab::Cache::Import::Caching)
- .to receive(:write)
- .with(anything, anything, timeout: Gitlab::Cache::Import::Caching::TIMEOUT)
+ context 'when settings single_endpoint_notes_import is disabled' do
+ it 'caches value with default timeout' do
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write)
+ .with(anything, anything, timeout: Gitlab::Cache::Import::Caching::TIMEOUT)
- described_class.new(project, issue).cache_database_id(10)
- end
+ described_class.new(project, issue).cache_database_id(10)
end
end
end
diff --git a/spec/lib/gitlab/github_import/markdown/attachment_spec.rb b/spec/lib/gitlab/github_import/markdown/attachment_spec.rb
new file mode 100644
index 00000000000..5d29de34141
--- /dev/null
+++ b/spec/lib/gitlab/github_import/markdown/attachment_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Markdown::Attachment do
+ let(:name) { FFaker::Lorem.word }
+ let(:url) { FFaker::Internet.uri('https') }
+
+ describe '.from_markdown' do
+ context "when it's a doc attachment" do
+ let(:doc_extension) { Gitlab::GithubImport::Markdown::Attachment::DOC_TYPES.sample }
+ let(:url) { "https://github.com/nickname/public-test-repo/files/3/git-cheat-sheet.#{doc_extension}" }
+ let(:name) { FFaker::Lorem.word }
+ let(:markdown_node) do
+ instance_double('CommonMarker::Node', url: url, to_plaintext: name, type: :link)
+ end
+
+ it 'returns instance with attachment info' do
+ attachment = described_class.from_markdown(markdown_node)
+
+ expect(attachment.name).to eq name
+ expect(attachment.url).to eq url
+ end
+
+ context "when type is not in whitelist" do
+ let(:doc_extension) { 'exe' }
+
+ it { expect(described_class.from_markdown(markdown_node)).to eq nil }
+ end
+
+ context 'when domain name is unknown' do
+ let(:url) do
+ "https://bitbucket.com/nickname/public-test-repo/files/3/git-cheat-sheet.#{doc_extension}"
+ end
+
+ it { expect(described_class.from_markdown(markdown_node)).to eq nil }
+ end
+ end
+
+ context "when it's an image attachment" do
+ let(:image_extension) { Gitlab::GithubImport::Markdown::Attachment::MEDIA_TYPES.sample }
+ let(:url) { "https://user-images.githubusercontent.com/1/uuid-1.#{image_extension}" }
+ let(:name) { FFaker::Lorem.word }
+ let(:markdown_node) do
+ instance_double('CommonMarker::Node', url: url, to_plaintext: name, type: :image)
+ end
+
+ it 'returns instance with attachment info' do
+ attachment = described_class.from_markdown(markdown_node)
+
+ expect(attachment.name).to eq name
+ expect(attachment.url).to eq url
+ end
+
+ context "when type is not in whitelist" do
+ let(:image_extension) { 'mkv' }
+
+ it { expect(described_class.from_markdown(markdown_node)).to eq nil }
+ end
+
+ context 'when domain name is unknown' do
+ let(:url) { "https://user-images.github.com/1/uuid-1.#{image_extension}" }
+
+ it { expect(described_class.from_markdown(markdown_node)).to eq nil }
+ end
+ end
+
+ context "when it's an inline html node" do
+ let(:name) { FFaker::Lorem.word }
+ let(:image_extension) { Gitlab::GithubImport::Markdown::Attachment::MEDIA_TYPES.sample }
+ let(:url) { "https://user-images.githubusercontent.com/1/uuid-1.#{image_extension}" }
+ let(:img) { "<img width=\"248\" alt=\"#{name}\" src=\"#{url}\">" }
+ let(:markdown_node) do
+ instance_double('CommonMarker::Node', string_content: img, type: :inline_html)
+ end
+
+ it 'returns instance with attachment info' do
+ attachment = described_class.from_markdown(markdown_node)
+
+ expect(attachment.name).to eq name
+ expect(attachment.url).to eq url
+ end
+ end
+ end
+
+ describe '#inspect' do
+ it 'returns attachment basic info' do
+ attachment = described_class.new(name, url)
+
+ expect(attachment.inspect).to eq "<Gitlab::GithubImport::Markdown::Attachment: { name: #{name}, url: #{url} }>"
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 1da6bb06403..3f771970588 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -60,31 +60,48 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
end
end
- describe '.fetch_attachment_urls' do
- let(:image_extension) { described_class::MEDIA_TYPES.sample }
+ describe '.fetch_attachments' do
+ let(:image_extension) { Gitlab::GithubImport::Markdown::Attachment::MEDIA_TYPES.sample }
let(:image_attachment) do
- "![special-image](https://user-images.githubusercontent.com/6833862/"\
- "176685788-e7a93168-7ded-406a-82b5-eb1c56685a93.#{image_extension})"
+ "![special-image](https://user-images.githubusercontent.com/1/uuid-1.#{image_extension})"
end
- let(:doc_extension) { described_class::DOC_TYPES.sample }
+ let(:img_tag_attachment) do
+ "<img width=\"248\" alt=\"tag-image\" src=\"https://user-images.githubusercontent.com/2/"\
+ "uuid-2.#{image_extension}\">"
+ end
+
+ let(:damaged_img_tag) do
+ "<img width=\"248\" alt=\"tag-image\" src=\"https://user-images.githubusercontent.com"
+ end
+
+ let(:doc_extension) { Gitlab::GithubImport::Markdown::Attachment::DOC_TYPES.sample }
let(:doc_attachment) do
"[some-doc](https://github.com/nickname/public-test-repo/"\
- "files/9020437/git-cheat-sheet.#{doc_extension})"
+ "files/3/git-cheat-sheet.#{doc_extension})"
end
let(:text) do
- <<-TEXT
+ <<-TEXT.split("\n").map(&:strip).join("\n")
Comment with an attachment
#{image_attachment}
#{FFaker::Lorem.sentence}
#{doc_attachment}
+ #{damaged_img_tag}
+ #{FFaker::Lorem.paragraph}
+ #{img_tag_attachment}
TEXT
end
- it 'fetches attachment urls' do
- expect(described_class.fetch_attachment_urls(text))
- .to contain_exactly(image_attachment, doc_attachment)
+ it 'fetches attachments' do
+ attachments = described_class.fetch_attachments(text)
+
+ expect(attachments.map(&:name)).to contain_exactly('special-image', 'tag-image', 'some-doc')
+ expect(attachments.map(&:url)).to contain_exactly(
+ "https://user-images.githubusercontent.com/1/uuid-1.#{image_extension}",
+ "https://user-images.githubusercontent.com/2/uuid-2.#{image_extension}",
+ "https://github.com/nickname/public-test-repo/files/3/git-cheat-sheet.#{doc_extension}"
+ )
end
end
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 860bb60f3ed..cefad3baa31 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -295,11 +295,12 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
end
it 'imports data in parallel batches with delays' do
- expect(worker_class).to receive(:bulk_perform_in).with(1.second, [
- [project.id, { title: 'Foo' }, an_instance_of(String)],
- [project.id, { title: 'Foo' }, an_instance_of(String)],
- [project.id, { title: 'Foo' }, an_instance_of(String)]
- ], batch_size: batch_size, batch_delay: batch_delay)
+ expect(worker_class).to receive(:bulk_perform_in)
+ .with(1.second, [
+ [project.id, { title: 'Foo' }, an_instance_of(String)],
+ [project.id, { title: 'Foo' }, an_instance_of(String)],
+ [project.id, { title: 'Foo' }, an_instance_of(String)]
+ ], batch_size: batch_size, batch_delay: batch_delay)
importer.parallel_import
end
@@ -308,7 +309,8 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
describe '#each_object_to_import' do
let(:importer) { importer_class.new(project, client) }
- let(:object) { double(:object) }
+ let(:object) { {} }
+ let(:object_counter_class) { Gitlab::GithubImport::ObjectCounter }
before do
expect(importer)
@@ -334,6 +336,9 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
.with(object)
.and_return(false)
+ expect(object_counter_class)
+ .to receive(:increment)
+
expect(importer)
.to receive(:mark_as_imported)
.with(object)
@@ -364,6 +369,9 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
.with(object)
.and_return(false)
+ expect(object_counter_class)
+ .to receive(:increment)
+
expect(importer)
.to receive(:mark_as_imported)
.with(object)
@@ -407,6 +415,9 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
.with(object)
.and_return(true)
+ expect(object_counter_class)
+ .not_to receive(:increment)
+
expect(importer)
.not_to receive(:mark_as_imported)
@@ -463,4 +474,13 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
importer.mark_as_imported(object)
end
end
+
+ describe '#increment_object_counter?' do
+ let(:github_issue) { {} }
+ let(:importer) { importer_class.new(project, client) }
+
+ it 'returns true' do
+ expect(importer).to be_increment_object_counter(github_issue)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
index fe3040c102b..a656cd0d056 100644
--- a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
let(:start_line) { nil }
let(:end_line) { 23 }
let(:note_body) { 'Hello world' }
- let(:user_data) { { 'id' => 4, 'login' => 'alice' } }
+ let(:user_data) { { id: 4, login: 'alice' } }
let(:side) { 'RIGHT' }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
@@ -275,15 +275,14 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
describe '.from_api_response' do
it_behaves_like 'a DiffNote representation' do
let(:response) do
- double(
- :response,
+ {
id: note_id,
html_url: 'https://github.com/foo/bar/pull/42',
path: 'README.md',
commit_id: '123abc',
original_commit_id: 'original123abc',
side: side,
- user: user_data && double(:user, user_data),
+ user: user_data,
diff_hunk: hunk,
body: note_body,
created_at: created_at,
@@ -291,7 +290,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_red
line: end_line,
start_line: start_line,
in_reply_to_id: in_reply_to_id
- )
+ }
end
subject(:note) { described_class.from_api_response(response) }
diff --git a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
index 0256858ecf1..0dd281cb3b0 100644
--- a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
@@ -186,7 +186,7 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
assignee: with_assignee ? user_resource.new(id: 5, login: 'tom') : nil,
requested_reviewer: with_reviewer ? user_resource.new(id: 6, login: 'mickey') : nil,
review_requester: with_reviewer ? user_resource.new(id: 7, login: 'minnie') : nil,
- issue: { 'number' => 2, 'pull_request' => pull_request },
+ issue: { number: 2, pull_request: pull_request },
created_at: '2022-04-26 18:30:53 UTC',
performed_via_github_app: nil
)
diff --git a/spec/lib/gitlab/github_import/representation/issue_spec.rb b/spec/lib/gitlab/github_import/representation/issue_spec.rb
index 5898518343a..263ef8b1708 100644
--- a/spec/lib/gitlab/github_import/representation/issue_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_spec.rb
@@ -74,20 +74,19 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do
describe '.from_api_response' do
let(:response) do
- double(
- :response,
+ {
number: 42,
title: 'My Issue',
body: 'This is my issue',
- milestone: double(:milestone, number: 4),
+ milestone: { number: 4 },
state: 'open',
- assignees: [double(:user, id: 4, login: 'alice')],
- labels: [double(:label, name: 'bug')],
- user: double(:user, id: 4, login: 'alice'),
+ assignees: [{ id: 4, login: 'alice' }],
+ labels: [{ name: 'bug' }],
+ user: { id: 4, login: 'alice' },
created_at: created_at,
updated_at: updated_at,
pull_request: false
- )
+ }
end
let(:additional_data) { { work_item_type_id: work_item_type_id } }
@@ -97,9 +96,7 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do
end
it 'does not set the user if the response did not include a user' do
- allow(response)
- .to receive(:user)
- .and_return(nil)
+ response[:user] = nil
issue = described_class.from_api_response(response, additional_data)
diff --git a/spec/lib/gitlab/github_import/representation/note_spec.rb b/spec/lib/gitlab/github_import/representation/note_spec.rb
index 9f416eb3c02..49126dbe9c5 100644
--- a/spec/lib/gitlab/github_import/representation/note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/note_spec.rb
@@ -48,15 +48,14 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
describe '.from_api_response' do
let(:response) do
- double(
- :response,
+ {
html_url: 'https://github.com/foo/bar/issues/42',
- user: double(:user, id: 4, login: 'alice'),
+ user: { id: 4, login: 'alice' },
body: 'Hello world',
created_at: created_at,
updated_at: updated_at,
id: 1
- )
+ }
end
it_behaves_like 'a Note' do
@@ -64,9 +63,7 @@ RSpec.describe Gitlab::GithubImport::Representation::Note do
end
it 'does not set the user if the response did not include a user' do
- allow(response)
- .to receive(:user)
- .and_return(nil)
+ response[:user] = nil
note = described_class.from_api_response(response)
diff --git a/spec/lib/gitlab/github_import/representation/note_text_spec.rb b/spec/lib/gitlab/github_import/representation/note_text_spec.rb
new file mode 100644
index 00000000000..8b57c9a0373
--- /dev/null
+++ b/spec/lib/gitlab/github_import/representation/note_text_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Representation::NoteText do
+ shared_examples 'a Note text data' do |match_record_type|
+ it 'returns an instance of NoteText' do
+ expect(representation).to be_an_instance_of(described_class)
+ end
+
+ it 'includes record DB id' do
+ expect(representation.record_db_id).to eq 42
+ end
+
+ it 'includes record type' do
+ expect(representation.record_type).to eq match_record_type
+ end
+
+ it 'includes note text' do
+ expect(representation.text).to eq 'Some text here..'
+ end
+ end
+
+ describe '.from_db_record' do
+ context 'with Release' do
+ let(:record) { build_stubbed(:release, id: 42, description: 'Some text here..') }
+
+ it_behaves_like 'a Note text data', 'Release' do
+ let(:representation) { described_class.from_db_record(record) }
+ end
+ end
+
+ context 'with Issue' do
+ let(:record) { build_stubbed(:issue, id: 42, description: 'Some text here..') }
+
+ it_behaves_like 'a Note text data', 'Issue' do
+ let(:representation) { described_class.from_db_record(record) }
+ end
+ end
+
+ context 'with MergeRequest' do
+ let(:record) { build_stubbed(:merge_request, id: 42, description: 'Some text here..') }
+
+ it_behaves_like 'a Note text data', 'MergeRequest' do
+ let(:representation) { described_class.from_db_record(record) }
+ end
+ end
+
+ context 'with Note' do
+ let(:record) { build_stubbed(:note, id: 42, note: 'Some text here..') }
+
+ it_behaves_like 'a Note text data', 'Note' do
+ let(:representation) { described_class.from_db_record(record) }
+ end
+ end
+ end
+
+ describe '.from_json_hash' do
+ it_behaves_like 'a Note text data', 'Release' do
+ let(:hash) do
+ {
+ 'record_db_id' => 42,
+ 'record_type' => 'Release',
+ 'text' => 'Some text here..'
+ }
+ end
+
+ let(:representation) { described_class.from_json_hash(hash) }
+ end
+ end
+
+ describe '#github_identifiers' do
+ it 'returns a hash with needed identifiers' do
+ record_id = rand(100)
+ representation = described_class.new(record_db_id: record_id, text: 'text')
+
+ expect(representation.github_identifiers).to eq({ db_id: record_id })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
index e762dc469c1..30b29659eee 100644
--- a/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
@@ -9,24 +9,47 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
end
context 'with ProtectedBranch' do
- it 'includes the protected branch ID (name)' do
+ it 'includes the protected branch ID (name) attribute' do
expect(protected_branch.id).to eq 'main'
end
- it 'includes the protected branch allow_force_pushes' do
+ it 'includes the protected branch allow_force_pushes attribute' do
expect(protected_branch.allow_force_pushes).to eq true
end
+
+ it 'includes the protected branch required_conversation_resolution attribute' do
+ expect(protected_branch.required_conversation_resolution).to eq true
+ end
+
+ it 'includes the protected branch required_pull_request_reviews' do
+ expect(protected_branch.required_pull_request_reviews).to eq true
+ end
end
end
describe '.from_api_response' do
let(:response) do
- response = Struct.new(:url, :allow_force_pushes, keyword_init: true)
- allow_force_pushes = Struct.new(:enabled, keyword_init: true)
+ response = Struct.new(
+ :url, :allow_force_pushes, :required_conversation_resolution, :required_signatures,
+ :required_pull_request_reviews,
+ keyword_init: true
+ )
+ enabled_setting = Struct.new(:enabled, keyword_init: true)
+ required_pull_request_reviews = Struct.new(:url, :dismissal_restrictions, keyword_init: true)
response.new(
url: 'https://example.com/branches/main/protection',
- allow_force_pushes: allow_force_pushes.new(
+ allow_force_pushes: enabled_setting.new(
+ enabled: true
+ ),
+ required_conversation_resolution: enabled_setting.new(
+ enabled: true
+ ),
+ required_signatures: enabled_setting.new(
enabled: true
+ ),
+ required_pull_request_reviews: required_pull_request_reviews.new(
+ url: 'https://example.com/branches/main/protection/required_pull_request_reviews',
+ dismissal_restrictions: {}
)
)
end
@@ -41,7 +64,10 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
let(:hash) do
{
'id' => 'main',
- 'allow_force_pushes' => true
+ 'allow_force_pushes' => true,
+ 'required_conversation_resolution' => true,
+ 'required_signatures' => true,
+ 'required_pull_request_reviews' => true
}
end
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
index d6e7a8172f7..0203da9f4fb 100644
--- a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
@@ -21,15 +21,14 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do
describe '.from_api_response' do
let(:response) do
- double(
- :response,
+ {
id: 999,
merge_request_id: 42,
body: 'note',
state: 'APPROVED',
- user: double(:user, id: 4, login: 'alice'),
+ user: { id: 4, login: 'alice' },
submitted_at: submitted_at
- )
+ }
end
it_behaves_like 'a PullRequest review' do
@@ -37,9 +36,7 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do
end
it 'does not set the user if the response did not include a user' do
- allow(response)
- .to receive(:user)
- .and_return(nil)
+ response[:user] = nil
review = described_class.from_api_response(response)
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
index deb9535a845..b8c1c67e07c 100644
--- a/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/pull_request_spec.rb
@@ -93,33 +93,30 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequest do
describe '.from_api_response' do
let(:response) do
- double(
- :response,
+ {
number: 42,
title: 'My Pull Request',
body: 'This is my pull request',
state: 'closed',
- head: double(
- :head,
+ head: {
sha: '123abc',
ref: 'my-feature',
- repo: double(:repo, id: 400),
- user: double(:user, id: 4, login: 'alice')
- ),
- base: double(
- :base,
+ repo: { id: 400 },
+ user: { id: 4, login: 'alice' }
+ },
+ base: {
sha: '456def',
ref: 'master',
- repo: double(:repo, id: 200)
- ),
- milestone: double(:milestone, number: 4),
- user: double(:user, id: 4, login: 'alice'),
- assignee: double(:user, id: 4, login: 'alice'),
- merged_by: double(:user, id: 4, login: 'alice'),
+ repo: { id: 200 }
+ },
+ milestone: { number: 4 },
+ user: { id: 4, login: 'alice' },
+ assignee: { id: 4, login: 'alice' },
+ merged_by: { id: 4, login: 'alice' },
created_at: created_at,
updated_at: updated_at,
merged_at: merged_at
- )
+ }
end
it_behaves_like 'a PullRequest' do
@@ -127,9 +124,7 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequest do
end
it 'does not set the user if the response did not include a user' do
- allow(response)
- .to receive(:user)
- .and_return(nil)
+ response[:user] = nil
pr = described_class.from_api_response(response)
diff --git a/spec/lib/gitlab/github_import/representation/release_attachments_spec.rb b/spec/lib/gitlab/github_import/representation/release_attachments_spec.rb
deleted file mode 100644
index 0ef9dad6a13..00000000000
--- a/spec/lib/gitlab/github_import/representation/release_attachments_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::GithubImport::Representation::ReleaseAttachments do
- shared_examples 'a Release attachments data' do
- it 'returns an instance of ReleaseAttachments' do
- expect(representation).to be_an_instance_of(described_class)
- end
-
- it 'includes release DB id' do
- expect(representation.release_db_id).to eq 42
- end
-
- it 'includes release description' do
- expect(representation.description).to eq 'Some text here..'
- end
- end
-
- describe '.from_db_record' do
- let(:release) { build_stubbed(:release, id: 42, description: 'Some text here..') }
-
- it_behaves_like 'a Release attachments data' do
- let(:representation) { described_class.from_db_record(release) }
- end
- end
-
- describe '.from_json_hash' do
- it_behaves_like 'a Release attachments data' do
- let(:hash) do
- {
- 'release_db_id' => 42,
- 'description' => 'Some text here..'
- }
- end
-
- let(:representation) { described_class.from_json_hash(hash) }
- end
- end
-
- describe '#github_identifiers' do
- it 'returns a hash with needed identifiers' do
- release_id = rand(100)
- representation = described_class.new(release_db_id: release_id, description: 'text')
-
- expect(representation.github_identifiers).to eq({ db_id: release_id })
- end
- end
-end
diff --git a/spec/lib/gitlab/github_import/representation/user_spec.rb b/spec/lib/gitlab/github_import/representation/user_spec.rb
index d7219556ada..ccada558f8b 100644
--- a/spec/lib/gitlab/github_import/representation/user_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/user_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::GithubImport::Representation::User do
describe '.from_api_response' do
it_behaves_like 'a User' do
- let(:response) { double(:response, id: 42, login: 'alice') }
+ let(:response) { { id: 42, login: 'alice' } }
let(:user) { described_class.from_api_response(response) }
end
end
diff --git a/spec/lib/gitlab/github_import/settings_spec.rb b/spec/lib/gitlab/github_import/settings_spec.rb
new file mode 100644
index 00000000000..ad0c47e8e8a
--- /dev/null
+++ b/spec/lib/gitlab/github_import/settings_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Settings do
+ subject(:settings) { described_class.new(project) }
+
+ let_it_be(:project) { create(:project) }
+
+ let(:optional_stages) do
+ {
+ single_endpoint_issue_events_import: true,
+ single_endpoint_notes_import: false,
+ attachments_import: false
+ }
+ end
+
+ describe '.stages_array' do
+ let(:expected_list) do
+ stages = described_class::OPTIONAL_STAGES
+ [
+ {
+ name: 'single_endpoint_issue_events_import',
+ label: stages[:single_endpoint_issue_events_import][:label],
+ details: stages[:single_endpoint_issue_events_import][:details]
+ },
+ {
+ name: 'single_endpoint_notes_import',
+ label: stages[:single_endpoint_notes_import][:label],
+ details: stages[:single_endpoint_notes_import][:details]
+ },
+ {
+ name: 'attachments_import',
+ label: stages[:attachments_import][:label].strip,
+ details: stages[:attachments_import][:details]
+ }
+ ]
+ end
+
+ it 'returns stages list as array' do
+ expect(described_class.stages_array).to match_array(expected_list)
+ end
+ end
+
+ describe '#write' do
+ let(:data_input) do
+ {
+ single_endpoint_issue_events_import: true,
+ single_endpoint_notes_import: 'false',
+ attachments_import: nil,
+ foo: :bar
+ }.stringify_keys
+ end
+
+ it 'puts optional steps flags into projects import_data' do
+ settings.write(data_input)
+
+ expect(project.import_data.data['optional_stages'])
+ .to eq optional_stages.stringify_keys
+ end
+ end
+
+ describe '#enabled?' do
+ it 'returns is enabled or not specific optional stage' do
+ project.create_or_update_import_data(data: { optional_stages: optional_stages })
+
+ expect(settings.enabled?(:single_endpoint_issue_events_import)).to eq true
+ expect(settings.enabled?(:single_endpoint_notes_import)).to eq false
+ expect(settings.enabled?(:attachments_import)).to eq false
+ end
+ end
+
+ describe '#disabled?' do
+ it 'returns is disabled or not specific optional stage' do
+ project.create_or_update_import_data(data: { optional_stages: optional_stages })
+
+ expect(settings.disabled?(:single_endpoint_issue_events_import)).to eq false
+ expect(settings.disabled?(:single_endpoint_notes_import)).to eq true
+ expect(settings.disabled?(:attachments_import)).to eq true
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 8ebbff31f64..d77aaa0e846 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -17,8 +17,8 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
describe '#author_id_for' do
context 'with default author_key' do
it 'returns the user ID for the author of an object' do
- user = double(:user, id: 4, login: 'kittens')
- note = double(:note, author: user)
+ user = { id: 4, login: 'kittens' }
+ note = { author: user }
expect(finder).to receive(:user_id_for).with(user).and_return(42)
@@ -26,8 +26,8 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
end
it 'returns the ID of the project creator if no user ID could be found' do
- user = double(:user, id: 4, login: 'kittens')
- note = double(:note, author: user)
+ user = { id: 4, login: 'kittens' }
+ note = { author: user }
expect(finder).to receive(:user_id_for).with(user).and_return(nil)
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
end
it 'returns the ID of the ghost user when the object has no user' do
- note = double(:note, author: nil)
+ note = { author: nil }
expect(finder.author_id_for(note)).to eq([User.ghost.id, true])
end
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
end
context 'with a non-default author_key' do
- let(:user) { double(:user, id: 4, login: 'kittens') }
+ let(:user) { { id: 4, login: 'kittens' } }
shared_examples 'user ID finder' do |author_key|
it 'returns the user ID for an object' do
@@ -57,25 +57,25 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
end
context 'when the author_key parameter is :actor' do
- let(:issue_event) { double('Gitlab::GithubImport::Representation::IssueEvent', actor: user) }
+ let(:issue_event) { { actor: user } }
it_behaves_like 'user ID finder', :actor
end
context 'when the author_key parameter is :assignee' do
- let(:issue_event) { double('Gitlab::GithubImport::Representation::IssueEvent', assignee: user) }
+ let(:issue_event) { { assignee: user } }
it_behaves_like 'user ID finder', :assignee
end
context 'when the author_key parameter is :requested_reviewer' do
- let(:issue_event) { double('Gitlab::GithubImport::Representation::IssueEvent', requested_reviewer: user) }
+ let(:issue_event) { { requested_reviewer: user } }
it_behaves_like 'user ID finder', :requested_reviewer
end
context 'when the author_key parameter is :review_requester' do
- let(:issue_event) { double('Gitlab::GithubImport::Representation::IssueEvent', review_requester: user) }
+ let(:issue_event) { { review_requester: user } }
it_behaves_like 'user ID finder', :review_requester
end
@@ -84,15 +84,15 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
describe '#assignee_id_for' do
it 'returns the user ID for the assignee of an issuable' do
- user = double(:user, id: 4, login: 'kittens')
- issue = double(:issue, assignee: user)
+ user = { id: 4, login: 'kittens' }
+ issue = { assignee: user }
expect(finder).to receive(:user_id_for).with(user).and_return(42)
expect(finder.assignee_id_for(issue)).to eq(42)
end
it 'returns nil if the issuable does not have an assignee' do
- issue = double(:issue, assignee: nil)
+ issue = { assignee: nil }
expect(finder).not_to receive(:user_id_for)
expect(finder.assignee_id_for(issue)).to be_nil
@@ -101,9 +101,9 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
describe '#user_id_for' do
it 'returns the user ID for the given user' do
- user = double(:user, id: 4, login: 'kittens')
+ user = { id: 4, login: 'kittens' }
- expect(finder).to receive(:find).with(user.id, user.login).and_return(42)
+ expect(finder).to receive(:find).with(user[:id], user[:login]).and_return(42)
expect(finder.user_id_for(user)).to eq(42)
end
@@ -221,7 +221,7 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
end
context 'when an Email address is not cached' do
- let(:user) { double(:user, email: email) }
+ let(:user) { { email: email } }
it 'retrieves the Email address from the GitHub API' do
expect(client).to receive(:user).with('kittens').and_return(user)
@@ -251,7 +251,7 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
end
it 'shortens the timeout for Email address in cache when an Email address is private/nil from GitHub' do
- user = double(:user, email: nil)
+ user = { email: nil }
expect(client).to receive(:user).with('kittens').and_return(user)
expect(Gitlab::Cache::Import::Caching)
diff --git a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
index 5006d27c356..f14f0098a1f 100644
--- a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
+++ b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
@@ -43,10 +43,11 @@ RSpec.describe Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp do
it 're-formats the params hash' do
params = result['params']
- expect(params).to eq([
- { 'key' => 'description', 'value' => '[FILTERED]' },
- { 'key' => 'name', 'value' => 'gitlab test' },
- { 'key' => 'int', 'value' => 42 }
- ])
+ expect(params).to eq(
+ [
+ { 'key' => 'description', 'value' => '[FILTERED]' },
+ { 'key' => 'name', 'value' => 'gitlab test' },
+ { 'key' => 'int', 'value' => 42 }
+ ])
end
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index bf09e98331f..1124868bdae 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -51,6 +51,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
before do
stub_feature_flags(graphql_keyset_pagination_without_next_page_query: false)
+ allow(GitlabSchema).to receive(:default_max_page_size).and_return(2)
end
it 'invokes an extra query for the next page check' do
diff --git a/spec/lib/gitlab/health_checks/master_check_spec.rb b/spec/lib/gitlab/health_checks/master_check_spec.rb
index 8a87b01c560..5cd26f6302a 100644
--- a/spec/lib/gitlab/health_checks/master_check_spec.rb
+++ b/spec/lib/gitlab/health_checks/master_check_spec.rb
@@ -4,16 +4,14 @@ require 'fast_spec_helper'
require_relative './simple_check_shared'
RSpec.describe Gitlab::HealthChecks::MasterCheck do
- before do
- stub_const('SUCCESS_CODE', 100)
- stub_const('FAILURE_CODE', 101)
- end
-
context 'when Puma runs in Clustered mode' do
before do
allow(Gitlab::Runtime).to receive(:puma_in_clustered_mode?).and_return(true)
- described_class.register_master
+ # We need to capture the read pipe here to stub out the non-blocking read.
+ # The original implementation actually forked the test suite for a more
+ # end-to-end test but that caused knock-on effects on other tests.
+ @pipe_read, _ = described_class.register_master
end
after do
@@ -25,34 +23,40 @@ RSpec.describe Gitlab::HealthChecks::MasterCheck do
end
describe '.readiness' do
- context 'when master is running' do
- it 'worker does return success' do
- _, child_status = run_worker
-
- expect(child_status.exitstatus).to eq(SUCCESS_CODE)
+ context 'when no worker registered' do
+ it 'succeeds' do
+ expect(described_class.readiness.success).to be(true)
end
end
- context 'when master finishes early' do
- before do
- described_class.send(:close_write)
+ context 'when worker registers itself' do
+ context 'when reading from pipe succeeds' do
+ it 'succeeds' do
+ expect(@pipe_read).to receive(:read_nonblock) # rubocop: disable RSpec/InstanceVariable
+
+ described_class.register_worker
+
+ expect(described_class.readiness.success).to be(true)
+ end
end
- it 'worker does return failure' do
- _, child_status = run_worker
+ context 'when read pipe is open but not ready for reading' do
+ it 'succeeds' do
+ expect(@pipe_read).to receive(:read_nonblock).and_raise(IO::EAGAINWaitReadable) # rubocop: disable RSpec/InstanceVariable
+
+ described_class.register_worker
- expect(child_status.exitstatus).to eq(FAILURE_CODE)
+ expect(described_class.readiness.success).to be(true)
+ end
end
end
- def run_worker
- pid = fork do
- described_class.register_worker
+ context 'when master finishes early' do
+ it 'fails' do
+ described_class.finish_master
- exit(described_class.readiness.success ? SUCCESS_CODE : FAILURE_CODE)
+ expect(described_class.readiness.success).to be(false)
end
-
- Process.wait2(pid)
end
end
end
diff --git a/spec/lib/gitlab/hook_data/release_builder_spec.rb b/spec/lib/gitlab/hook_data/release_builder_spec.rb
index 449965f5df1..08f9de4a2ed 100644
--- a/spec/lib/gitlab/hook_data/release_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/release_builder_spec.rb
@@ -13,12 +13,12 @@ RSpec.describe Gitlab::HookData::ReleaseBuilder do
it 'includes safe attribute' do
%w[
- id
- created_at
- description
- name
- released_at
- tag
+ id
+ created_at
+ description
+ name
+ released_at
+ tag
].each do |key|
expect(data).to include(key)
end
diff --git a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
index 42cf9c54798..297fe3ade07 100644
--- a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
+++ b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
allow_next_instance_of(ProjectExportWorker) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
-
- stub_feature_flags(import_export_web_upload_stream: false)
stub_uploads_object_storage(FileUploader, enabled: false)
end
@@ -109,108 +107,68 @@ RSpec.describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
end
context 'when object store is enabled' do
+ let(:object_store_url) { 'http://object-storage/project.tar.gz' }
+
before do
- object_store_url = 'http://object-storage/project.tar.gz'
stub_uploads_object_storage(FileUploader)
- stub_request(:get, object_store_url)
- stub_request(:post, example_url)
+
allow(import_export_upload.export_file).to receive(:url).and_return(object_store_url)
allow(import_export_upload.export_file).to receive(:file_storage?).and_return(false)
end
- it 'reads file using Gitlab::HttpIO and uploads to external url' do
- expect_next_instance_of(Gitlab::HttpIO) do |http_io|
- expect(http_io).to receive(:read).and_call_original
+ it 'uploads file as a remote stream' do
+ arguments = {
+ download_url: object_store_url,
+ upload_url: example_url,
+ options: {
+ upload_method: :post,
+ upload_content_type: 'application/gzip'
+ }
+ }
+
+ expect_next_instance_of(Gitlab::ImportExport::RemoteStreamUpload, arguments) do |remote_stream_upload|
+ expect(remote_stream_upload).to receive(:execute)
end
- expect(Gitlab::ImportExport::RemoteStreamUpload).not_to receive(:new)
+ expect(Gitlab::HttpIO).not_to receive(:new)
strategy.execute(user, project)
-
- expect(a_request(:post, example_url)).to have_been_made
end
- end
-
- context 'when `import_export_web_upload_stream` feature is enabled' do
- before do
- stub_feature_flags(import_export_web_upload_stream: true)
- end
-
- context 'when remote object store is disabled' do
- it 'reads file from disk and uploads to external url' do
- stub_request(:post, example_url).to_return(status: 200)
- expect(Gitlab::ImportExport::RemoteStreamUpload).not_to receive(:new)
- expect(Gitlab::HttpIO).not_to receive(:new)
-
- strategy.execute(user, project)
-
- expect(a_request(:post, example_url)).to have_been_made
- end
- end
-
- context 'when object store is enabled' do
- let(:object_store_url) { 'http://object-storage/project.tar.gz' }
+ context 'when upload as remote stream raises an exception' do
before do
- stub_uploads_object_storage(FileUploader)
-
- allow(import_export_upload.export_file).to receive(:url).and_return(object_store_url)
- allow(import_export_upload.export_file).to receive(:file_storage?).and_return(false)
+ allow_next_instance_of(Gitlab::ImportExport::RemoteStreamUpload) do |remote_stream_upload|
+ allow(remote_stream_upload).to receive(:execute).and_raise(
+ Gitlab::ImportExport::RemoteStreamUpload::StreamError.new('Exception error message', 'Response body')
+ )
+ end
end
- it 'uploads file as a remote stream' do
- arguments = {
- download_url: object_store_url,
- upload_url: example_url,
- options: {
- upload_method: :post,
- upload_content_type: 'application/gzip'
- }
- }
-
- expect_next_instance_of(Gitlab::ImportExport::RemoteStreamUpload, arguments) do |remote_stream_upload|
- expect(remote_stream_upload).to receive(:execute)
+ it 'logs the exception and stores the error message' do
+ expect_next_instance_of(Gitlab::Export::Logger) do |logger|
+ expect(logger).to receive(:error).ordered.with(
+ {
+ project_id: project.id,
+ project_name: project.name,
+ message: 'Exception error message',
+ response_body: 'Response body'
+ }
+ )
+
+ expect(logger).to receive(:error).ordered.with(
+ {
+ project_id: project.id,
+ project_name: project.name,
+ message: 'After export strategy failed',
+ 'exception.class' => 'Gitlab::ImportExport::RemoteStreamUpload::StreamError',
+ 'exception.message' => 'Exception error message',
+ 'exception.backtrace' => anything
+ }
+ )
end
- expect(Gitlab::HttpIO).not_to receive(:new)
strategy.execute(user, project)
- end
- context 'when upload as remote stream raises an exception' do
- before do
- allow_next_instance_of(Gitlab::ImportExport::RemoteStreamUpload) do |remote_stream_upload|
- allow(remote_stream_upload).to receive(:execute).and_raise(
- Gitlab::ImportExport::RemoteStreamUpload::StreamError.new('Exception error message', 'Response body')
- )
- end
- end
-
- it 'logs the exception and stores the error message' do
- expect_next_instance_of(Gitlab::Export::Logger) do |logger|
- expect(logger).to receive(:error).ordered.with(
- {
- project_id: project.id,
- project_name: project.name,
- message: 'Exception error message',
- response_body: 'Response body'
- }
- )
-
- expect(logger).to receive(:error).ordered.with(
- {
- project_id: project.id,
- project_name: project.name,
- message: 'After export strategy failed',
- 'exception.class' => 'Gitlab::ImportExport::RemoteStreamUpload::StreamError',
- 'exception.message' => 'Exception error message',
- 'exception.backtrace' => anything
- }
- )
- end
-
- strategy.execute(user, project)
-
- expect(project.import_export_shared.errors.first).to eq('Exception error message')
- end
+ expect(project.import_export_shared.errors.first).to eq('Exception error message')
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index e270ca9ec6a..ccc4f1f7149 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -61,6 +61,8 @@ issues:
- requirement
- incident_management_issuable_escalation_status
- incident_management_timeline_events
+- incident_management_timeline_event_tags
+- incident_management_timeline_event_links
- pending_escalations
- customer_relations_contacts
- issue_customer_relations_contacts
@@ -95,6 +97,7 @@ label_links:
label:
- subscriptions
- project
+- parent_container
- lists
- label_links
- issues
@@ -296,6 +299,10 @@ ci_pipelines:
- package_build_infos
- package_file_build_infos
- build_trace_chunks
+- pipeline_metadata
+pipeline_metadata:
+- project
+- pipeline
ci_refs:
- project
- ci_pipelines
@@ -541,6 +548,7 @@ project:
- path_locks
- approver_groups
- repository_state
+- wiki_repository_state
- source_pipelines
- sourced_pipelines
- prometheus_metrics
@@ -567,6 +575,7 @@ project:
- project_registry
- packages
- package_files
+- repository_files
- packages_cleanup_policy
- alerting_setting
- project_setting
@@ -615,6 +624,7 @@ project:
- incident_management_oncall_rotations
- incident_management_escalation_policies
- incident_management_issuable_escalation_statuses
+- incident_management_timeline_event_tags
- debian_distributions
- merge_request_metrics
- security_orchestration_policy_configuration
@@ -632,6 +642,7 @@ project:
- vulnerability_reads
- build_artifacts_size_refresh
- project_callouts
+- pipeline_metadata
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index 89ae869ae86..1444897e136 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -116,15 +116,15 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
shared_examples 'excluded attributes' do
excluded_attributes = %w[
- id
- parent_id
- owner_id
- created_at
- updated_at
- runners_token
- runners_token_encrypted
- saml_discovery_token
- ]
+ id
+ parent_id
+ owner_id
+ created_at
+ updated_at
+ runners_token
+ runners_token_encrypted
+ saml_discovery_token
+ ]
before do
group.add_owner(importer_user)
diff --git a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
index de4d193a21c..85d07e3fe63 100644
--- a/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_saver_spec.rb
@@ -51,11 +51,12 @@ RSpec.describe Gitlab::ImportExport::Group::TreeSaver do
.map { |line| Integer(line) }
expect(groups_catalog.size).to eq(3)
- expect(groups_catalog).to eq([
- group.id,
- group.descendants.first.id,
- group.descendants.first.descendants.first.id
- ])
+ expect(groups_catalog).to eq(
+ [
+ group.id,
+ group.descendants.first.id,
+ group.descendants.first.descendants.first.id
+ ])
end
it 'has a file per group' do
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 52b33e22089..936c63fd6cd 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
context 'hook object' do
let(:relation_sym) { :hooks }
let(:id) { 999 }
- let(:service_id) { 99 }
+ let(:integration_id) { 99 }
let(:original_project_id) { 8 }
let(:token) { 'secret' }
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
'project_id' => original_project_id,
'created_at' => '2016-08-12T09:41:03.462Z',
'updated_at' => '2016-08-12T09:41:03.462Z',
- 'service_id' => service_id,
+ 'integration_id' => integration_id,
'push_events' => true,
'issues_events' => false,
'confidential_issues_events' => false,
@@ -71,8 +71,8 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
expect(created_object.id).not_to eq(id)
end
- it 'does not have the original service_id' do
- expect(created_object.service_id).not_to eq(service_id)
+ it 'does not have the original integration_id' do
+ expect(created_object.integration_id).not_to eq(integration_id)
end
it 'does not have the original project_id' do
@@ -88,10 +88,10 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
end
context 'original service exists' do
- let(:service_id) { create(:integration, project: project).id }
+ let(:integration_id) { create(:integration, project: project).id }
- it 'does not have the original service_id' do
- expect(created_object.service_id).not_to eq(service_id)
+ it 'does not have the original integration_id' do
+ expect(created_object.integration_id).not_to eq(integration_id)
end
end
@@ -302,7 +302,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
let(:relation_sym) { :hazardous_foo_model }
let(:relation_hash) do
{
- 'service_id' => 99,
+ 'integration_id' => 99,
'moved_to_id' => 99,
'namespace_id' => 99,
'ci_id' => 99,
@@ -317,7 +317,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
before do
stub_const('HazardousFooModel', Class.new(FooModel))
HazardousFooModel.class_eval do
- attr_accessor :service_id, :moved_to_id, :namespace_id, :ci_id, :random_project_id, :random_id, :milestone_id, :project_id
+ attr_accessor :integration_id, :moved_to_id, :namespace_id, :ci_id, :random_project_id, :random_id, :milestone_id, :project_id
end
allow(HazardousFooModel).to receive(:reflect_on_association).and_return(nil)
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 299e107c881..fae94a3b544 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -140,13 +140,13 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
it 'restores pipelines based on ascending id order' do
expected_ordered_shas = %w[
- 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73
- ce84140e8b878ce6e7c4d298c7202ff38170e3ac
- 048721d90c449b244b7b4c53a9186b04330174ec
- sha-notes
- 5f923865dde3436854e9ceb9cdb7815618d4e849
- d2d430676773caa88cdaf7c55944073b2fd5561a
- 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73
+ 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73
+ ce84140e8b878ce6e7c4d298c7202ff38170e3ac
+ 048721d90c449b244b7b4c53a9186b04330174ec
+ sha-notes
+ 5f923865dde3436854e9ceb9cdb7815618d4e849
+ d2d430676773caa88cdaf7c55944073b2fd5561a
+ 2ea1f3dec713d940208fb5ce4a38765ecb5d3f73
]
project = Project.find_by_path('project')
@@ -156,6 +156,15 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
+ it 'restores pipeline metadata' do
+ pipeline = Ci::Pipeline.find_by_sha('sha-notes')
+ pipeline_metadata = pipeline.pipeline_metadata
+
+ expect(pipeline_metadata.title).to eq('Build pipeline')
+ expect(pipeline_metadata.pipeline_id).to eq(pipeline.id)
+ expect(pipeline_metadata.project_id).to eq(pipeline.project_id)
+ end
+
it 'preserves updated_at on issues' do
issue = Issue.find_by(description: 'Aliquam enim illo et possimus.')
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index e591cbd05a0..23eb93a1bce 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -332,6 +332,11 @@ Ci::Pipeline:
- iid
- merge_request_id
- external_pull_request_id
+Ci::PipelineMetadata:
+- id
+- project_id
+- pipeline_id
+- title
Ci::Stage:
- id
- name
@@ -697,6 +702,7 @@ ProjectCiCdSetting:
- runner_token_expiration_interval
ProjectSetting:
- allow_merge_on_skipped_pipeline
+- only_allow_merge_if_all_status_checks_passed
- has_confluence
- has_shimo
- has_vulnerabilities
diff --git a/spec/lib/gitlab/import_export/uploads_manager_spec.rb b/spec/lib/gitlab/import_export/uploads_manager_spec.rb
index 0cfe3a69a09..5fc3a70169a 100644
--- a/spec/lib/gitlab/import_export/uploads_manager_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_manager_spec.rb
@@ -78,16 +78,30 @@ RSpec.describe Gitlab::ImportExport::UploadsManager do
context 'when upload is in object storage' do
before do
stub_uploads_object_storage(FileUploader)
- allow(manager).to receive(:download_or_copy_upload).and_raise(Errno::ENAMETOOLONG)
end
- it 'ignores problematic upload and logs exception' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(Errno::ENAMETOOLONG), project_id: project.id)
+ shared_examples 'export with invalid upload' do
+ it 'ignores problematic upload and logs exception' do
+ allow(manager).to receive(:download_or_copy_upload).and_raise(exception)
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(exception), project_id: project.id)
- manager.save # rubocop:disable Rails/SaveBang
+ manager.save # rubocop:disable Rails/SaveBang
- expect(shared.errors).to be_empty
- expect(File).not_to exist(exported_file_path)
+ expect(shared.errors).to be_empty
+ expect(File).not_to exist(exported_file_path)
+ end
+ end
+
+ context 'when filename is too long' do
+ let(:exception) { Errno::ENAMETOOLONG }
+
+ include_examples 'export with invalid upload'
+ end
+
+ context 'when network exception occurs' do
+ let(:exception) { Net::OpenTimeout }
+
+ include_examples 'export with invalid upload'
end
end
end
diff --git a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
index c936d2bc27d..0e6173b611f 100644
--- a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::ImportExport::WikiRepoSaver do
allow_next_instance_of(Gitlab::ImportExport) do |instance|
allow(instance).to receive(:storage_path).and_return(export_path)
end
- project_wiki.wiki
+ project_wiki.create_wiki_repository
project_wiki.create_page("index", "test content")
end
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index a2a482dde7c..9f654bbcd15 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -40,11 +40,11 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
context 'with results returned' do
jira_issue = Struct.new(:id)
- let_it_be(:jira_issues) { [jira_issue.new(1), jira_issue.new(2), jira_issue.new(3)] }
+ let_it_be(:jira_issues) { [jira_issue.new(1), jira_issue.new(2)] }
def mock_issue_serializer(count, raise_exception_on_even_mocks: false)
serializer = instance_double(Gitlab::JiraImport::IssueSerializer, execute: { key: 'data' })
- next_iid = project.issues.maximum(:iid).to_i
+ allow(Issue).to receive(:with_project_iid_supply).and_return('issue_iid')
count.times do |i|
if raise_exception_on_even_mocks && i.even?
@@ -53,16 +53,15 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
jira_issues[i],
current_user.id,
default_issue_type_id,
- { iid: next_iid + 1 }
+ { iid: 'issue_iid' }
).and_raise('Some error')
else
- next_iid += 1
expect(Gitlab::JiraImport::IssueSerializer).to receive(:new).with(
project,
jira_issues[i],
current_user.id,
default_issue_type_id,
- { iid: next_iid }
+ { iid: 'issue_iid' }
).and_return(serializer)
end
end
@@ -125,47 +124,6 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
expect(Gitlab::JiraImport.get_issues_next_start_at(project.id)).to eq(2)
end
end
-
- context 'when number of issues is above the threshold' do
- before do
- stub_const("#{described_class.name}::JIRA_IMPORT_THRESHOLD", 2)
- stub_const("#{described_class.name}::JIRA_IMPORT_PAUSE_LIMIT", 1)
- allow(Gitlab::ErrorTracking).to receive(:track_exception)
- allow_next_instance_of(Gitlab::JobWaiter) do |job_waiter|
- allow(job_waiter).to receive(:wait).with(5).and_return(job_waiter.wait(0.1))
- end
- end
-
- it 'schedules 2 import jobs with two pause points' do
- expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0], jira_issues[1], jira_issues[2]])
- expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).exactly(3).times
- expect(Gitlab::JiraImport::ImportIssueWorker)
- .to receive(:queue_size)
- .exactly(6).times
- .and_return(1, 2, 3, 2, 1, 0)
-
- mock_issue_serializer(3)
-
- expect(subject.execute).to have_received(:wait).with(5).twice
- end
-
- it 'tracks the exception if the queue size does not reduce' do
- expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0]])
- expect(Gitlab::JiraImport::ImportIssueWorker).not_to receive(:perform_async)
- expect(Gitlab::JiraImport::ImportIssueWorker)
- .to receive(:queue_size)
- .exactly(11).times
- .and_return(3)
-
- mock_issue_serializer(1)
-
- expect(subject.execute).to have_received(:wait).with(5).exactly(10).times
- expect(Gitlab::ErrorTracking)
- .to have_received(:track_exception)
- .with(described_class::RetriesExceededError, { project_id: project.id })
- .once
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb
index 7c093049e18..73276288765 100644
--- a/spec/lib/gitlab/json_spec.rb
+++ b/spec/lib/gitlab/json_spec.rb
@@ -8,6 +8,12 @@ RSpec.describe Gitlab::Json do
end
describe ".parse" do
+ it "is aliased" do
+ [:parse!, :load, :decode].each do |method|
+ expect(described_class.method(method)).to eq(described_class.method(:parse))
+ end
+ end
+
context "legacy_mode is disabled by default" do
it "parses an object" do
expect(subject.parse('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
@@ -178,6 +184,10 @@ RSpec.describe Gitlab::Json do
{ test: true, "foo.bar" => "baz", is_json: 1, some: [1, 2, 3] }
end
+ it "is aliased" do
+ expect(described_class.method(:encode)).to eq(described_class.method(:generate))
+ end
+
it "generates JSON" do
expected_string = <<~STR.chomp
{"test":true,"foo.bar":"baz","is_json":1,"some":[1,2,3]}
diff --git a/spec/lib/gitlab/kubernetes/rollout_instances_spec.rb b/spec/lib/gitlab/kubernetes/rollout_instances_spec.rb
index 3ac97ddc75d..a7b2352f496 100644
--- a/spec/lib/gitlab/kubernetes/rollout_instances_spec.rb
+++ b/spec/lib/gitlab/kubernetes/rollout_instances_spec.rb
@@ -51,13 +51,14 @@ RSpec.describe Gitlab::Kubernetes::RolloutInstances do
end
it 'returns instances when there are two stable deployments' do
- deployments, pods = setup([
- kube_deployment(name: 'one', track: 'stable', replicas: 1),
- kube_deployment(name: 'two', track: 'stable', replicas: 1)
- ], [
- kube_pod(name: 'one', status: 'Running', track: 'stable'),
- kube_pod(name: 'two', status: 'Running', track: 'stable')
- ])
+ deployments, pods = setup(
+ [
+ kube_deployment(name: 'one', track: 'stable', replicas: 1),
+ kube_deployment(name: 'two', track: 'stable', replicas: 1)
+ ], [
+ kube_pod(name: 'one', status: 'Running', track: 'stable'),
+ kube_pod(name: 'two', status: 'Running', track: 'stable')
+ ])
rollout_instances = described_class.new(deployments, pods)
expect(rollout_instances.pod_instances).to eq([{
@@ -76,13 +77,14 @@ RSpec.describe Gitlab::Kubernetes::RolloutInstances do
end
it 'returns instances for two deployments with different tracks' do
- deployments, pods = setup([
- kube_deployment(name: 'one', track: 'mytrack', replicas: 1),
- kube_deployment(name: 'two', track: 'othertrack', replicas: 1)
- ], [
- kube_pod(name: 'one', status: 'Running', track: 'mytrack'),
- kube_pod(name: 'two', status: 'Running', track: 'othertrack')
- ])
+ deployments, pods = setup(
+ [
+ kube_deployment(name: 'one', track: 'mytrack', replicas: 1),
+ kube_deployment(name: 'two', track: 'othertrack', replicas: 1)
+ ], [
+ kube_pod(name: 'one', status: 'Running', track: 'mytrack'),
+ kube_pod(name: 'two', status: 'Running', track: 'othertrack')
+ ])
rollout_instances = described_class.new(deployments, pods)
expect(rollout_instances.pod_instances).to eq([{
@@ -101,13 +103,14 @@ RSpec.describe Gitlab::Kubernetes::RolloutInstances do
end
it 'sorts stable tracks after canary tracks' do
- deployments, pods = setup([
- kube_deployment(name: 'one', track: 'stable', replicas: 1),
- kube_deployment(name: 'two', track: 'canary', replicas: 1)
- ], [
- kube_pod(name: 'one', status: 'Running', track: 'stable'),
- kube_pod(name: 'two', status: 'Running', track: 'canary')
- ])
+ deployments, pods = setup(
+ [
+ kube_deployment(name: 'one', track: 'stable', replicas: 1),
+ kube_deployment(name: 'two', track: 'canary', replicas: 1)
+ ], [
+ kube_pod(name: 'one', status: 'Running', track: 'stable'),
+ kube_pod(name: 'two', status: 'Running', track: 'canary')
+ ])
rollout_instances = described_class.new(deployments, pods)
expect(rollout_instances.pod_instances).to eq([{
diff --git a/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb
index 1a21ed29ab7..09dd04c76c9 100644
--- a/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/branch_formatter_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::BranchFormatter do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:commit) { create(:commit, project: project) }
let(:repo) { double }
let(:raw) do
@@ -16,19 +16,19 @@ RSpec.describe Gitlab::LegacyGithubImport::BranchFormatter do
describe '#exists?' do
it 'returns true when branch exists and commit is part of the branch' do
- branch = described_class.new(project, double(raw))
+ branch = described_class.new(project, raw)
expect(branch.exists?).to eq true
end
it 'returns false when branch exists and commit is not part of the branch' do
- branch = described_class.new(project, double(raw.merge(ref: 'feature')))
+ branch = described_class.new(project, raw.merge(ref: 'feature'))
expect(branch.exists?).to eq false
end
it 'returns false when branch does not exist' do
- branch = described_class.new(project, double(raw.merge(ref: 'removed-branch')))
+ branch = described_class.new(project, raw.merge(ref: 'removed-branch'))
expect(branch.exists?).to eq false
end
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::LegacyGithubImport::BranchFormatter do
describe '#repo' do
it 'returns raw repo' do
- branch = described_class.new(project, double(raw))
+ branch = described_class.new(project, raw)
expect(branch.repo).to eq repo
end
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::LegacyGithubImport::BranchFormatter do
describe '#sha' do
it 'returns raw sha' do
- branch = described_class.new(project, double(raw))
+ branch = described_class.new(project, raw)
expect(branch.sha).to eq commit.id
end
@@ -52,19 +52,19 @@ RSpec.describe Gitlab::LegacyGithubImport::BranchFormatter do
describe '#valid?' do
it 'returns true when raw sha and ref are present' do
- branch = described_class.new(project, double(raw))
+ branch = described_class.new(project, raw)
expect(branch.valid?).to eq true
end
it 'returns false when raw sha is blank' do
- branch = described_class.new(project, double(raw.merge(sha: nil)))
+ branch = described_class.new(project, raw.merge(sha: nil))
expect(branch.valid?).to eq false
end
it 'returns false when raw ref is blank' do
- branch = described_class.new(project, double(raw.merge(ref: nil)))
+ branch = described_class.new(project, raw.merge(ref: nil))
expect(branch.valid?).to eq false
end
diff --git a/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
index 85f7666fe85..8d6415b8179 100644
--- a/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/comment_formatter_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
+ let_it_be(:project) { create(:project) }
let(:client) { double }
- let(:project) { create(:project) }
- let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
+ let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
let(:created_at) { DateTime.strptime('2013-04-10T20:09:31Z') }
let(:updated_at) { DateTime.strptime('2014-03-03T18:58:10Z') }
let(:base) do
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
describe '#attributes' do
context 'when do not reference a portion of the diff' do
- let(:raw) { double(base) }
+ let(:raw) { base }
it 'returns formatted attributes' do
expected = {
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
}
end
- let(:raw) { double(base.merge(diff)) }
+ let(:raw) { base.merge(diff) }
it 'returns formatted attributes' do
expected = {
@@ -74,22 +74,22 @@ RSpec.describe Gitlab::LegacyGithubImport::CommentFormatter do
end
context 'when author is a GitLab user' do
- let(:raw) { double(base.merge(user: octocat)) }
+ let(:raw) { base.merge(user: octocat) }
it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(comment.attributes.fetch(:author_id)).to eq gl_user.id
end
it 'returns GitLab user id associated with GitHub email as author_id' do
- gl_user = create(:user, email: octocat.email)
+ gl_user = create(:user, email: octocat[:email])
expect(comment.attributes.fetch(:author_id)).to eq gl_user.id
end
it 'returns note without created at tag line' do
- create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(comment.attributes.fetch(:note)).to eq("I'm having a problem with this.")
end
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index 1800b42160d..cd66b93eb8b 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -59,23 +59,23 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
end
let(:label1) do
- double(
+ {
name: 'Bug',
color: 'ff0000',
url: "#{api_root}/repos/octocat/Hello-World/labels/bug"
- )
+ }
end
let(:label2) do
- double(
+ {
name: nil,
color: 'ff0000',
url: "#{api_root}/repos/octocat/Hello-World/labels/bug"
- )
+ }
end
let(:milestone) do
- double(
+ {
id: 1347, # For Gitea
number: 1347,
state: 'open',
@@ -86,11 +86,11 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
updated_at: updated_at,
closed_at: nil,
url: "#{api_root}/repos/octocat/Hello-World/milestones/1"
- )
+ }
end
let(:issue1) do
- double(
+ {
number: 1347,
milestone: nil,
state: 'open',
@@ -104,12 +104,12 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
updated_at: updated_at,
closed_at: nil,
url: "#{api_root}/repos/octocat/Hello-World/issues/1347",
- labels: [double(name: 'Label #1')]
- )
+ labels: [{ name: 'Label #1' }]
+ }
end
let(:issue2) do
- double(
+ {
number: 1348,
milestone: nil,
state: 'open',
@@ -123,12 +123,12 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
updated_at: updated_at,
closed_at: nil,
url: "#{api_root}/repos/octocat/Hello-World/issues/1348",
- labels: [double(name: 'Label #2')]
- )
+ labels: [{ name: 'Label #2' }]
+ }
end
let(:release1) do
- double(
+ {
tag_name: 'v1.0.0',
name: 'First release',
body: 'Release v1.0.0',
@@ -137,11 +137,11 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
published_at: created_at,
updated_at: updated_at,
url: "#{api_root}/repos/octocat/Hello-World/releases/1"
- )
+ }
end
let(:release2) do
- double(
+ {
tag_name: 'v1.1.0',
name: 'Second release',
body: nil,
@@ -150,7 +150,7 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
published_at: created_at,
updated_at: updated_at,
url: "#{api_root}/repos/octocat/Hello-World/releases/2"
- )
+ }
end
subject { described_class.new(project) }
@@ -210,18 +210,18 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
end
let(:project) { create(:project, :repository, :wiki_disabled, import_url: "#{repo_root}/octocat/Hello-World.git") }
- let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
+ let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
let(:credentials) { { user: 'joe' } }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') }
- let(:repository) { double(id: 1, fork: false) }
+ let(:repository) { { id: 1, fork: false } }
let(:source_sha) { create(:commit, project: project).id }
- let(:source_branch) { double(ref: 'branch-merged', repo: repository, sha: source_sha, user: octocat) }
+ let(:source_branch) { { ref: 'branch-merged', repo: repository, sha: source_sha, user: octocat } }
let(:target_sha) { create(:commit, project: project, git_commit: RepoHelpers.another_sample_commit).id }
- let(:target_branch) { double(ref: 'master', repo: repository, sha: target_sha, user: octocat) }
+ let(:target_branch) { { ref: 'master', repo: repository, sha: target_sha, user: octocat } }
let(:pull_request) do
- double(
+ {
number: 1347,
milestone: nil,
state: 'open',
@@ -236,12 +236,12 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
closed_at: nil,
merged_at: nil,
url: "#{api_root}/repos/octocat/Hello-World/pulls/1347",
- labels: [double(name: 'Label #2')]
- )
+ labels: [{ name: 'Label #2' }]
+ }
end
let(:closed_pull_request) do
- double(
+ {
number: 1347,
milestone: nil,
state: 'closed',
@@ -256,8 +256,8 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
closed_at: updated_at,
merged_at: nil,
url: "#{api_root}/repos/octocat/Hello-World/pulls/1347",
- labels: [double(name: 'Label #2')]
- )
+ labels: [{ name: 'Label #2' }]
+ }
end
context 'when importing a Gitea project' do
diff --git a/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
index a285a5820a2..56a51c6bddd 100644
--- a/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::IssuableFormatter do
let(:raw_data) do
- double(number: 42)
+ { number: 42 }
end
let(:project) { double(import_type: 'github') }
diff --git a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
index 454bab8846c..d3548fecbcd 100644
--- a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
+ let_it_be(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
let(:client) { double }
- let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
- let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
+ let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') }
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
shared_examples 'Gitlab::LegacyGithubImport::IssueFormatter#attributes' do
context 'when issue is open' do
- let(:raw_data) { double(base_data.merge(state: 'open')) }
+ let(:raw_data) { base_data.merge(state: 'open') }
it 'returns formatted attributes' do
expected = {
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
context 'when issue is closed' do
- let(:raw_data) { double(base_data.merge(state: 'closed')) }
+ let(:raw_data) { base_data.merge(state: 'closed') }
it 'returns formatted attributes' do
expected = {
@@ -76,28 +76,28 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
context 'when it is assigned to someone' do
- let(:raw_data) { double(base_data.merge(assignee: octocat)) }
+ let(:raw_data) { base_data.merge(assignee: octocat) }
it 'returns nil as assignee_id when is not a GitLab user' do
expect(issue.attributes.fetch(:assignee_ids)).to be_empty
end
it 'returns GitLab user id associated with GitHub id as assignee_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(issue.attributes.fetch(:assignee_ids)).to eq [gl_user.id]
end
it 'returns GitLab user id associated with GitHub email as assignee_id' do
- gl_user = create(:user, email: octocat.email)
+ gl_user = create(:user, email: octocat[:email])
expect(issue.attributes.fetch(:assignee_ids)).to eq [gl_user.id]
end
end
context 'when it has a milestone' do
- let(:milestone) { double(id: 42, number: 42) }
- let(:raw_data) { double(base_data.merge(milestone: milestone)) }
+ let(:milestone) { { id: 42, number: 42 } }
+ let(:raw_data) { base_data.merge(milestone: milestone) }
it 'returns nil when milestone does not exist' do
expect(issue.attributes.fetch(:milestone)).to be_nil
@@ -111,26 +111,26 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
context 'when author is a GitLab user' do
- let(:raw_data) { double(base_data.merge(user: octocat)) }
+ let(:raw_data) { base_data.merge(user: octocat) }
it 'returns project creator_id as author_id when is not a GitLab user' do
expect(issue.attributes.fetch(:author_id)).to eq project.creator_id
end
it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(issue.attributes.fetch(:author_id)).to eq gl_user.id
end
it 'returns GitLab user id associated with GitHub email as author_id' do
- gl_user = create(:user, email: octocat.email)
+ gl_user = create(:user, email: octocat[:email])
expect(issue.attributes.fetch(:author_id)).to eq gl_user.id
end
it 'returns description without created at tag line' do
- create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(issue.attributes.fetch(:description)).to eq("I'm having a problem with this.")
end
@@ -138,7 +138,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
shared_examples 'Gitlab::LegacyGithubImport::IssueFormatter#number' do
- let(:raw_data) { double(base_data.merge(number: 1347)) }
+ let(:raw_data) { base_data.merge(number: 1347) }
it 'returns issue number' do
expect(issue.number).to eq 1347
@@ -161,7 +161,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
describe '#has_comments?' do
context 'when number of comments is greater than zero' do
- let(:raw_data) { double(base_data.merge(comments: 1)) }
+ let(:raw_data) { base_data.merge(comments: 1) }
it 'returns true' do
expect(issue.has_comments?).to eq true
@@ -169,7 +169,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
context 'when number of comments is equal to zero' do
- let(:raw_data) { double(base_data.merge(comments: 0)) }
+ let(:raw_data) { base_data.merge(comments: 0) }
it 'returns false' do
expect(issue.has_comments?).to eq false
@@ -179,7 +179,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
describe '#pull_request?' do
context 'when mention a pull request' do
- let(:raw_data) { double(base_data.merge(pull_request: double)) }
+ let(:raw_data) { base_data.merge(pull_request: double) }
it 'returns true' do
expect(issue.pull_request?).to eq true
@@ -187,7 +187,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
end
context 'when does not mention a pull request' do
- let(:raw_data) { double(base_data.merge(pull_request: nil)) }
+ let(:raw_data) { base_data.merge(pull_request: nil) }
it 'returns false' do
expect(issue.pull_request?).to eq false
diff --git a/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb
index ab7c8ea4a58..8e2c8031a6f 100644
--- a/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/label_formatter_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::LabelFormatter do
- let(:project) { create(:project) }
- let(:raw) { double(name: 'improvements', color: 'e6e6e6') }
+ let_it_be(:project) { create(:project) }
+ let(:raw) { { name: 'improvements', color: 'e6e6e6' } }
subject { described_class.new(project, raw) }
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::LegacyGithubImport::LabelFormatter do
context 'when label exists' do
it 'does not create a new label' do
- Labels::CreateService.new(name: raw.name).execute(project: project)
+ Labels::CreateService.new(name: raw[:name]).execute(project: project)
expect { subject.create! }.not_to change(Label, :count)
end
diff --git a/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
index 64fcc46d304..7c57bf9c707 100644
--- a/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::MilestoneFormatter do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') }
let(:base_data) do
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::LegacyGithubImport::MilestoneFormatter do
let(:data) { base_data.merge(iid_attr => 1347) }
context 'when milestone is open' do
- let(:raw_data) { double(data.merge(state: 'open')) }
+ let(:raw_data) { data.merge(state: 'open') }
it 'returns formatted attributes' do
expected = {
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::LegacyGithubImport::MilestoneFormatter do
end
context 'when milestone is closed' do
- let(:raw_data) { double(data.merge(state: 'closed')) }
+ let(:raw_data) { data.merge(state: 'closed') }
it 'returns formatted attributes' do
expected = {
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::LegacyGithubImport::MilestoneFormatter do
context 'when milestone has a due date' do
let(:due_date) { DateTime.strptime('2011-01-28T19:01:12Z') }
- let(:raw_data) { double(data.merge(due_on: due_date)) }
+ let(:raw_data) { data.merge(due_on: due_date) }
it 'returns formatted attributes' do
expected = {
diff --git a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
index 7d8875e36c3..90469693820 100644
--- a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
@@ -3,22 +3,22 @@
require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
+ let_it_be(:project) { create(:project, :repository) }
let(:client) { double }
- let(:project) { create(:project, :repository) }
let(:source_sha) { create(:commit, project: project).id }
let(:target_commit) { create(:commit, project: project, git_commit: RepoHelpers.another_sample_commit) }
let(:target_sha) { target_commit.id }
let(:target_short_sha) { target_commit.id.to_s[0..7] }
- let(:repository) { double(id: 1, fork: false) }
+ let(:repository) { { id: 1, fork: false } }
let(:source_repo) { repository }
- let(:source_branch) { double(ref: 'branch-merged', repo: source_repo, sha: source_sha) }
- let(:forked_source_repo) { double(id: 2, fork: true, name: 'otherproject', full_name: 'company/otherproject') }
+ let(:source_branch) { { ref: 'branch-merged', repo: source_repo, sha: source_sha } }
+ let(:forked_source_repo) { { id: 2, fork: true, name: 'otherproject', full_name: 'company/otherproject' } }
let(:target_repo) { repository }
- let(:target_branch) { double(ref: 'master', repo: target_repo, sha: target_sha, user: octocat) }
- let(:removed_branch) { double(ref: 'removed-branch', repo: source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat) }
- let(:forked_branch) { double(ref: 'master', repo: forked_source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat) }
- let(:branch_deleted_repo) { double(ref: 'master', repo: nil, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat) }
- let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
+ let(:target_branch) { { ref: 'master', repo: target_repo, sha: target_sha, user: octocat } }
+ let(:removed_branch) { { ref: 'removed-branch', repo: source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat } }
+ let(:forked_branch) { { ref: 'master', repo: forked_source_repo, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat } }
+ let(:branch_deleted_repo) { { ref: 'master', repo: nil, sha: '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b', user: octocat } }
+ let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
let(:updated_at) { DateTime.strptime('2011-01-27T19:01:12Z') }
let(:base_data) do
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes' do
context 'when pull request is open' do
- let(:raw_data) { double(base_data.merge(state: 'open')) }
+ let(:raw_data) { base_data.merge(state: 'open') }
it 'returns formatted attributes' do
expected = {
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when pull request is closed' do
- let(:raw_data) { double(base_data.merge(state: 'closed')) }
+ let(:raw_data) { base_data.merge(state: 'closed') }
it 'returns formatted attributes' do
expected = {
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
context 'when pull request is merged' do
let(:merged_at) { DateTime.strptime('2011-01-28T13:01:12Z') }
- let(:raw_data) { double(base_data.merge(state: 'closed', merged_at: merged_at)) }
+ let(:raw_data) { base_data.merge(state: 'closed', merged_at: merged_at) }
it 'returns formatted attributes' do
expected = {
@@ -130,54 +130,54 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when it is assigned to someone' do
- let(:raw_data) { double(base_data.merge(assignee: octocat)) }
+ let(:raw_data) { base_data.merge(assignee: octocat) }
it 'returns nil as assignee_id when is not a GitLab user' do
expect(pull_request.attributes.fetch(:assignee_id)).to be_nil
end
it 'returns GitLab user id associated with GitHub id as assignee_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(pull_request.attributes.fetch(:assignee_id)).to eq gl_user.id
end
it 'returns GitLab user id associated with GitHub email as assignee_id' do
- gl_user = create(:user, email: octocat.email)
+ gl_user = create(:user, email: octocat[:email])
expect(pull_request.attributes.fetch(:assignee_id)).to eq gl_user.id
end
end
context 'when author is a GitLab user' do
- let(:raw_data) { double(base_data.merge(user: octocat)) }
+ let(:raw_data) { base_data.merge(user: octocat) }
it 'returns project creator_id as author_id when is not a GitLab user' do
expect(pull_request.attributes.fetch(:author_id)).to eq project.creator_id
end
it 'returns GitLab user id associated with GitHub id as author_id' do
- gl_user = create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(pull_request.attributes.fetch(:author_id)).to eq gl_user.id
end
it 'returns GitLab user id associated with GitHub email as author_id' do
- gl_user = create(:user, email: octocat.email)
+ gl_user = create(:user, email: octocat[:email])
expect(pull_request.attributes.fetch(:author_id)).to eq gl_user.id
end
it 'returns description without created at tag line' do
- create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(pull_request.attributes.fetch(:description)).to eq('Please pull these awesome changes')
end
end
context 'when it has a milestone' do
- let(:milestone) { double(id: 42, number: 42) }
- let(:raw_data) { double(base_data.merge(milestone: milestone)) }
+ let(:milestone) { { id: 42, number: 42 } }
+ let(:raw_data) { base_data.merge(milestone: milestone) }
it 'returns nil when milestone does not exist' do
expect(pull_request.attributes.fetch(:milestone)).to be_nil
@@ -192,7 +192,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#number' do
- let(:raw_data) { double(base_data) }
+ let(:raw_data) { base_data }
it 'returns pull request number' do
expect(pull_request.number).to eq 1347
@@ -201,7 +201,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#source_branch_name' do
context 'when source branch exists' do
- let(:raw_data) { double(base_data) }
+ let(:raw_data) { base_data }
it 'returns branch ref' do
expect(pull_request.source_branch_name).to eq 'branch-merged'
@@ -209,7 +209,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when source branch does not exist' do
- let(:raw_data) { double(base_data.merge(head: removed_branch)) }
+ let(:raw_data) { base_data.merge(head: removed_branch) }
it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do
expect(pull_request.source_branch_name).to eq "gh-#{target_short_sha}/1347/octocat/removed-branch"
@@ -217,7 +217,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when source branch is from a fork' do
- let(:raw_data) { double(base_data.merge(head: forked_branch)) }
+ let(:raw_data) { base_data.merge(head: forked_branch) }
it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do
expect(pull_request.source_branch_name).to eq "gh-#{target_short_sha}/1347/octocat/master"
@@ -225,7 +225,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when source branch is from a deleted fork' do
- let(:raw_data) { double(base_data.merge(head: branch_deleted_repo)) }
+ let(:raw_data) { base_data.merge(head: branch_deleted_repo) }
it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do
expect(pull_request.source_branch_name).to eq "gh-#{target_short_sha}/1347/octocat/master"
@@ -235,7 +235,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
shared_examples 'Gitlab::LegacyGithubImport::PullRequestFormatter#target_branch_name' do
context 'when target branch exists' do
- let(:raw_data) { double(base_data) }
+ let(:raw_data) { base_data }
it 'returns branch ref' do
expect(pull_request.target_branch_name).to eq 'master'
@@ -243,7 +243,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when target branch does not exist' do
- let(:raw_data) { double(base_data.merge(base: removed_branch)) }
+ let(:raw_data) { base_data.merge(base: removed_branch) }
it 'prefixes branch name with gh-:short_sha/:number/:user pattern to avoid collision' do
expect(pull_request.target_branch_name).to eq 'gl-2e5d3239/1347/octocat/removed-branch'
@@ -271,7 +271,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
describe '#valid?' do
context 'when source, and target repos are not a fork' do
- let(:raw_data) { double(base_data) }
+ let(:raw_data) { base_data }
it 'returns true' do
expect(pull_request.valid?).to eq true
@@ -279,8 +279,8 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when source repo is a fork' do
- let(:source_repo) { double(id: 2) }
- let(:raw_data) { double(base_data) }
+ let(:source_repo) { { id: 2 } }
+ let(:raw_data) { base_data }
it 'returns true' do
expect(pull_request.valid?).to eq true
@@ -288,8 +288,8 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when target repo is a fork' do
- let(:target_repo) { double(id: 2) }
- let(:raw_data) { double(base_data) }
+ let(:target_repo) { { id: 2 } }
+ let(:raw_data) { base_data }
it 'returns true' do
expect(pull_request.valid?).to eq true
@@ -299,7 +299,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
describe '#cross_project?' do
context 'when source and target repositories are different' do
- let(:raw_data) { double(base_data.merge(head: forked_branch)) }
+ let(:raw_data) { base_data.merge(head: forked_branch) }
it 'returns true' do
expect(pull_request.cross_project?).to eq true
@@ -307,7 +307,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when source repository does not exist anymore' do
- let(:raw_data) { double(base_data.merge(head: branch_deleted_repo)) }
+ let(:raw_data) { base_data.merge(head: branch_deleted_repo) }
it 'returns true' do
expect(pull_request.cross_project?).to eq true
@@ -315,7 +315,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
context 'when source and target repositories are the same' do
- let(:raw_data) { double(base_data.merge(head: source_branch)) }
+ let(:raw_data) { base_data.merge(head: source_branch) }
it 'returns false' do
expect(pull_request.cross_project?).to eq false
@@ -324,7 +324,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
describe '#source_branch_exists?' do
- let(:raw_data) { double(base_data.merge(head: forked_branch)) }
+ let(:raw_data) { base_data.merge(head: forked_branch) }
it 'returns false when is a cross_project' do
expect(pull_request.source_branch_exists?).to eq false
@@ -332,7 +332,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
describe '#url' do
- let(:raw_data) { double(base_data) }
+ let(:raw_data) { base_data }
it 'return raw url' do
expect(pull_request.url).to eq 'https://api.github.com/repos/octocat/Hello-World/pulls/1347'
@@ -340,7 +340,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
end
describe '#opened?' do
- let(:raw_data) { double(base_data.merge(state: 'open')) }
+ let(:raw_data) { base_data.merge(state: 'open') }
it 'returns true when state is "open"' do
expect(pull_request.opened?).to be_truthy
diff --git a/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
index cbd1a30c417..237646f81dc 100644
--- a/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/release_formatter_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::ReleaseFormatter do
- let!(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
- let(:octocat) { double(id: 123456, login: 'octocat') }
+ let_it_be(:project) { create(:project, namespace: create(:namespace, path: 'octocat')) }
+ let(:octocat) { { id: 123456, login: 'octocat' } }
let(:created_at) { DateTime.strptime('2011-01-26T19:01:12Z') }
let(:published_at) { DateTime.strptime('2011-01-26T20:00:00Z') }
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::LegacyGithubImport::ReleaseFormatter do
subject(:release) { described_class.new(project, raw_data) }
describe '#attributes' do
- let(:raw_data) { double(base_data) }
+ let(:raw_data) { base_data }
it 'returns formatted attributes' do
expected = {
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::LegacyGithubImport::ReleaseFormatter do
describe '#valid' do
context 'when release is not a draft' do
- let(:raw_data) { double(base_data) }
+ let(:raw_data) { base_data }
it 'returns true' do
expect(release.valid?).to eq true
@@ -57,7 +57,7 @@ RSpec.describe Gitlab::LegacyGithubImport::ReleaseFormatter do
end
context 'when release is draft' do
- let(:raw_data) { double(base_data.merge(draft: true)) }
+ let(:raw_data) { base_data.merge(draft: true) }
it 'returns false' do
expect(release.valid?).to eq false
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::LegacyGithubImport::ReleaseFormatter do
end
context 'when release has NULL tag' do
- let(:raw_data) { double(base_data.merge(tag_name: '')) }
+ let(:raw_data) { base_data.merge(tag_name: '') }
it 'returns false' do
expect(release.valid?).to eq false
diff --git a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
index ab3ffddc042..bc127f74e84 100644
--- a/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/user_formatter_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do
let(:client) { double }
- let(:octocat) { double(id: 123456, login: 'octocat', email: 'octocat@example.com') }
+ let(:octocat) { { id: 123456, login: 'octocat', email: 'octocat@example.com' } }
subject(:user) { described_class.new(client, octocat) }
@@ -15,33 +15,33 @@ RSpec.describe Gitlab::LegacyGithubImport::UserFormatter do
describe '#gitlab_id' do
context 'when GitHub user is a GitLab user' do
it 'return GitLab user id when user associated their account with GitHub' do
- gl_user = create(:omniauth_user, extern_uid: octocat.id, provider: 'github')
+ gl_user = create(:omniauth_user, extern_uid: octocat[:id], provider: 'github')
expect(user.gitlab_id).to eq gl_user.id
end
it 'returns GitLab user id when user confirmed primary email matches GitHub email' do
- gl_user = create(:user, email: octocat.email)
+ gl_user = create(:user, email: octocat[:email])
expect(user.gitlab_id).to eq gl_user.id
end
it 'returns GitLab user id when user unconfirmed primary email matches GitHub email' do
- gl_user = create(:user, :unconfirmed, email: octocat.email)
+ gl_user = create(:user, :unconfirmed, email: octocat[:email])
expect(user.gitlab_id).to eq gl_user.id
end
it 'returns GitLab user id when user confirmed secondary email matches GitHub email' do
gl_user = create(:user, email: 'johndoe@example.com')
- create(:email, :confirmed, user: gl_user, email: octocat.email)
+ create(:email, :confirmed, user: gl_user, email: octocat[:email])
expect(user.gitlab_id).to eq gl_user.id
end
it 'returns nil when user unconfirmed secondary email matches GitHub email' do
gl_user = create(:user, email: 'johndoe@example.com')
- create(:email, user: gl_user, email: octocat.email)
+ create(:email, user: gl_user, email: octocat[:email])
expect(user.gitlab_id).to be_nil
end
diff --git a/spec/lib/gitlab/memory/diagnostic_reports_logger_spec.rb b/spec/lib/gitlab/memory/diagnostic_reports_logger_spec.rb
new file mode 100644
index 00000000000..6be528e34b6
--- /dev/null
+++ b/spec/lib/gitlab/memory/diagnostic_reports_logger_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Memory::DiagnosticReportsLogger do
+ subject { described_class.new('/dev/null') }
+
+ let(:now) { Time.current }
+
+ describe '#format_message' do
+ it 'formats incoming hash properly' do
+ output = subject.format_message('INFO', now, 'test', { hello: 1 })
+ # Disabling the cop because it is not relevant, we encode with `JSON.generate`. Allows `fast_spec_helper`.
+ data = JSON.parse(output) # rubocop: disable Gitlab/Json
+
+ expect(data['severity']).to eq('INFO')
+ expect(data['time']).to eq(now.utc.iso8601(3))
+ expect(data['hello']).to eq(1)
+ expect(data['message']).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/reports_daemon_spec.rb b/spec/lib/gitlab/memory/reports_daemon_spec.rb
index c9562470971..0473e170502 100644
--- a/spec/lib/gitlab/memory/reports_daemon_spec.rb
+++ b/spec/lib/gitlab/memory/reports_daemon_spec.rb
@@ -2,9 +2,15 @@
require 'spec_helper'
-RSpec.describe Gitlab::Memory::ReportsDaemon do
+RSpec.describe Gitlab::Memory::ReportsDaemon, :aggregate_failures do
let(:daemon) { described_class.new }
+ let_it_be(:tmp_dir) { Dir.mktmpdir }
+
+ after(:all) do
+ FileUtils.remove_entry(tmp_dir)
+ end
+
describe '#run_thread' do
let(:report_duration_counter) { instance_double(::Prometheus::Client::Counter) }
let(:file_size) { 1_000_000 }
@@ -22,13 +28,10 @@ RSpec.describe Gitlab::Memory::ReportsDaemon do
allow(File).to receive(:size).with(/#{daemon.reports_path}.*\.json/).and_return(file_size)
end
- it 'runs reports' do
- expect(daemon.send(:reports)).to all(receive(:run).twice.and_call_original)
-
- daemon.send(:run_thread)
- end
+ it 'runs reports, logs and sets gauge' do
+ expect(daemon.send(:reports))
+ .to all(receive(:run).twice { Tempfile.new("report.json", tmp_dir).path })
- it 'logs report execution' do
expect(::Prometheus::PidProvider).to receive(:worker_id).at_least(:once).and_return('worker_1')
expect(Gitlab::AppLogger).to receive(:info).with(
@@ -42,6 +45,8 @@ RSpec.describe Gitlab::Memory::ReportsDaemon do
perf_report: 'jemalloc_stats'
)).twice
+ expect(report_duration_counter).to receive(:increment).with({ report: 'jemalloc_stats' }, an_instance_of(Float))
+
daemon.send(:run_thread)
end
@@ -51,18 +56,15 @@ RSpec.describe Gitlab::Memory::ReportsDaemon do
end
it 'logs `0` as `perf_report_size_bytes`' do
+ expect(daemon.send(:reports))
+ .to all(receive(:run).twice { Tempfile.new("report.json", tmp_dir).path })
+
expect(Gitlab::AppLogger).to receive(:info).with(hash_including(perf_report_size_bytes: 0)).twice
daemon.send(:run_thread)
end
end
- it 'sets real time duration gauge' do
- expect(report_duration_counter).to receive(:increment).with({ report: 'jemalloc_stats' }, an_instance_of(Float))
-
- daemon.send(:run_thread)
- end
-
it 'allows configure and run multiple reports' do
# rubocop: disable RSpec/VerifiedDoubles
# We test how ReportsDaemon could be extended in the future
@@ -74,8 +76,8 @@ RSpec.describe Gitlab::Memory::ReportsDaemon do
allow(daemon).to receive(:reports).and_return([active_report_1, inactive_report, active_report_2])
- expect(active_report_1).to receive(:run).and_return('/tmp/report_1.json').twice
- expect(active_report_2).to receive(:run).and_return('/tmp/report_2.json').twice
+ expect(active_report_1).to receive(:run).and_return(File.join(tmp_dir, 'report_1.json')).twice
+ expect(active_report_2).to receive(:run).and_return(File.join(tmp_dir, 'report_2.json')).twice
expect(inactive_report).not_to receive(:run)
daemon.send(:run_thread)
@@ -87,6 +89,9 @@ RSpec.describe Gitlab::Memory::ReportsDaemon do
daemon = described_class.new
allow(daemon).to receive(:alive).and_return(true, true, false)
+ expect(daemon.send(:reports))
+ .to all(receive(:run).twice { Tempfile.new("report.json", tmp_dir).path })
+
expect(daemon).to receive(:sleep).with(described_class::DEFAULT_SLEEP_S).ordered
expect(daemon).to receive(:sleep).with(described_class::DEFAULT_SLEEP_BETWEEN_REPORTS_S).ordered
expect(daemon).to receive(:sleep).with(described_class::DEFAULT_SLEEP_S).ordered
@@ -120,7 +125,7 @@ RSpec.describe Gitlab::Memory::ReportsDaemon do
stub_env('GITLAB_DIAGNOSTIC_REPORTS_SLEEP_S', 100)
stub_env('GITLAB_DIAGNOSTIC_REPORTS_SLEEP_MAX_DELTA_S', 50)
stub_env('GITLAB_DIAGNOSTIC_REPORTS_SLEEP_BETWEEN_REPORTS_S', 2)
- stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', '/empty-dir')
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', tmp_dir)
end
it 'uses provided values' do
@@ -129,7 +134,7 @@ RSpec.describe Gitlab::Memory::ReportsDaemon do
expect(daemon.sleep_s).to eq(100)
expect(daemon.sleep_max_delta_s).to eq(50)
expect(daemon.sleep_between_reports_s).to eq(2)
- expect(daemon.reports_path).to eq('/empty-dir')
+ expect(daemon.reports_path).to eq(tmp_dir)
end
end
end
diff --git a/spec/lib/gitlab/memory/reports_uploader_spec.rb b/spec/lib/gitlab/memory/reports_uploader_spec.rb
new file mode 100644
index 00000000000..9ff830716f2
--- /dev/null
+++ b/spec/lib/gitlab/memory/reports_uploader_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Memory::ReportsUploader, :aggregate_failures do
+ let(:gcs_key) { 'test_gcs_key' }
+ let(:gcs_project) { 'test_gcs_project' }
+ let(:gcs_bucket) { 'test_gcs_bucket' }
+ let(:logger) { instance_double(Gitlab::Memory::DiagnosticReportsLogger) }
+
+ let(:uploader) do
+ described_class.new(gcs_key: gcs_key, gcs_project: gcs_project, gcs_bucket: gcs_bucket, logger: logger)
+ end
+
+ # rubocop: disable RSpec/VerifiedDoubles
+ # `Fog::Storage::Google` does not implement `put_object` itself, so it is tricky to pinpoint particular method
+ # with instance_double without revealing `Fog::Storage::Google` internals. For simplicity, we use a simple double.
+ let(:fog) { double("Fog::Storage::Google") }
+ # rubocop: enable RSpec/VerifiedDoubles
+
+ let(:report) { Tempfile.new("report.1.worker_1.#{Time.current.to_i}.json") }
+
+ after do
+ FileUtils.remove_entry(report)
+ end
+
+ describe '#upload' do
+ before do
+ allow(Fog::Storage::Google)
+ .to receive(:new)
+ .with(google_project: gcs_project, google_json_key_location: gcs_key)
+ .and_return(fog)
+ end
+
+ it 'calls fog, logs upload requested and success with duration' do
+ expect(logger)
+ .to receive(:info)
+ .with(hash_including(:pid, message: "Diagnostic reports", perf_report_status: "upload requested",
+ class: 'Gitlab::Memory::ReportsUploader', perf_report_path: report.path))
+ .ordered
+
+ expect(fog).to receive(:put_object).with(gcs_bucket, File.basename(report), instance_of(File))
+
+ expect(logger)
+ .to receive(:info)
+ .with(hash_including(:pid, :duration_s,
+ message: "Diagnostic reports", perf_report_status: "upload success",
+ class: 'Gitlab::Memory::ReportsUploader', perf_report_path: report.path))
+ .ordered
+
+ uploader.upload(report.path)
+ end
+
+ context 'when Google API responds with an error' do
+ let(:invalid_bucket) { 'WRONG BUCKET' }
+
+ let(:uploader) do
+ described_class.new(gcs_key: gcs_key, gcs_project: gcs_project, gcs_bucket: invalid_bucket, logger: logger)
+ end
+
+ it 'logs error raised by Fog and do not re-raise' do
+ expect(logger)
+ .to receive(:info)
+ .with(hash_including(:pid, message: "Diagnostic reports", perf_report_status: "upload requested",
+ class: 'Gitlab::Memory::ReportsUploader', perf_report_path: report.path))
+
+ expect(fog).to receive(:put_object).with(invalid_bucket, File.basename(report), instance_of(File))
+ .and_raise(Google::Apis::ClientError.new("invalid: Invalid bucket name: #{invalid_bucket}"))
+
+ expect(logger)
+ .to receive(:error)
+ .with(hash_including(:pid,
+ message: "Diagnostic reports", class: 'Gitlab::Memory::ReportsUploader',
+ perf_report_status: 'error', error: "invalid: Invalid bucket name: #{invalid_bucket}"))
+
+ expect { uploader.upload(report.path) }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/upload_and_cleanup_reports_spec.rb b/spec/lib/gitlab/memory/upload_and_cleanup_reports_spec.rb
new file mode 100644
index 00000000000..f3351b276cc
--- /dev/null
+++ b/spec/lib/gitlab/memory/upload_and_cleanup_reports_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Memory::UploadAndCleanupReports, :aggregate_failures do
+ let(:uploader) { instance_double(Gitlab::Memory::ReportsUploader) }
+ let(:logger) { instance_double(Gitlab::Memory::DiagnosticReportsLogger) }
+
+  describe '#initialize' do
+ let(:reports_path) { '/path/to/reports' }
+
+ context 'when sleep_time_seconds is passed through the environment' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_UPLOADER_SLEEP_S', '600')
+ end
+
+ it 'initializes with these settings' do
+ upload_and_cleanup = described_class.new(uploader: uploader, reports_path: reports_path, logger: logger)
+
+ expect(upload_and_cleanup.sleep_time_seconds).to eq(600)
+ end
+ end
+
+ context 'when sleep_time_seconds is passed through the initializer' do
+ it 'initializes with these settings' do
+ upload_and_cleanup = described_class.new(uploader: uploader, reports_path: reports_path, sleep_time_seconds: 60,
+ logger: logger)
+
+ expect(upload_and_cleanup.sleep_time_seconds).to eq(60)
+ end
+ end
+
+ context 'when `sleep_time_seconds` is not passed' do
+      it 'initializes with the default' do
+ upload_and_cleanup = described_class.new(uploader: uploader, reports_path: reports_path, logger: logger)
+
+ expect(upload_and_cleanup.sleep_time_seconds).to eq(described_class::DEFAULT_SLEEP_TIME_SECONDS)
+ end
+ end
+ end
+
+ describe '#call' do
+ let(:upload_and_cleanup) do
+ described_class.new(sleep_time_seconds: 600, reports_path: dir, uploader: uploader,
+ logger: logger).tap do |instance|
+ allow(instance).to receive(:loop).and_yield
+ allow(instance).to receive(:sleep)
+ end
+ end
+
+ let(:dir) { Dir.mktmpdir }
+
+ let(:reports_count) { 3 }
+
+ let(:reports) do
+ (1..reports_count).map do |i|
+ Tempfile.new("report.1.worker_#{i}.#{Time.current.to_i}.json", dir)
+ end
+ end
+
+ after do
+ FileUtils.remove_entry(dir)
+ end
+
+ it 'invokes the uploader and cleans the files' do
+ expect(logger)
+ .to receive(:info)
+ .with(hash_including(:pid,
+ message: "Diagnostic reports",
+ class: 'Gitlab::Memory::UploadAndCleanupReports',
+ perf_report_status: 'started'))
+
+ reports.each do |report|
+ expect(upload_and_cleanup.uploader).to receive(:upload).with(report.path)
+ end
+
+ expect { upload_and_cleanup.call }
+ .to change { Dir.entries(dir).count { |e| e.match(/report.*/) } }
+ .from(reports_count).to(0)
+ end
+
+ context 'when there is an exception' do
+ let(:report) { Tempfile.new("report.1.worker_1.#{Time.current.to_i}.json", dir) }
+
+ it 'logs it and does not crash the loop' do
+ expect(logger)
+ .to receive(:info)
+ .with(hash_including(:pid,
+ message: "Diagnostic reports",
+ class: 'Gitlab::Memory::UploadAndCleanupReports',
+ perf_report_status: 'started'))
+ .ordered
+
+ expect(upload_and_cleanup.uploader)
+ .to receive(:upload)
+ .with(report.path)
+ .and_raise(StandardError, 'Error Message')
+
+ expect(logger)
+ .to receive(:error)
+ .with(hash_including(:pid, message: "Diagnostic reports", class: 'Gitlab::Memory::UploadAndCleanupReports',
+ perf_report_status: 'error', error: 'Error Message'))
+ .ordered
+
+ expect { upload_and_cleanup.call }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog/configuration_spec.rb b/spec/lib/gitlab/memory/watchdog/configuration_spec.rb
new file mode 100644
index 00000000000..892a4b06ad0
--- /dev/null
+++ b/spec/lib/gitlab/memory/watchdog/configuration_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_dependency 'gitlab/cluster/lifecycle_events'
+
+RSpec.describe Gitlab::Memory::Watchdog::Configuration do
+ subject(:configuration) { described_class.new }
+
+ describe '#initialize' do
+    it 'initializes monitors' do
+ expect(configuration.monitors).to be_an_instance_of(described_class::MonitorStack)
+ end
+ end
+
+ describe '#handler' do
+ context 'when handler is not set' do
+ it 'defaults to NullHandler' do
+ expect(configuration.handler).to be(Gitlab::Memory::Watchdog::NullHandler.instance)
+ end
+ end
+ end
+
+ describe '#logger' do
+ context 'when logger is not set, defaults to stdout logger' do
+ it 'defaults to Logger' do
+ expect(configuration.logger).to be_an_instance_of(::Gitlab::Logger)
+ end
+ end
+ end
+
+ describe '#sleep_time_seconds' do
+ context 'when sleep_time_seconds is not set' do
+ it 'defaults to SLEEP_TIME_SECONDS' do
+ expect(configuration.sleep_time_seconds).to eq(described_class::DEFAULT_SLEEP_TIME_SECONDS)
+ end
+ end
+ end
+
+ describe '#monitors' do
+ context 'when monitors are configured to be used' do
+ let(:payload1) do
+ {
+ message: 'monitor_1_text',
+ memwd_max_strikes: 5,
+ memwd_cur_strikes: 0
+ }
+ end
+
+ let(:payload2) do
+ {
+ message: 'monitor_2_text',
+ memwd_max_strikes: 0,
+ memwd_cur_strikes: 1
+ }
+ end
+
+ let(:monitor_class_1) do
+ Struct.new(:threshold_violated, :payload) do
+ def call
+ { threshold_violated: !!threshold_violated, payload: payload || {} }
+ end
+
+ def self.name
+ 'Monitor1'
+ end
+ end
+ end
+
+ let(:monitor_class_2) do
+ Struct.new(:threshold_violated, :payload) do
+ def call
+ { threshold_violated: !!threshold_violated, payload: payload || {} }
+ end
+
+ def self.name
+ 'Monitor2'
+ end
+ end
+ end
+
+ context 'when two monitors are configured to be used' do
+ before do
+ configuration.monitors.use monitor_class_1, false, { message: 'monitor_1_text' }, max_strikes: 5
+ configuration.monitors.use monitor_class_2, true, { message: 'monitor_2_text' }, max_strikes: 0
+ end
+
+ it 'calls each monitor and returns correct results', :aggregate_failures do
+ payloads = []
+ thresholds = []
+ strikes = []
+ monitor_names = []
+
+ configuration.monitors.call_each do |result|
+ payloads << result.payload
+ thresholds << result.threshold_violated?
+ strikes << result.strikes_exceeded?
+ monitor_names << result.monitor_name
+ end
+
+ expect(payloads).to eq([payload1, payload2])
+ expect(thresholds).to eq([false, true])
+ expect(strikes).to eq([false, true])
+ expect(monitor_names).to eq([:monitor1, :monitor2])
+ end
+ end
+
+ context 'when same monitor class is configured to be used twice' do
+ before do
+ configuration.monitors.use monitor_class_1, max_strikes: 1
+ configuration.monitors.use monitor_class_1, max_strikes: 1
+ end
+
+ it 'calls same monitor only once' do
+ expect do |b|
+ configuration.monitors.call_each(&b)
+ end.to yield_control.once
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog/monitor/heap_fragmentation_spec.rb b/spec/lib/gitlab/memory/watchdog/monitor/heap_fragmentation_spec.rb
new file mode 100644
index 00000000000..dad19cfd588
--- /dev/null
+++ b/spec/lib/gitlab/memory/watchdog/monitor/heap_fragmentation_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'support/shared_examples/lib/gitlab/memory/watchdog/monitor_result_shared_examples'
+require 'prometheus/client'
+
+RSpec.describe Gitlab::Memory::Watchdog::Monitor::HeapFragmentation do
+ let(:heap_frag_limit_gauge) { instance_double(::Prometheus::Client::Gauge) }
+ let(:max_heap_fragmentation) { 0.2 }
+ let(:fragmentation) { 0.3 }
+
+ subject(:monitor) do
+ described_class.new(max_heap_fragmentation: max_heap_fragmentation)
+ end
+
+ before do
+ allow(Gitlab::Metrics).to receive(:gauge)
+ .with(:gitlab_memwd_heap_frag_limit, anything)
+ .and_return(heap_frag_limit_gauge)
+ allow(heap_frag_limit_gauge).to receive(:set)
+
+ allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(fragmentation)
+ end
+
+ describe '#initialize' do
+ it 'sets the heap fragmentation limit gauge' do
+ expect(heap_frag_limit_gauge).to receive(:set).with({}, max_heap_fragmentation)
+
+ monitor
+ end
+ end
+
+ describe '#call' do
+ it 'gets gc_heap_fragmentation' do
+ expect(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation)
+
+ monitor.call
+ end
+
+ context 'when process exceeds threshold' do
+ let(:fragmentation) { max_heap_fragmentation + 0.1 }
+ let(:payload) do
+ {
+ message: 'heap fragmentation limit exceeded',
+ memwd_cur_heap_frag: fragmentation,
+ memwd_max_heap_frag: max_heap_fragmentation
+ }
+ end
+
+ include_examples 'returns Watchdog Monitor result', threshold_violated: true
+ end
+
+ context 'when process does not exceed threshold' do
+ let(:fragmentation) { max_heap_fragmentation - 0.1 }
+ let(:payload) { {} }
+
+ include_examples 'returns Watchdog Monitor result', threshold_violated: false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog/monitor/unique_memory_growth_spec.rb b/spec/lib/gitlab/memory/watchdog/monitor/unique_memory_growth_spec.rb
new file mode 100644
index 00000000000..22494af4425
--- /dev/null
+++ b/spec/lib/gitlab/memory/watchdog/monitor/unique_memory_growth_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'support/shared_examples/lib/gitlab/memory/watchdog/monitor_result_shared_examples'
+require_dependency 'gitlab/cluster/lifecycle_events'
+
+RSpec.describe Gitlab::Memory::Watchdog::Monitor::UniqueMemoryGrowth do
+ let(:primary_memory) { 2048 }
+ let(:worker_memory) { 0 }
+ let(:max_mem_growth) { 2 }
+
+ subject(:monitor) do
+ described_class.new(max_mem_growth: max_mem_growth)
+ end
+
+ before do
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).and_return({ uss: worker_memory })
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(
+ pid: Gitlab::Cluster::PRIMARY_PID
+ ).and_return({ uss: primary_memory })
+ end
+
+ describe '#call' do
+ it 'gets memory_usage_uss_pss' do
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(no_args)
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(pid: Gitlab::Cluster::PRIMARY_PID)
+
+ monitor.call
+ end
+
+ context 'when monitor is called twice' do
+ it 'reference memory is calculated only once' do
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(no_args).twice
+ expect(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(pid: Gitlab::Cluster::PRIMARY_PID).once
+
+ monitor.call
+ monitor.call
+ end
+ end
+
+ context 'when process exceeds threshold' do
+ let(:worker_memory) { max_mem_growth * primary_memory + 1 }
+ let(:payload) do
+ {
+ message: 'memory limit exceeded',
+ memwd_max_uss_bytes: max_mem_growth * primary_memory,
+ memwd_ref_uss_bytes: primary_memory,
+ memwd_uss_bytes: worker_memory
+ }
+ end
+
+ include_examples 'returns Watchdog Monitor result', threshold_violated: true
+ end
+
+ context 'when process does not exceed threshold' do
+ let(:worker_memory) { max_mem_growth * primary_memory - 1 }
+ let(:payload) { {} }
+
+ include_examples 'returns Watchdog Monitor result', threshold_violated: false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog/monitor_state_spec.rb b/spec/lib/gitlab/memory/watchdog/monitor_state_spec.rb
new file mode 100644
index 00000000000..ace1353c6e3
--- /dev/null
+++ b/spec/lib/gitlab/memory/watchdog/monitor_state_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Memory::Watchdog::MonitorState do
+ let(:max_strikes) { 2 }
+ let(:payload) { { message: 'DummyMessage' } }
+ let(:threshold_violated) { true }
+ let(:monitor) { monitor_class.new(threshold_violated, payload) }
+ let(:monitor_class) do
+ Struct.new(:threshold_violated, :payload) do
+ def call
+ { threshold_violated: threshold_violated, payload: payload }
+ end
+
+ def self.name
+ 'MonitorName'
+ end
+ end
+ end
+
+ subject(:monitor_state) { described_class.new(monitor, max_strikes: max_strikes) }
+
+ shared_examples 'returns correct result' do
+ it 'returns correct result', :aggregate_failures do
+ result = monitor_state.call
+
+ expect(result).to be_an_instance_of(described_class::Result)
+ expect(result.strikes_exceeded?).to eq(strikes_exceeded)
+ expect(result.threshold_violated?).to eq(threshold_violated)
+ expect(result.payload).to eq(expected_payload)
+ expect(result.monitor_name).to eq(:monitor_name)
+ end
+ end
+
+ describe '#call' do
+ let(:strikes_exceeded) { false }
+ let(:curr_strikes) { 0 }
+ let(:expected_payload) do
+ {
+ memwd_max_strikes: max_strikes,
+ memwd_cur_strikes: curr_strikes
+ }.merge(payload)
+ end
+
+ context 'when threshold is not violated' do
+ let(:threshold_violated) { false }
+
+ include_examples 'returns correct result'
+ end
+
+ context 'when threshold is violated' do
+ let(:curr_strikes) { 1 }
+ let(:threshold_violated) { true }
+
+ include_examples 'returns correct result'
+
+ context 'when strikes_exceeded' do
+ let(:max_strikes) { 0 }
+ let(:strikes_exceeded) { true }
+
+ include_examples 'returns correct result'
+ end
+ end
+ end
+
+ describe '#monitor_class' do
+ subject { monitor_state.monitor_class }
+
+ it { is_expected.to eq(monitor_class) }
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb
index beb49660022..84e9a577afb 100644
--- a/spec/lib/gitlab/memory/watchdog_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog_spec.rb
@@ -1,35 +1,35 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative '../../../../lib/gitlab/cluster/lifecycle_events'
-RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
+RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures do
context 'watchdog' do
- let(:logger) { instance_double(::Logger) }
+ let(:configuration) { instance_double(described_class::Configuration) }
let(:handler) { instance_double(described_class::NullHandler) }
-
- let(:heap_frag_limit_gauge) { instance_double(::Prometheus::Client::Gauge) }
+ let(:logger) { instance_double(::Logger) }
+ let(:sleep_time_seconds) { 60 }
+ let(:threshold_violated) { false }
let(:violations_counter) { instance_double(::Prometheus::Client::Counter) }
let(:violations_handled_counter) { instance_double(::Prometheus::Client::Counter) }
-
- let(:sleep_time) { 0.1 }
- let(:max_heap_fragmentation) { 0.2 }
- let(:max_mem_growth) { 2 }
-
- # Defaults that will not trigger any events.
- let(:fragmentation) { 0 }
- let(:worker_memory) { 0 }
- let(:primary_memory) { 0 }
- let(:max_strikes) { 0 }
-
- # Tests should set this to control the number of loop iterations in `call`.
let(:watchdog_iterations) { 1 }
+ let(:name) { :monitor_name }
+ let(:payload) { { message: 'dummy_text' } }
+ let(:max_strikes) { 2 }
+ let(:monitor_class) do
+ Struct.new(:threshold_violated, :payload) do
+ def call
+ { threshold_violated: threshold_violated, payload: payload }
+ end
+
+ def self.name
+ 'MonitorName'
+ end
+ end
+ end
subject(:watchdog) do
- described_class.new(handler: handler, logger: logger, sleep_time_seconds: sleep_time,
- max_strikes: max_strikes, max_mem_growth: max_mem_growth,
- max_heap_fragmentation: max_heap_fragmentation).tap do |instance|
- # We need to defuse `sleep` and stop the internal loop after N iterations.
+ described_class.new.tap do |instance|
+ # We need to defuse `sleep` and stop the internal loop after 1 iteration
iterations = 0
allow(instance).to receive(:sleep) do
instance.stop if (iterations += 1) > watchdog_iterations
@@ -38,9 +38,6 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
end
def stub_prometheus_metrics
- allow(Gitlab::Metrics).to receive(:gauge)
- .with(:gitlab_memwd_heap_frag_limit, anything)
- .and_return(heap_frag_limit_gauge)
allow(Gitlab::Metrics).to receive(:counter)
.with(:gitlab_memwd_violations_total, anything, anything)
.and_return(violations_counter)
@@ -48,318 +45,195 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
.with(:gitlab_memwd_violations_handled_total, anything, anything)
.and_return(violations_handled_counter)
- allow(heap_frag_limit_gauge).to receive(:set)
allow(violations_counter).to receive(:increment)
allow(violations_handled_counter).to receive(:increment)
end
- before do
- stub_prometheus_metrics
-
- allow(handler).to receive(:call).and_return(true)
-
- allow(logger).to receive(:warn)
- allow(logger).to receive(:info)
-
- allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(fragmentation)
- allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).and_return({ uss: worker_memory })
- allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(
- pid: Gitlab::Cluster::PRIMARY_PID
- ).and_return({ uss: primary_memory })
-
- allow(::Prometheus::PidProvider).to receive(:worker_id).and_return('worker_1')
- end
-
- context 'when created' do
- it 'sets the heap fragmentation limit gauge' do
- expect(heap_frag_limit_gauge).to receive(:set).with({}, max_heap_fragmentation)
+ describe '#initialize' do
+ it 'initialize new configuration' do
+ expect(described_class::Configuration).to receive(:new)
watchdog
end
-
- context 'when no settings are set in the environment' do
- it 'initializes with defaults' do
- watchdog = described_class.new(handler: handler, logger: logger)
-
- expect(watchdog.max_heap_fragmentation).to eq(described_class::DEFAULT_MAX_HEAP_FRAG)
- expect(watchdog.max_mem_growth).to eq(described_class::DEFAULT_MAX_MEM_GROWTH)
- expect(watchdog.max_strikes).to eq(described_class::DEFAULT_MAX_STRIKES)
- expect(watchdog.sleep_time_seconds).to eq(described_class::DEFAULT_SLEEP_TIME_SECONDS)
- end
- end
-
- context 'when settings are passed through the environment' do
- before do
- stub_env('GITLAB_MEMWD_MAX_HEAP_FRAG', 1)
- stub_env('GITLAB_MEMWD_MAX_STRIKES', 2)
- stub_env('GITLAB_MEMWD_SLEEP_TIME_SEC', 3)
- stub_env('GITLAB_MEMWD_MAX_MEM_GROWTH', 4)
- end
-
- it 'initializes with these settings' do
- watchdog = described_class.new(handler: handler, logger: logger)
-
- expect(watchdog.max_heap_fragmentation).to eq(1)
- expect(watchdog.max_strikes).to eq(2)
- expect(watchdog.sleep_time_seconds).to eq(3)
- expect(watchdog.max_mem_growth).to eq(4)
- end
- end
end
- shared_examples 'has strikes left' do |stat|
- context 'when process has not exceeded allowed number of strikes' do
- let(:watchdog_iterations) { max_strikes }
-
- it 'does not signal the handler' do
- expect(handler).not_to receive(:call)
-
- watchdog.call
- end
-
- it 'does not log any events' do
- expect(logger).not_to receive(:warn)
-
- watchdog.call
- end
-
- it 'increments the violations counter' do
- expect(violations_counter).to receive(:increment).with(reason: stat).exactly(watchdog_iterations)
-
- watchdog.call
+ describe '#call' do
+ before do
+ stub_prometheus_metrics
+ allow(Gitlab::Metrics::System).to receive(:memory_usage_rss).at_least(:once).and_return(1024)
+ allow(::Prometheus::PidProvider).to receive(:worker_id).and_return('worker_1')
+
+ watchdog.configure do |config|
+ config.handler = handler
+ config.logger = logger
+ config.sleep_time_seconds = sleep_time_seconds
+ config.monitors.use monitor_class, threshold_violated, payload, max_strikes: max_strikes
end
- it 'does not increment violations handled counter' do
- expect(violations_handled_counter).not_to receive(:increment)
-
- watchdog.call
- end
+ allow(handler).to receive(:call).and_return(true)
+ allow(logger).to receive(:info)
+ allow(logger).to receive(:warn)
end
- end
- shared_examples 'no strikes left' do |stat|
- it 'signals the handler and resets strike counter' do
- expect(handler).to receive(:call).and_return(true)
+ it 'logs start message once' do
+ expect(logger).to receive(:info).once
+ .with(
+ pid: Process.pid,
+ worker_id: 'worker_1',
+ memwd_handler_class: handler.class.name,
+ memwd_sleep_time_s: sleep_time_seconds,
+ memwd_rss_bytes: 1024,
+ message: 'started')
watchdog.call
-
- expect(watchdog.strikes(stat.to_sym)).to eq(0)
end
- it 'increments both the violations and violations handled counters' do
- expect(violations_counter).to receive(:increment).with(reason: stat).exactly(watchdog_iterations)
- expect(violations_handled_counter).to receive(:increment).with(reason: stat)
+ it 'waits for check interval seconds' do
+ expect(watchdog).to receive(:sleep).with(sleep_time_seconds)
watchdog.call
end
- context 'when enforce_memory_watchdog ops toggle is off' do
+ context 'when gitlab_memory_watchdog ops toggle is off' do
before do
- stub_feature_flags(enforce_memory_watchdog: false)
+ stub_feature_flags(gitlab_memory_watchdog: false)
end
- it 'always uses the NullHandler' do
- expect(handler).not_to receive(:call)
- expect(described_class::NullHandler.instance).to receive(:call).and_return(true)
-
- watchdog.call
+ it 'does not trigger any monitor' do
+ expect(configuration).not_to receive(:monitors)
end
end
- context 'when handler result is true' do
- it 'considers the event handled and stops itself' do
- expect(handler).to receive(:call).once.and_return(true)
- expect(logger).to receive(:info).with(hash_including(message: 'stopped'))
+ context 'when process does not exceed threshold' do
+ it 'does not increment violations counters' do
+ expect(violations_counter).not_to receive(:increment)
+ expect(violations_handled_counter).not_to receive(:increment)
watchdog.call
end
- end
-
- context 'when handler result is false' do
- let(:max_strikes) { 0 } # to make sure the handler fires each iteration
- let(:watchdog_iterations) { 3 }
- it 'keeps running' do
- expect(violations_counter).to receive(:increment).exactly(watchdog_iterations)
- expect(violations_handled_counter).to receive(:increment).exactly(watchdog_iterations)
- # Return true the third time to terminate the daemon.
- expect(handler).to receive(:call).and_return(false, false, true)
+ it 'does not log violation' do
+ expect(logger).not_to receive(:warn)
watchdog.call
end
- end
- end
-
- context 'when monitoring memory growth' do
- let(:primary_memory) { 2048 }
-
- context 'when process does not exceed threshold' do
- let(:worker_memory) { max_mem_growth * primary_memory - 1 }
- it 'does not signal the handler' do
+ it 'does not execute handler' do
expect(handler).not_to receive(:call)
watchdog.call
end
end
- context 'when process exceeds threshold permanently' do
- let(:worker_memory) { max_mem_growth * primary_memory + 1 }
- let(:max_strikes) { 3 }
-
- it_behaves_like 'has strikes left', 'mem_growth'
+ context 'when process exceeds threshold' do
+ let(:threshold_violated) { true }
- context 'when process exceeds the allowed number of strikes' do
- let(:watchdog_iterations) { max_strikes + 1 }
+ it 'increments violations counter' do
+ expect(violations_counter).to receive(:increment).with(reason: name)
- it_behaves_like 'no strikes left', 'mem_growth'
+ watchdog.call
+ end
- it 'only reads reference memory once' do
- expect(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss)
- .with(pid: Gitlab::Cluster::PRIMARY_PID)
- .once
+ context 'when process does not exceed the allowed number of strikes' do
+ it 'does not increment handled violations counter' do
+ expect(violations_handled_counter).not_to receive(:increment)
watchdog.call
end
- it 'logs the event' do
- expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).at_least(:once).and_return(1024)
- expect(logger).to receive(:warn).with({
- message: 'memory limit exceeded',
- pid: Process.pid,
- worker_id: 'worker_1',
- memwd_handler_class: 'RSpec::Mocks::InstanceVerifyingDouble',
- memwd_sleep_time_s: sleep_time,
- memwd_max_uss_bytes: max_mem_growth * primary_memory,
- memwd_ref_uss_bytes: primary_memory,
- memwd_uss_bytes: worker_memory,
- memwd_rss_bytes: 1024,
- memwd_max_strikes: max_strikes,
- memwd_cur_strikes: max_strikes + 1
- })
+ it 'does not log violation' do
+ expect(logger).not_to receive(:warn)
watchdog.call
end
- end
- end
- context 'when process exceeds threshold temporarily' do
- let(:worker_memory) { max_mem_growth * primary_memory }
- let(:max_strikes) { 1 }
- let(:watchdog_iterations) { 4 }
+ it 'does not execute handler' do
+ expect(handler).not_to receive(:call)
- before do
- allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).and_return(
- { uss: worker_memory - 0.1 },
- { uss: worker_memory + 0.2 },
- { uss: worker_memory - 0.1 },
- { uss: worker_memory + 0.1 }
- )
- allow(Gitlab::Metrics::System).to receive(:memory_usage_uss_pss).with(
- pid: Gitlab::Cluster::PRIMARY_PID
- ).and_return({ uss: primary_memory })
+ watchdog.call
+ end
end
- it 'does not signal the handler' do
- expect(handler).not_to receive(:call)
+ context 'when monitor exceeds the allowed number of strikes' do
+ let(:max_strikes) { 0 }
- watchdog.call
- end
- end
- end
+ it 'increments handled violations counter' do
+ expect(violations_handled_counter).to receive(:increment).with(reason: name)
- context 'when monitoring heap fragmentation' do
- context 'when process does not exceed threshold' do
- let(:fragmentation) { max_heap_fragmentation - 0.1 }
-
- it 'does not signal the handler' do
- expect(handler).not_to receive(:call)
-
- watchdog.call
- end
- end
-
- context 'when process exceeds threshold permanently' do
- let(:fragmentation) { max_heap_fragmentation + 0.1 }
- let(:max_strikes) { 3 }
-
- it_behaves_like 'has strikes left', 'heap_frag'
+ watchdog.call
+ end
- context 'when process exceeds the allowed number of strikes' do
- let(:watchdog_iterations) { max_strikes + 1 }
+ it 'logs violation' do
+ expect(logger).to receive(:warn)
+ .with(
+ pid: Process.pid,
+ worker_id: 'worker_1',
+ memwd_handler_class: handler.class.name,
+ memwd_sleep_time_s: sleep_time_seconds,
+ memwd_rss_bytes: 1024,
+ memwd_cur_strikes: 1,
+ memwd_max_strikes: max_strikes,
+ message: 'dummy_text')
- it_behaves_like 'no strikes left', 'heap_frag'
+ watchdog.call
+ end
- it 'logs the event' do
- expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).at_least(:once).and_return(1024)
- expect(logger).to receive(:warn).with({
- message: 'heap fragmentation limit exceeded',
- pid: Process.pid,
- worker_id: 'worker_1',
- memwd_handler_class: 'RSpec::Mocks::InstanceVerifyingDouble',
- memwd_sleep_time_s: sleep_time,
- memwd_max_heap_frag: max_heap_fragmentation,
- memwd_cur_heap_frag: fragmentation,
- memwd_max_strikes: max_strikes,
- memwd_cur_strikes: max_strikes + 1,
- memwd_rss_bytes: 1024
- })
+ it 'executes handler' do
+ expect(handler).to receive(:call)
watchdog.call
end
- end
- end
- context 'when process exceeds threshold temporarily' do
- let(:fragmentation) { max_heap_fragmentation }
- let(:max_strikes) { 1 }
- let(:watchdog_iterations) { 4 }
+ context 'when enforce_memory_watchdog ops toggle is off' do
+ before do
+ stub_feature_flags(enforce_memory_watchdog: false)
+ end
- before do
- allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(
- fragmentation - 0.1,
- fragmentation + 0.2,
- fragmentation - 0.1,
- fragmentation + 0.1
- )
- end
+ it 'always uses the NullHandler' do
+ expect(handler).not_to receive(:call)
+ expect(described_class::NullHandler.instance).to receive(:call).and_return(true)
- it 'does not signal the handler' do
- expect(handler).not_to receive(:call)
+ watchdog.call
+ end
+ end
- watchdog.call
+ context 'when multiple monitors exceeds allowed number of strikes' do
+ before do
+ watchdog.configure do |config|
+ config.handler = handler
+ config.logger = logger
+ config.sleep_time_seconds = sleep_time_seconds
+ config.monitors.use monitor_class, threshold_violated, payload, max_strikes: max_strikes
+ config.monitors.use monitor_class, threshold_violated, payload, max_strikes: max_strikes
+ end
+ end
+
+ it 'only calls the handler once' do
+ expect(handler).to receive(:call).once.and_return(true)
+
+ watchdog.call
+ end
+ end
end
end
- end
-
- context 'when both memory fragmentation and growth exceed thresholds' do
- let(:fragmentation) { max_heap_fragmentation + 0.1 }
- let(:primary_memory) { 2048 }
- let(:worker_memory) { max_mem_growth * primary_memory + 1 }
- let(:watchdog_iterations) { max_strikes + 1 }
- it 'only calls the handler once' do
- expect(handler).to receive(:call).once.and_return(true)
+ it 'logs stop message once' do
+ expect(logger).to receive(:info).once
+ .with(
+ pid: Process.pid,
+ worker_id: 'worker_1',
+ memwd_handler_class: handler.class.name,
+ memwd_sleep_time_s: sleep_time_seconds,
+ memwd_rss_bytes: 1024,
+ message: 'stopped')
watchdog.call
end
end
- context 'when gitlab_memory_watchdog ops toggle is off' do
- before do
- stub_feature_flags(gitlab_memory_watchdog: false)
- end
-
- it 'does not monitor heap fragmentation' do
- expect(Gitlab::Metrics::Memory).not_to receive(:gc_heap_fragmentation)
-
- watchdog.call
- end
-
- it 'does not monitor memory growth' do
- expect(Gitlab::Metrics::System).not_to receive(:memory_usage_uss_pss)
-
- watchdog.call
+ describe '#configure' do
+ it 'yields block' do
+ expect { |b| watchdog.configure(&b) }.to yield_control
end
end
end
diff --git a/spec/lib/gitlab/metrics/global_search_slis_spec.rb b/spec/lib/gitlab/metrics/global_search_slis_spec.rb
index 28496eff2fc..0c09cf6dd71 100644
--- a/spec/lib/gitlab/metrics/global_search_slis_spec.rb
+++ b/spec/lib/gitlab/metrics/global_search_slis_spec.rb
@@ -5,26 +5,20 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
using RSpec::Parameterized::TableSyntax
- let(:apdex_feature_flag_enabled) { true }
let(:error_rate_feature_flag_enabled) { true }
before do
- stub_feature_flags(global_search_custom_slis: apdex_feature_flag_enabled)
stub_feature_flags(global_search_error_rate_sli: error_rate_feature_flag_enabled)
end
describe '#initialize_slis!' do
- context 'when global_search_custom_slis feature flag is enabled' do
- let(:apdex_feature_flag_enabled) { true }
+ it 'initializes Apdex SLIs for global_search' do
+ expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(
+ :global_search,
+ a_kind_of(Array)
+ )
- it 'initializes Apdex SLIs for global_search' do
- expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(
- :global_search,
- a_kind_of(Array)
- )
-
- described_class.initialize_slis!
- end
+ described_class.initialize_slis!
end
context 'when global_search_error_rate_sli feature flag is enabled' do
@@ -40,16 +34,6 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
end
end
- context 'when global_search_custom_slis feature flag is disabled' do
- let(:apdex_feature_flag_enabled) { false }
-
- it 'does not initialize the Apdex SLIs for global_search' do
- expect(Gitlab::Metrics::Sli::Apdex).not_to receive(:initialize_sli)
-
- described_class.initialize_slis!
- end
- end
-
context 'when global_search_error_rate_sli feature flag is disabled' do
let(:error_rate_feature_flag_enabled) { false }
@@ -62,78 +46,59 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do
end
describe '#record_apdex' do
- context 'when global_search_custom_slis feature flag is enabled' do
- let(:apdex_feature_flag_enabled) { true }
-
- where(:search_type, :code_search, :duration_target) do
- 'basic' | false | 7.031
- 'basic' | true | 21.903
- 'advanced' | false | 4.865
- 'advanced' | true | 13.546
- end
-
- with_them do
- before do
- allow(::Gitlab::ApplicationContext).to receive(:current_context_attribute).with(:caller_id).and_return('end')
- end
+ where(:search_type, :code_search, :duration_target) do
+ 'basic' | false | 7.031
+ 'basic' | true | 21.903
+ 'advanced' | false | 4.865
+ 'advanced' | true | 13.546
+ end
- let(:search_scope) { code_search ? 'blobs' : 'issues' }
+ with_them do
+ before do
+ allow(::Gitlab::ApplicationContext).to receive(:current_context_attribute).with(:caller_id).and_return('end')
+ end
- it 'increments the global_search SLI as a success if the elapsed time is within the target' do
- duration = duration_target - 0.1
+ let(:search_scope) { code_search ? 'blobs' : 'issues' }
- expect(Gitlab::Metrics::Sli::Apdex[:global_search]).to receive(:increment).with(
- labels: {
- search_type: search_type,
- search_level: 'global',
- search_scope: search_scope,
- endpoint_id: 'end'
- },
- success: true
- )
+ it 'increments the global_search SLI as a success if the elapsed time is within the target' do
+ duration = duration_target - 0.1
- described_class.record_apdex(
- elapsed: duration,
- search_type: search_type,
- search_level: 'global',
- search_scope: search_scope
- )
- end
-
- it 'increments the global_search SLI as a failure if the elapsed time is not within the target' do
- duration = duration_target + 0.1
-
- expect(Gitlab::Metrics::Sli::Apdex[:global_search]).to receive(:increment).with(
- labels: {
- search_type: search_type,
- search_level: 'global',
- search_scope: search_scope,
- endpoint_id: 'end'
- },
- success: false
- )
-
- described_class.record_apdex(
- elapsed: duration,
+ expect(Gitlab::Metrics::Sli::Apdex[:global_search]).to receive(:increment).with(
+ labels: {
search_type: search_type,
search_level: 'global',
- search_scope: search_scope
- )
- end
+ search_scope: search_scope,
+ endpoint_id: 'end'
+ },
+ success: true
+ )
+
+ described_class.record_apdex(
+ elapsed: duration,
+ search_type: search_type,
+ search_level: 'global',
+ search_scope: search_scope
+ )
end
- end
- context 'when global_search_custom_slis feature flag is disabled' do
- let(:apdex_feature_flag_enabled) { false }
+ it 'increments the global_search SLI as a failure if the elapsed time is not within the target' do
+ duration = duration_target + 0.1
- it 'does not call increment on the apdex SLI' do
- expect(Gitlab::Metrics::Sli::Apdex[:global_search]).not_to receive(:increment)
+ expect(Gitlab::Metrics::Sli::Apdex[:global_search]).to receive(:increment).with(
+ labels: {
+ search_type: search_type,
+ search_level: 'global',
+ search_scope: search_scope,
+ endpoint_id: 'end'
+ },
+ success: false
+ )
described_class.record_apdex(
- elapsed: 1,
- search_type: 'basic',
+ elapsed: duration,
+ search_type: search_type,
search_level: 'global',
- search_scope: 'issues'
+ search_scope: search_scope
)
end
end
diff --git a/spec/lib/gitlab/metrics/system_spec.rb b/spec/lib/gitlab/metrics/system_spec.rb
index 7739501dd95..b86469eacd1 100644
--- a/spec/lib/gitlab/metrics/system_spec.rb
+++ b/spec/lib/gitlab/metrics/system_spec.rb
@@ -71,6 +71,65 @@ RSpec.describe Gitlab::Metrics::System do
SNIP
end
+ let(:mem_info) do
+ # full snapshot
+ <<~SNIP
+ MemTotal: 15362536 kB
+ MemFree: 3403136 kB
+ MemAvailable: 13044528 kB
+ Buffers: 272188 kB
+ Cached: 8171312 kB
+ SwapCached: 0 kB
+ Active: 3332084 kB
+ Inactive: 6981076 kB
+ Active(anon): 1603868 kB
+ Inactive(anon): 9044 kB
+ Active(file): 1728216 kB
+ Inactive(file): 6972032 kB
+ Unevictable: 18676 kB
+ Mlocked: 18676 kB
+ SwapTotal: 0 kB
+ SwapFree: 0 kB
+ Dirty: 6808 kB
+ Writeback: 0 kB
+ AnonPages: 1888300 kB
+ Mapped: 166164 kB
+ Shmem: 12932 kB
+ KReclaimable: 1275120 kB
+ Slab: 1495480 kB
+ SReclaimable: 1275120 kB
+ SUnreclaim: 220360 kB
+ KernelStack: 7072 kB
+ PageTables: 11936 kB
+ NFS_Unstable: 0 kB
+ Bounce: 0 kB
+ WritebackTmp: 0 kB
+ CommitLimit: 7681268 kB
+ Committed_AS: 4976100 kB
+ VmallocTotal: 34359738367 kB
+ VmallocUsed: 25532 kB
+ VmallocChunk: 0 kB
+ Percpu: 23200 kB
+ HardwareCorrupted: 0 kB
+ AnonHugePages: 202752 kB
+ ShmemHugePages: 0 kB
+ ShmemPmdMapped: 0 kB
+ FileHugePages: 0 kB
+ FilePmdMapped: 0 kB
+ CmaTotal: 0 kB
+ CmaFree: 0 kB
+ HugePages_Total: 0
+ HugePages_Free: 0
+ HugePages_Rsvd: 0
+ HugePages_Surp: 0
+ Hugepagesize: 2048 kB
+ Hugetlb: 0 kB
+ DirectMap4k: 4637504 kB
+ DirectMap2M: 11087872 kB
+ DirectMap1G: 2097152 kB
+ SNIP
+ end
+
describe '.memory_usage_rss' do
context 'without PID' do
it "returns the current process' resident set size (RSS) in bytes" do
@@ -125,6 +184,14 @@ RSpec.describe Gitlab::Metrics::System do
end
end
+ describe '.memory_total' do
+ it "returns the current process' resident set size (RSS) in bytes" do
+ mock_existing_proc_file('/proc/meminfo', mem_info)
+
+ expect(described_class.memory_total).to eq(15731236864)
+ end
+ end
+
describe '.process_runtime_elapsed_seconds' do
it 'returns the seconds elapsed since the process was started' do
# sets process starttime ticks to 1000
diff --git a/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb b/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
index cf7b0dbb5fd..ed1440f23b6 100644
--- a/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
+++ b/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
@@ -132,11 +132,12 @@ RSpec.describe Gitlab::Middleware::HandleMalformedStrings do
end
it "rejects bad params for arrays containing hashes with string values" do
- env = env_for(name: [
- {
- inner_key: "I am #{problematic_input} bad"
- }
- ])
+ env = env_for(
+ name: [
+ {
+ inner_key: "I am #{problematic_input} bad"
+ }
+ ])
expect(subject.call(env)).to eq error_400
end
@@ -148,11 +149,12 @@ RSpec.describe Gitlab::Middleware::HandleMalformedStrings do
it_behaves_like 'checks params'
it "gives up and does not reject too deeply nested params" do
- env = env_for(name: [
- {
- inner_key: { deeper_key: [{ hash_inside_array_key: "I am #{problematic_input} bad" }] }
- }
- ])
+ env = env_for(
+ name: [
+ {
+ inner_key: { deeper_key: [{ hash_inside_array_key: "I am #{problematic_input} bad" }] }
+ }
+ ])
expect(subject.call(env)).not_to eq error_400
end
diff --git a/spec/lib/gitlab/pages/cache_control_spec.rb b/spec/lib/gitlab/pages/cache_control_spec.rb
index 6ed823427fb..431c989e874 100644
--- a/spec/lib/gitlab/pages/cache_control_spec.rb
+++ b/spec/lib/gitlab/pages/cache_control_spec.rb
@@ -3,21 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Pages::CacheControl do
- it 'fails with invalid type' do
- expect { described_class.new(type: :unknown, id: nil) }
- .to raise_error(ArgumentError, "type must be :namespace or :project")
- end
-
describe '.for_namespace' do
- let(:subject) { described_class.for_namespace(1) }
+ subject(:cache_control) { described_class.for_namespace(1) }
- it { expect(subject.cache_key).to eq('pages_domain_for_namespace_1') }
+ it { expect(subject.cache_key).to match(/pages_domain_for_namespace_1_*/) }
describe '#clear_cache' do
it 'clears the cache' do
expect(Rails.cache)
.to receive(:delete)
- .with('pages_domain_for_namespace_1')
+ .with(/pages_domain_for_namespace_1_*/)
subject.clear_cache
end
@@ -25,18 +20,48 @@ RSpec.describe Gitlab::Pages::CacheControl do
end
describe '.for_project' do
- let(:subject) { described_class.for_project(1) }
+ subject(:cache_control) { described_class.for_project(1) }
- it { expect(subject.cache_key).to eq('pages_domain_for_project_1') }
+ it { expect(subject.cache_key).to match(/pages_domain_for_project_1_*/) }
describe '#clear_cache' do
it 'clears the cache' do
expect(Rails.cache)
.to receive(:delete)
- .with('pages_domain_for_project_1')
+ .with(/pages_domain_for_project_1_*/)
subject.clear_cache
end
end
end
+
+ describe '#cache_key' do
+ it 'does not change the pages config' do
+ expect { described_class.new(type: :project, id: 1).cache_key }
+ .not_to change(Gitlab.config, :pages)
+ end
+
+ it 'is based on pages settings' do
+ access_control = Gitlab.config.pages.access_control
+ cache_key = described_class.new(type: :project, id: 1).cache_key
+
+ stub_config(pages: { access_control: !access_control })
+
+ expect(described_class.new(type: :project, id: 1).cache_key).not_to eq(cache_key)
+ end
+
+ it 'is based on the force_pages_access_control settings' do
+ force_pages_access_control = ::Gitlab::CurrentSettings.force_pages_access_control
+ cache_key = described_class.new(type: :project, id: 1).cache_key
+
+ ::Gitlab::CurrentSettings.force_pages_access_control = !force_pages_access_control
+
+ expect(described_class.new(type: :project, id: 1).cache_key).not_to eq(cache_key)
+ end
+ end
+
+ it 'fails with invalid type' do
+ expect { described_class.new(type: :unknown, id: nil) }
+ .to raise_error(ArgumentError, "type must be :namespace or :project")
+ end
end
diff --git a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
index 100574cc75f..64bc4555bcc 100644
--- a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
@@ -23,10 +23,11 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
let_it_be(:project_calculated_column_expression) do
# COALESCE("projects"."description", 'No Description')
- Arel::Nodes::NamedFunction.new('COALESCE', [
- Project.arel_table[:description],
- Arel.sql("'No Description'")
- ])
+ Arel::Nodes::NamedFunction.new('COALESCE',
+ [
+ Project.arel_table[:description],
+ Arel.sql("'No Description'")
+ ])
end
let_it_be(:project_calculated_column) do
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
index 9f2ac9a953d..cc85c897019 100644
--- a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
@@ -117,23 +117,24 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
let(:order) do
# NULLS LAST ordering requires custom Order object for keyset pagination:
# https://docs.gitlab.com/ee/development/database/keyset_pagination.html#complex-order-configuration
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: :relative_position,
- column_expression: Issue.arel_table[:relative_position],
- order_expression: Issue.arel_table[:relative_position].desc.nulls_last,
- reversed_order_expression: Issue.arel_table[:relative_position].asc.nulls_first,
- order_direction: :desc,
- nullable: :nulls_last,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: :id,
- order_expression: Issue.arel_table[:id].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :relative_position,
+ column_expression: Issue.arel_table[:relative_position],
+ order_expression: Issue.arel_table[:relative_position].desc.nulls_last,
+ reversed_order_expression: Issue.arel_table[:relative_position].asc.nulls_first,
+ order_direction: :desc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
let(:in_operator_optimization_options) do
@@ -279,17 +280,18 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
context 'when ordering by SQL expression' do
let(:order) do
# ORDER BY (id * 10), id
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id_multiplied_by_ten',
- order_expression: Arel.sql('(id * 10)').asc,
- sql_type: 'integer'
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: :id,
- order_expression: Issue.arel_table[:id].asc
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id_multiplied_by_ten',
+ order_expression: Arel.sql('(id * 10)').asc,
+ sql_type: 'integer'
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].asc
+ )
+ ])
end
let(:scope) { Issue.reorder(order) }
@@ -328,4 +330,148 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
end
end
end
+
+ context 'when ordering by JOIN-ed columns' do
+ let(:scope) { cte_with_issues_and_projects.apply_to(Issue.where({})).reorder(order) }
+
+ let(:cte_with_issues_and_projects) do
+ cte_query = Issue.select('issues.id AS id', 'project_id', 'projects.id AS projects_id', 'projects.name AS projects_name').joins(:project)
+ Gitlab::SQL::CTE.new(:issue_with_project, cte_query, materialized: false)
+ end
+
+ let(:in_operator_optimization_options) do
+ {
+ array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id),
+ array_mapping_scope: -> (id_expression) { Issue.where(Issue.arel_table[:project_id].eq(id_expression)) }
+ }
+ end
+
+ context 'when directions are project.id DESC, issues.id ASC' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'projects_id',
+ order_expression: Issue.arel_table[:projects_id].asc,
+ sql_type: 'integer',
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].asc
+ )
+ ])
+ end
+
+ let(:expected_order) { issues.sort_by { |issue| [issue.project_id, issue.id] } }
+
+ context 'when iterating records one by one' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+
+ context 'when iterating records with LIMIT 2' do
+ let(:batch_size) { 2 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+ end
+
+ context 'when directions are projects.id DESC, issues.id ASC' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'projects_id',
+ order_expression: Issue.arel_table[:projects_id].desc,
+ sql_type: 'integer',
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].asc
+ )
+ ])
+ end
+
+ let(:expected_order) { issues.sort_by { |issue| [issue.project_id * -1, issue.id] } }
+
+ context 'when iterating records one by one' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+
+ context 'when iterating records with LIMIT 2' do
+ let(:batch_size) { 2 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+ end
+
+ context 'when directions are projects.name ASC, projects.id ASC, issues.id ASC' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'projects_name',
+ order_expression: Issue.arel_table[:projects_name].asc,
+ sql_type: 'character varying',
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'projects_id',
+ order_expression: Issue.arel_table[:projects_id].asc,
+ sql_type: 'integer',
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].asc
+ )
+ ])
+ end
+
+ let(:expected_order) { issues.sort_by { |issue| [issue.project.name, issue.project.id, issue.id] } }
+
+ context 'when iterating records with LIMIT 2' do
+ let(:batch_size) { 2 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+ end
+
+ context 'when directions are projects.name ASC (nullable), issues.id ASC' do
+ let(:cte_with_issues_and_projects) do
+ cte_query = Issue.select('issues.id AS id', 'project_id', 'projects.id AS projects_id', 'NULL AS projects_name').joins(:project)
+ Gitlab::SQL::CTE.new(:issue_with_project, cte_query, materialized: false)
+ end
+
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'projects_name',
+ order_expression: Issue.arel_table[:projects_name].asc,
+ sql_type: 'character varying',
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].asc
+ )
+ ])
+ end
+
+ let(:expected_order) { issues.sort_by { |issue| [issue.id] } }
+
+ context 'when iterating records with LIMIT 2' do
+ let(:batch_size) { 2 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
index ab1037b318b..2073142f077 100644
--- a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
@@ -25,22 +25,24 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::Strategies::O
describe '#initializer_columns' do
it 'returns NULLs for each ORDER BY columns' do
- expect(strategy.initializer_columns).to eq([
- 'NULL::timestamp without time zone AS created_at',
- 'NULL::integer AS id'
- ])
+ expect(strategy.initializer_columns).to eq(
+ [
+ 'NULL::timestamp without time zone AS created_at',
+ 'NULL::integer AS id'
+ ])
end
end
context 'when an SQL expression is given' do
context 'when the sql_type attribute is missing' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id_times_ten',
- order_expression: Arel.sql('id * 10').asc
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id_times_ten',
+ order_expression: Arel.sql('id * 10').asc
+ )
+ ])
end
let(:keyset_scope) { Project.order(order) }
@@ -52,13 +54,14 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::Strategies::O
context 'when the sql_type_attribute is present' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id_times_ten',
- order_expression: Arel.sql('id * 10').asc,
- sql_type: 'integer'
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id_times_ten',
+ order_expression: Arel.sql('id * 10').asc,
+ sql_type: 'integer'
+ )
+ ])
end
let(:keyset_scope) { Project.order(order) }
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
index d62d20d2d2c..eee743c5e48 100644
--- a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -15,21 +15,22 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
let(:nulls_position) { :nulls_last }
let(:reverse_nulls_position) { ::Gitlab::Pagination::Keyset::ColumnOrderDefinition::REVERSED_NULL_POSITIONS[nulls_position] }
let(:custom_reorder) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: column,
- column_expression: klass.arel_table[column],
- order_expression: klass.arel_table[column].public_send(direction).public_send(nulls_position), # rubocop:disable GitlabSecurity/PublicSend
- reversed_order_expression: klass.arel_table[column].public_send(reverse_direction).public_send(reverse_nulls_position), # rubocop:disable GitlabSecurity/PublicSend
- order_direction: direction,
- nullable: nulls_position,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- order_expression: klass.arel_table[:id].send(direction)
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: column,
+ column_expression: klass.arel_table[column],
+ order_expression: klass.arel_table[column].public_send(direction).public_send(nulls_position), # rubocop:disable GitlabSecurity/PublicSend
+ reversed_order_expression: klass.arel_table[column].public_send(reverse_direction).public_send(reverse_nulls_position), # rubocop:disable GitlabSecurity/PublicSend
+ order_direction: direction,
+ nullable: nulls_position,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: klass.arel_table[:id].send(direction)
+ )
+ ])
end
let(:iterator_params) { nil }
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index c1fc73603d6..e99846ad424 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -148,15 +148,16 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
let(:expected) do
@@ -192,29 +193,30 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: table['month'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: table['month'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
let(:expected) do
@@ -258,33 +260,34 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table[:year].asc.nulls_last,
- reversed_order_expression: table[:year].desc.nulls_first,
- order_direction: :asc,
- nullable: :nulls_last,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: table[:month].asc.nulls_last,
- reversed_order_expression: table[:month].desc.nulls_first,
- order_direction: :asc,
- nullable: :nulls_last,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table[:year].asc.nulls_last,
+ reversed_order_expression: table[:year].desc.nulls_first,
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: table[:month].asc.nulls_last,
+ reversed_order_expression: table[:month].desc.nulls_first,
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
let(:expected) do
@@ -324,33 +327,34 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table[:year].asc.nulls_first,
- reversed_order_expression: table[:year].desc.nulls_last,
- order_direction: :asc,
- nullable: :nulls_first,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: table[:month].asc.nulls_first,
- order_direction: :asc,
- reversed_order_expression: table[:month].desc.nulls_last,
- nullable: :nulls_first,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table[:year].asc.nulls_first,
+ reversed_order_expression: table[:year].desc.nulls_last,
+ order_direction: :asc,
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: table[:month].asc.nulls_first,
+ order_direction: :asc,
+ reversed_order_expression: table[:month].desc.nulls_last,
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
let(:expected) do
@@ -390,22 +394,23 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
let(:expected) do
@@ -432,33 +437,38 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
reversed = order.reversed_order
before_conditions = reversed.where_values_with_or_query(before_cursor)
- query = build_query(order: order, where_conditions: [Arel::Nodes::And.new([after_conditions, before_conditions])], limit: 100)
+ query = build_query(
+ order: order,
+ where_conditions: [Arel::Nodes::And.new([after_conditions, before_conditions])],
+ limit: 100)
- expect(run_query(query)).to eq([
- { "id" => 2, "year" => 2011, "month" => 0 },
- { "id" => 6, "year" => 2012, "month" => 0 }
- ])
+ expect(run_query(query)).to eq(
+ [
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 }
+ ])
end
end
context 'when ordering by the named function LOWER' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'title',
- column_expression: Arel::Nodes::NamedFunction.new("LOWER", [table['title'].desc]),
- order_expression: table['title'].lower.desc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'title',
+ column_expression: Arel::Nodes::NamedFunction.new("LOWER", [table['title'].desc]),
+ order_expression: table['title'].lower.desc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
let(:table_data) do
@@ -484,22 +494,23 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
context 'when the passed cursor values do not match with the order definition' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
context 'when values are missing' do
@@ -553,14 +564,15 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
context 'when string attribute name is given' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- order_expression: Project.arel_table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
it_behaves_like 'cursor attribute examples'
@@ -568,14 +580,15 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
context 'when symbol attribute name is given' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: :id,
- order_expression: Project.arel_table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
it_behaves_like 'cursor attribute examples'
@@ -593,20 +606,21 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
context 'when there are additional_projections' do
let(:order) do
- order = Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'created_at_field',
- column_expression: Project.arel_table[:created_at],
- order_expression: Project.arel_table[:created_at].desc,
- order_direction: :desc,
- distinct: false,
- add_to_projections: true
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- order_expression: Project.arel_table[:id].desc
- )
- ])
+ order = Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'created_at_field',
+ column_expression: Project.arel_table[:created_at],
+ order_expression: Project.arel_table[:created_at].desc,
+ order_direction: :desc,
+ distinct: false,
+ add_to_projections: true
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table[:id].desc
+ )
+ ])
order
end
@@ -684,20 +698,21 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
describe '#attribute_names' do
let(:expected_attribute_names) { %w(id name) }
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- order_expression: Project.arel_table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'name',
- order_expression: Project.arel_table['name'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'name',
+ order_expression: Project.arel_table['name'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
subject { order.attribute_names }
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index bfe1a588489..7c365990627 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Profiler do
end
it 'returns a profile result' do
- expect(described_class.profile('/')).to be_an_instance_of(RubyProf::Profile)
+ expect(described_class.profile('/')).to be_an_instance_of(File)
end
it 'uses the custom logger given' do
@@ -59,28 +59,26 @@ RSpec.describe Gitlab::Profiler do
described_class.profile('/', user: user, private_token: private_token)
end
- context 'with sampling profiler' do
- it 'generates sampling data' do
- user = double(:user)
- temp_data = Tempfile.new
+ it 'generates sampling data' do
+ user = double(:user)
+ temp_data = Tempfile.new
- expect(described_class).to receive(:with_user).with(user).and_call_original
- described_class.profile('/', user: user, sampling_mode: true, profiler_options: { out: temp_data.path })
+ expect(described_class).to receive(:with_user).with(user).and_call_original
+ described_class.profile('/', user: user, profiler_options: { out: temp_data.path })
- expect(File.stat(temp_data).size).to be > 0
- File.unlink(temp_data)
- end
+ expect(File.stat(temp_data).size).to be > 0
+ File.unlink(temp_data)
+ end
- it 'saves sampling data with a randomly-generated filename' do
- user = double(:user)
+ it 'saves sampling data with a randomly-generated filename' do
+ user = double(:user)
- expect(described_class).to receive(:with_user).with(user).and_call_original
- result = described_class.profile('/', user: user, sampling_mode: true)
+ expect(described_class).to receive(:with_user).with(user).and_call_original
+ result = described_class.profile('/', user: user)
- expect(result).to be_a(File)
- expect(File.stat(result.path).size).to be > 0
- File.unlink(result.path)
- end
+ expect(result).to be_a(File)
+ expect(File.stat(result.path).size).to be > 0
+ File.unlink(result.path)
end
end
@@ -211,54 +209,4 @@ RSpec.describe Gitlab::Profiler do
expect(described_class.log_load_times_by_model(null_logger)).to be_nil
end
end
-
- describe '.print_by_total_time' do
- let(:stdout) { StringIO.new }
- let(:regexp) { /^\s+\d+\.\d+\s+(\d+\.\d+)/ }
-
- let(:output) do
- stdout.rewind
- stdout.read
- end
-
- let_it_be(:result) do
- Thread.new { sleep 1 }
-
- RubyProf.profile do
- sleep 0.1
- 1.to_s
- end
- end
-
- around do |example|
- original_stdout = $stdout
-
- $stdout = stdout # rubocop: disable RSpec/ExpectOutput
- example.run
- $stdout = original_stdout # rubocop: disable RSpec/ExpectOutput
- end
-
- it 'prints a profile result sorted by total time' do
- described_class.print_by_total_time(result)
-
- expect(output).to include('Kernel#sleep')
-
- thread_profiles = output.split('Sort by: total_time').select { |x| x =~ regexp }
-
- thread_profiles.each do |profile|
- total_times =
- profile
- .scan(regexp)
- .map { |(total)| total.to_f }
-
- expect(total_times).to eq(total_times.sort.reverse)
- end
- end
-
- it 'accepts a max_percent option' do
- described_class.print_by_total_time(result, max_percent: 50)
-
- expect(output).not_to include('Kernel#sleep')
- end
- end
end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index a9c0262fdb2..a762fdbde6b 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
shared_examples 'blob search repository ref' do |entity_type, blob_type|
let(:query) { 'files' }
let(:file_finder) { double }
- let(:project_branch) { 'project_branch' }
+ let(:project_branch) { blob_type == 'wiki_blobs' ? entity.default_branch : 'project_branch' }
subject(:objects) { results.objects(blob_type) }
@@ -209,8 +209,11 @@ RSpec.describe Gitlab::ProjectSearchResults do
describe 'wiki search' do
let(:project) { create(:project, :public, :wiki_repo) }
+ let(:project_branch) { 'project_branch' }
before do
+ allow(project.wiki).to receive(:root_ref).and_return(project_branch)
+
project.wiki.create_page('Files/Title', 'Content')
project.wiki.create_page('CHANGELOG', 'Files example')
end
diff --git a/spec/lib/gitlab/project_transfer_spec.rb b/spec/lib/gitlab/project_transfer_spec.rb
index 87c4014264f..3d6aa80c51f 100644
--- a/spec/lib/gitlab/project_transfer_spec.rb
+++ b/spec/lib/gitlab/project_transfer_spec.rb
@@ -15,10 +15,11 @@ RSpec.describe Gitlab::ProjectTransfer do
end
after do
- FileUtils.rm_rf([
- File.join(@root_dir, @namespace_path),
- File.join(@root_dir, @namespace_path_was)
- ])
+ FileUtils.rm_rf(
+ [
+ File.join(@root_dir, @namespace_path),
+ File.join(@root_dir, @namespace_path_was)
+ ])
end
describe '#move_project' do
diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb
index 9083c5625d4..d0bfc6e5610 100644
--- a/spec/lib/gitlab/prometheus_client_spec.rb
+++ b/spec/lib/gitlab/prometheus_client_spec.rb
@@ -300,12 +300,13 @@ RSpec.describe Gitlab::PrometheusClient do
it 'returns data from the API call' do
req_stub = stub_prometheus_request(query_url, body: prometheus_values_body('matrix'))
- expect(subject.query_range(prometheus_query)).to eq([
- {
- "metric" => {},
- "values" => [[1488758662.506, "0.00002996364761904785"], [1488758722.506, "0.00003090239047619091"]]
- }
- ])
+ expect(subject.query_range(prometheus_query)).to eq(
+ [
+ {
+ "metric" => {},
+ "values" => [[1488758662.506, "0.00002996364761904785"], [1488758722.506, "0.00003090239047619091"]]
+ }
+ ])
expect(req_stub).to have_been_requested
end
end
diff --git a/spec/lib/gitlab/push_options_spec.rb b/spec/lib/gitlab/push_options_spec.rb
index 3ff1c8e9012..054beaf7012 100644
--- a/spec/lib/gitlab/push_options_spec.rb
+++ b/spec/lib/gitlab/push_options_spec.rb
@@ -52,10 +52,11 @@ RSpec.describe Gitlab::PushOptions do
end
it 'can parse multiple push options' do
- options = described_class.new([
- 'merge_request.create',
- 'merge_request.target=value'
- ])
+ options = described_class.new(
+ [
+ 'merge_request.create',
+ 'merge_request.target=value'
+ ])
expect(options.get(:merge_request)).to include({
create: true,
@@ -66,19 +67,21 @@ RSpec.describe Gitlab::PushOptions do
end
it 'stores options internally as a HashWithIndifferentAccess' do
- options = described_class.new([
- 'merge_request.create'
- ])
+ options = described_class.new(
+ [
+ 'merge_request.create'
+ ])
expect(options.get('merge_request', 'create')).to eq(true)
expect(options.get(:merge_request, :create)).to eq(true)
end
it 'selects the last option when options contain duplicate namespace and key pairs' do
- options = described_class.new([
- 'merge_request.target=value1',
- 'merge_request.target=value2'
- ])
+ options = described_class.new(
+ [
+ 'merge_request.target=value1',
+ 'merge_request.target=value2'
+ ])
expect(options.get(:merge_request, :target)).to eq('value2')
end
diff --git a/spec/lib/gitlab/query_limiting/transaction_spec.rb b/spec/lib/gitlab/query_limiting/transaction_spec.rb
index 27da1f23556..d8eb2040ccc 100644
--- a/spec/lib/gitlab/query_limiting/transaction_spec.rb
+++ b/spec/lib/gitlab/query_limiting/transaction_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
context 'when the query threshold is exceeded' do
let(:transaction) do
trans = described_class.new
- trans.count = described_class::THRESHOLD + 1
+ trans.count = described_class.threshold + 1
trans
end
@@ -120,7 +120,7 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
it 'returns true when the threshold is exceeded' do
transaction = described_class.new
- transaction.count = described_class::THRESHOLD + 1
+ transaction.count = described_class.threshold + 1
expect(transaction.threshold_exceeded?).to eq(true)
end
@@ -129,7 +129,7 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
describe '#error_message' do
it 'returns the error message to display when the threshold is exceeded' do
transaction = described_class.new
- transaction.count = max = described_class::THRESHOLD
+ transaction.count = max = described_class.threshold
expect(transaction.error_message).to eq(
"Too many SQL queries were executed: a maximum of #{max} " \
@@ -139,7 +139,7 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
it 'includes a list of executed queries' do
transaction = described_class.new
- transaction.count = max = described_class::THRESHOLD
+ transaction.count = max = described_class.threshold
%w[foo bar baz].each { |sql| transaction.executed_sql(sql) }
message = transaction.error_message
@@ -154,7 +154,7 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
it 'indicates if the log is truncated' do
transaction = described_class.new
- transaction.count = described_class::THRESHOLD * 2
+ transaction.count = described_class.threshold * 2
message = transaction.error_message
@@ -163,7 +163,7 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
it 'includes the action name in the error message when present' do
transaction = described_class.new
- transaction.count = max = described_class::THRESHOLD
+ transaction.count = max = described_class.threshold
transaction.action = 'UsersController#show'
expect(transaction.error_message).to eq(
diff --git a/spec/lib/gitlab/rack_attack/request_spec.rb b/spec/lib/gitlab/rack_attack/request_spec.rb
index b8a26a64e5b..5345205e15b 100644
--- a/spec/lib/gitlab/rack_attack/request_spec.rb
+++ b/spec/lib/gitlab/rack_attack/request_spec.rb
@@ -217,10 +217,11 @@ RSpec.describe Gitlab::RackAttack::Request do
subject { request.protected_path? }
before do
- stub_application_setting(protected_paths: [
- '/protected',
- '/secure'
- ])
+ stub_application_setting(
+ protected_paths: [
+ '/protected',
+ '/secure'
+ ])
end
where(:path, :expected) do
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 177e9d346b6..0ee8c35ae81 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -193,7 +193,7 @@ RSpec.describe Gitlab::ReferenceExtractor do
end
context 'with an external issue tracker' do
- let(:project) { create(:jira_project) }
+ let(:project) { create(:project, :with_jira_integration) }
let(:issue) { create(:issue, project: project) }
context 'when GitLab issues are enabled' do
diff --git a/spec/lib/gitlab/regex_requires_app_spec.rb b/spec/lib/gitlab/regex_requires_app_spec.rb
index 5808033dc4c..780184cdfd2 100644
--- a/spec/lib/gitlab/regex_requires_app_spec.rb
+++ b/spec/lib/gitlab/regex_requires_app_spec.rb
@@ -30,6 +30,8 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('AMD64') }
it { is_expected.not_to match('Amd64') }
it { is_expected.not_to match('aMD64') }
+
+ it_behaves_like 'regex rejecting path traversal'
end
describe '.npm_package_name_regex' do
@@ -73,6 +75,8 @@ RSpec.describe Gitlab::Regex do
# Do not allow Unicode
it { is_expected.not_to match('hé') }
+
+ it_behaves_like 'regex rejecting path traversal'
end
describe '.debian_component_regex' do
@@ -86,5 +90,7 @@ RSpec.describe Gitlab::Regex do
# Do not allow Unicode
it { is_expected.not_to match('hé') }
+
+ it_behaves_like 'regex rejecting path traversal'
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index d8f182d903d..89ef76d246e 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -3,6 +3,7 @@
require 'fast_spec_helper'
require_relative '../../../lib/gitlab/regex'
+require_relative '../../support/shared_examples/lib/gitlab/regex_shared_examples'
# All specs that can be run with fast_spec_helper only
# See regex_requires_app_spec for tests that require the full spec_helper
@@ -543,6 +544,8 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('aA') }
# No underscore
it { is_expected.not_to match('a_b') }
+
+ it_behaves_like 'regex rejecting path traversal'
end
describe '.debian_version_regex' do
@@ -596,6 +599,13 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to match('1-2-3-4-5-6-7-8-9-10-11-12-13-14-15') }
it { is_expected.not_to match('1-2-3-4-5-6-7-8-9-10-11-12-13-14-15-16') }
end
+
+ context 'path traversals' do
+ it { is_expected.not_to match('1../0') }
+ it { is_expected.not_to match('1..%2f0') }
+ it { is_expected.not_to match('1%2e%2e%2f0') }
+ it { is_expected.not_to match('1%2e%2e/0') }
+ end
end
describe '.helm_channel_regex' do
diff --git a/spec/lib/gitlab/search/abuse_detection_spec.rb b/spec/lib/gitlab/search/abuse_detection_spec.rb
index 2a8d74a62ab..7fb9621141c 100644
--- a/spec/lib/gitlab/search/abuse_detection_spec.rb
+++ b/spec/lib/gitlab/search/abuse_detection_spec.rb
@@ -21,16 +21,16 @@ RSpec.describe Gitlab::Search::AbuseDetection do
describe 'abusive character matching' do
refs = %w(
- main
- тест
- maiñ
- main123
- main-v123
- main-v12.3
- feature/it_works
- really_important!
- 测试
- )
+ main
+ тест
+ maiñ
+ main123
+ main-v123
+ main-v12.3
+ feature/it_works
+ really_important!
+ 测试
+ )
refs.each do |ref|
it "does match refs permitted by git refname: #{ref}" do
diff --git a/spec/lib/gitlab/search/query_spec.rb b/spec/lib/gitlab/search/query_spec.rb
index 234b683ba1f..cdab7f1c04b 100644
--- a/spec/lib/gitlab/search/query_spec.rb
+++ b/spec/lib/gitlab/search/query_spec.rb
@@ -64,4 +64,38 @@ RSpec.describe Gitlab::Search::Query do
expect(subject.filters[0]).to include(name: :name, negated: false, value: "MY TEST.TXT")
end
end
+
+ context 'with multiple filename filters' do
+ let(:query) { 'something filename:myfile.txt -filename:ANOTHERFILE.yml filename:somethingelse.txt' }
+ let(:subject) do
+ described_class.new(query) do
+ filter :filename
+ end
+ end
+
+ it 'creates a filter for each filename in query' do
+ expect(subject.filters.count).to eq(3)
+ expect(subject.filters[0]).to include(name: :filename, negated: false, value: 'myfile.txt')
+ expect(subject.filters[1]).to include(name: :filename, negated: true, value: 'anotherfile.yml')
+ expect(subject.filters[2]).to include(name: :filename, negated: false, value: 'somethingelse.txt')
+ end
+
+ context 'when multiple extension filters are added' do
+ let(:query) { 'something filename:myfile.txt -extension:yml -filename:ANOTHERFILE.yml extension:txt' }
+ let(:subject) do
+ described_class.new(query) do
+ filter :filename
+ filter :extension
+ end
+ end
+
+ it 'creates a filter for each filename and extension in query' do
+ expect(subject.filters.count).to eq(4)
+ expect(subject.filters[0]).to include(name: :filename, negated: false, value: 'myfile.txt')
+ expect(subject.filters[1]).to include(name: :filename, negated: true, value: 'anotherfile.yml')
+ expect(subject.filters[2]).to include(name: :extension, negated: true, value: 'yml')
+ expect(subject.filters[3]).to include(name: :extension, negated: false, value: 'txt')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/serializer/ci/variables_spec.rb b/spec/lib/gitlab/serializer/ci/variables_spec.rb
index 9b0475259fe..02f1d543e4b 100644
--- a/spec/lib/gitlab/serializer/ci/variables_spec.rb
+++ b/spec/lib/gitlab/serializer/ci/variables_spec.rb
@@ -13,9 +13,10 @@ RSpec.describe Gitlab::Serializer::Ci::Variables do
end
it 'converts keys into strings and symbolizes hash' do
- is_expected.to eq([
- { key: 'key', value: 'value', public: true },
- { key: 'wee', value: 1, public: false }
- ])
+ is_expected.to eq(
+ [
+ { key: 'key', value: 'value', public: true },
+ { key: 'wee', value: 1, public: false }
+ ])
end
end
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index c62302d8bba..c5b00afe672 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -193,9 +193,7 @@ RSpec.describe Gitlab::SidekiqConfig do
it 'returns worker queue mappings that have queues in the current Sidekiq options' do
queues = described_class.routing_queues
- expect(queues).to match_array(%w[
- default mailers high_urgency gitaly
- ])
+ expect(queues).to match_array(%w[default mailers high_urgency gitaly])
expect(queues).not_to include('not_exist')
end
end
diff --git a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
index dff04a2e509..62681b21756 100644
--- a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
@@ -130,9 +130,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
end
it 'return true when everything is within limit', :aggregate_failures do
- expect(memory_killer).to receive(:get_rss).and_return(100)
- expect(memory_killer).to receive(:get_soft_limit_rss).and_return(200)
- expect(memory_killer).to receive(:get_hard_limit_rss).and_return(300)
+ expect(memory_killer).to receive(:get_rss_kb).and_return(100)
+ expect(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200)
+ expect(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300)
+ expect(memory_killer).to receive(:get_memory_total_kb).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
@@ -145,9 +146,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
end
it 'return false when rss exceeds hard_limit_rss', :aggregate_failures do
- expect(memory_killer).to receive(:get_rss).at_least(:once).and_return(400)
- expect(memory_killer).to receive(:get_soft_limit_rss).at_least(:once).and_return(200)
- expect(memory_killer).to receive(:get_hard_limit_rss).at_least(:once).and_return(300)
+ expect(memory_killer).to receive(:get_rss_kb).at_least(:once).and_return(400)
+ expect(memory_killer).to receive(:get_soft_limit_rss_kb).at_least(:once).and_return(200)
+ expect(memory_killer).to receive(:get_hard_limit_rss_kb).at_least(:once).and_return(300)
+ expect(memory_killer).to receive(:get_memory_total_kb).at_least(:once).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
@@ -165,9 +167,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
end
it 'return false when rss exceed hard_limit_rss after a while', :aggregate_failures do
- expect(memory_killer).to receive(:get_rss).and_return(250, 400, 400)
- expect(memory_killer).to receive(:get_soft_limit_rss).at_least(:once).and_return(200)
- expect(memory_killer).to receive(:get_hard_limit_rss).at_least(:once).and_return(300)
+ expect(memory_killer).to receive(:get_rss_kb).and_return(250, 400, 400)
+ expect(memory_killer).to receive(:get_soft_limit_rss_kb).at_least(:once).and_return(200)
+ expect(memory_killer).to receive(:get_hard_limit_rss_kb).at_least(:once).and_return(300)
+ expect(memory_killer).to receive(:get_memory_total_kb).at_least(:once).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
@@ -187,9 +190,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
end
it 'return true when rss below soft_limit_rss after a while within GRACE_BALLOON_SECONDS', :aggregate_failures do
- expect(memory_killer).to receive(:get_rss).and_return(250, 100)
- expect(memory_killer).to receive(:get_soft_limit_rss).and_return(200, 200)
- expect(memory_killer).to receive(:get_hard_limit_rss).and_return(300, 300)
+ expect(memory_killer).to receive(:get_rss_kb).and_return(250, 100)
+ expect(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200, 200)
+ expect(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300, 300)
+ expect(memory_killer).to receive(:get_memory_total_kb).and_return(3072, 3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
@@ -211,9 +215,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
let(:grace_balloon_seconds) { 0 }
it 'return false when rss exceed soft_limit_rss', :aggregate_failures do
- allow(memory_killer).to receive(:get_rss).and_return(250)
- allow(memory_killer).to receive(:get_soft_limit_rss).and_return(200)
- allow(memory_killer).to receive(:get_hard_limit_rss).and_return(300)
+ allow(memory_killer).to receive(:get_rss_kb).and_return(250)
+ allow(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200)
+ allow(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300)
+ allow(memory_killer).to receive(:get_memory_total_kb).and_return(3072)
expect(memory_killer).to receive(:refresh_state)
.with(:running)
@@ -235,40 +240,57 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
subject { memory_killer.send(:restart_sidekiq) }
- before do
- stub_const("#{described_class}::SHUTDOWN_TIMEOUT_SECONDS", shutdown_timeout_seconds)
- allow(Sidekiq).to receive(:options).and_return(timeout: 9)
- allow(memory_killer).to receive(:get_rss).and_return(100)
- allow(memory_killer).to receive(:get_soft_limit_rss).and_return(200)
- allow(memory_killer).to receive(:get_hard_limit_rss).and_return(300)
+ context 'when sidekiq_memory_killer_read_only_mode is enabled' do
+ before do
+ stub_feature_flags(sidekiq_memory_killer_read_only_mode: true)
+ end
+
+ it 'does not send signal' do
+ expect(memory_killer).not_to receive(:refresh_state)
+ expect(memory_killer).not_to receive(:signal_and_wait)
+
+ subject
+ end
end
- it 'send signal' do
- expect(memory_killer).to receive(:refresh_state)
- .with(:stop_fetching_new_jobs)
- .ordered
- .and_call_original
- expect(memory_killer).to receive(:signal_and_wait)
- .with(shutdown_timeout_seconds, 'SIGTSTP', 'stop fetching new jobs')
- .ordered
+ context 'when sidekiq_memory_killer_read_only_mode is disabled' do
+ before do
+ stub_const("#{described_class}::SHUTDOWN_TIMEOUT_SECONDS", shutdown_timeout_seconds)
+ stub_feature_flags(sidekiq_memory_killer_read_only_mode: false)
+ allow(Sidekiq).to receive(:options).and_return(timeout: 9)
+ allow(memory_killer).to receive(:get_rss_kb).and_return(100)
+ allow(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(200)
+ allow(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(300)
+ allow(memory_killer).to receive(:get_memory_total_kb).and_return(3072)
+ end
- expect(memory_killer).to receive(:refresh_state)
- .with(:shutting_down)
- .ordered
- .and_call_original
- expect(memory_killer).to receive(:signal_and_wait)
- .with(11, 'SIGTERM', 'gracefully shut down')
- .ordered
+ it 'send signal' do
+ expect(memory_killer).to receive(:refresh_state)
+ .with(:stop_fetching_new_jobs)
+ .ordered
+ .and_call_original
+ expect(memory_killer).to receive(:signal_and_wait)
+ .with(shutdown_timeout_seconds, 'SIGTSTP', 'stop fetching new jobs')
+ .ordered
- expect(memory_killer).to receive(:refresh_state)
- .with(:killing_sidekiq)
- .ordered
- .and_call_original
- expect(memory_killer).to receive(:signal_pgroup)
- .with('SIGKILL', 'die')
- .ordered
+ expect(memory_killer).to receive(:refresh_state)
+ .with(:shutting_down)
+ .ordered
+ .and_call_original
+ expect(memory_killer).to receive(:signal_and_wait)
+ .with(11, 'SIGTERM', 'gracefully shut down')
+ .ordered
- subject
+ expect(memory_killer).to receive(:refresh_state)
+ .with(:killing_sidekiq)
+ .ordered
+ .and_call_original
+ expect(memory_killer).to receive(:signal_pgroup)
+ .with('SIGKILL', 'die')
+ .ordered
+
+ subject
+ end
end
end
@@ -351,6 +373,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
let(:current_rss) { 100 }
let(:soft_limit_rss) { 200 }
let(:hard_limit_rss) { 300 }
+ let(:memory_total) { 3072 }
let(:jid) { 1 }
let(:reason) { 'rss out of range reason description' }
let(:queue) { 'default' }
@@ -369,9 +392,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
before do
stub_const("DummyWorker", worker)
- allow(memory_killer).to receive(:get_rss).and_return(*current_rss)
- allow(memory_killer).to receive(:get_soft_limit_rss).and_return(soft_limit_rss)
- allow(memory_killer).to receive(:get_hard_limit_rss).and_return(hard_limit_rss)
+ allow(memory_killer).to receive(:get_rss_kb).and_return(*current_rss)
+ allow(memory_killer).to receive(:get_soft_limit_rss_kb).and_return(soft_limit_rss)
+ allow(memory_killer).to receive(:get_hard_limit_rss_kb).and_return(hard_limit_rss)
+ allow(memory_killer).to receive(:get_memory_total_kb).and_return(memory_total)
memory_killer.send(:refresh_state, :running)
end
@@ -389,7 +413,8 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
hard_limit_rss: hard_limit_rss,
soft_limit_rss: soft_limit_rss,
reason: reason,
- running_jobs: running_jobs)
+ running_jobs: running_jobs,
+ memory_total_kb: memory_total)
expect(metrics[:sidekiq_memory_killer_running_jobs]).to receive(:increment)
.with({ worker_class: "DummyWorker", deadline_exceeded: true })
@@ -525,9 +550,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
subject { memory_killer.send(:refresh_state, :shutting_down) }
it 'calls gitlab metrics gauge set methods' do
- expect(memory_killer).to receive(:get_rss) { 1010 }
- expect(memory_killer).to receive(:get_soft_limit_rss) { 1020 }
- expect(memory_killer).to receive(:get_hard_limit_rss) { 1040 }
+ expect(memory_killer).to receive(:get_rss_kb) { 1010 }
+ expect(memory_killer).to receive(:get_soft_limit_rss_kb) { 1020 }
+ expect(memory_killer).to receive(:get_hard_limit_rss_kb) { 1040 }
+ expect(memory_killer).to receive(:get_memory_total_kb) { 3072 }
expect(metrics[:sidekiq_memory_killer_phase]).to receive(:set)
.with({}, described_class::PHASE[:shutting_down])
diff --git a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
deleted file mode 100644
index 1667622ad8e..00000000000
--- a/spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb
+++ /dev/null
@@ -1,83 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::SidekiqMiddleware::MemoryKiller do
- subject { described_class.new }
-
- let(:pid) { 999 }
-
- let(:worker) { double(:worker, class: ProjectCacheWorker) }
- let(:job) { { 'jid' => 123 } }
- let(:queue) { 'test_queue' }
-
- def run
- thread = subject.call(worker, job, queue) { nil }
- thread&.join
- end
-
- before do
- allow(subject).to receive(:get_rss).and_return(10.kilobytes)
- allow(subject).to receive(:pid).and_return(pid)
- end
-
- context 'when MAX_RSS is set to 0' do
- before do
- stub_const("#{described_class}::MAX_RSS", 0)
- end
-
- it 'does nothing' do
- expect(subject).not_to receive(:sleep)
-
- run
- end
- end
-
- context 'when MAX_RSS is exceeded' do
- before do
- stub_const("#{described_class}::MAX_RSS", 5.kilobytes)
- end
-
- it 'sends the TSTP, TERM and KILL signals at expected times' do
- expect(subject).to receive(:sleep).with(15 * 60).ordered
- expect(Process).to receive(:kill).with('SIGTSTP', pid).ordered
-
- expect(subject).to receive(:sleep).with(30).ordered
- expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
-
- expect(subject).to receive(:sleep).with(Sidekiq.options[:timeout] + 2).ordered
- expect(Process).to receive(:kill).with('SIGKILL', pid).ordered
-
- expect(Sidekiq.logger)
- .to receive(:warn).with(class: 'ProjectCacheWorker',
- message: anything,
- pid: pid,
- signal: anything).at_least(:once)
-
- run
- end
-
- it 'sends TSTP and TERM to the pid, but KILL to the pgroup, when running as process leader' do
- allow(Process).to receive(:getpgrp) { pid }
- allow(subject).to receive(:sleep)
-
- expect(Process).to receive(:kill).with('SIGTSTP', pid).ordered
- expect(Process).to receive(:kill).with('SIGTERM', pid).ordered
- expect(Process).to receive(:kill).with('SIGKILL', 0).ordered
-
- run
- end
- end
-
- context 'when MAX_RSS is not exceeded' do
- before do
- stub_const("#{described_class}::MAX_RSS", 15.kilobytes)
- end
-
- it 'does nothing' do
- expect(subject).not_to receive(:sleep)
-
- run
- end
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 52b50a143fc..54a1723afbc 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -322,8 +322,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
with_sidekiq_server_middleware do |chain|
Gitlab::SidekiqMiddleware.server_configurator(
metrics: true,
- arguments_logger: false,
- memory_killer: false
+ arguments_logger: false
).call(chain)
Sidekiq::Testing.inline! { example.run }
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index e687c8e8cf7..14dbeac37e8 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -60,7 +60,6 @@ RSpec.describe Gitlab::SidekiqMiddleware do
::Labkit::Middleware::Sidekiq::Server,
::Gitlab::SidekiqMiddleware::ServerMetrics,
::Gitlab::SidekiqMiddleware::ArgumentsLogger,
- ::Gitlab::SidekiqMiddleware::MemoryKiller,
::Gitlab::SidekiqMiddleware::RequestStoreMiddleware,
::Gitlab::SidekiqMiddleware::ExtraDoneLogMetadata,
::Gitlab::SidekiqMiddleware::BatchLoader,
@@ -79,8 +78,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
with_sidekiq_server_middleware do |chain|
described_class.server_configurator(
metrics: true,
- arguments_logger: true,
- memory_killer: true
+ arguments_logger: true
).call(chain)
Sidekiq::Testing.inline! { example.run }
@@ -112,16 +110,14 @@ RSpec.describe Gitlab::SidekiqMiddleware do
let(:configurator) do
described_class.server_configurator(
metrics: false,
- arguments_logger: false,
- memory_killer: false
+ arguments_logger: false
)
end
let(:disabled_sidekiq_middlewares) do
[
Gitlab::SidekiqMiddleware::ServerMetrics,
- Gitlab::SidekiqMiddleware::ArgumentsLogger,
- Gitlab::SidekiqMiddleware::MemoryKiller
+ Gitlab::SidekiqMiddleware::ArgumentsLogger
]
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index 027697db7e1..7f1504a8df9 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
key = described_class.key_for('123')
with_redis do |redis|
- expect(redis.exists(key)).to eq(true)
+ expect(redis.exists?(key)).to eq(true)
expect(redis.ttl(key) > 0).to eq(true)
expect(redis.get(key)).to eq('1')
end
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
key = described_class.key_for('123')
with_redis do |redis|
- expect(redis.exists(key)).to eq(true)
+ expect(redis.exists?(key)).to eq(true)
expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
expect(redis.get(key)).to eq('1')
end
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
key = described_class.key_for('123')
with_redis do |redis|
- expect(redis.exists(key)).to eq(false)
+ expect(redis.exists?(key)).to eq(false)
end
end
end
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
key = described_class.key_for('123')
with_redis do |redis|
- expect(redis.exists(key)).to eq(false)
+ expect(redis.exists?(key)).to eq(false)
end
end
end
diff --git a/spec/lib/gitlab/slash_commands/issue_new_spec.rb b/spec/lib/gitlab/slash_commands/issue_new_spec.rb
index c17cee887ee..29a941f3691 100644
--- a/spec/lib/gitlab/slash_commands/issue_new_spec.rb
+++ b/spec/lib/gitlab/slash_commands/issue_new_spec.rb
@@ -53,6 +53,21 @@ RSpec.describe Gitlab::SlashCommands::IssueNew do
expect(subject[:response_type]).to be(:ephemeral)
expect(subject[:text]).to match("- Title is too long")
end
+
+ context 'when create issue service returns an unrecoverable error' do
+ let(:regex_match) { described_class.match("issue create title}") }
+
+ before do
+ allow_next_instance_of(Issues::CreateService) do |create_service|
+ allow(create_service).to receive(:execute).and_return(ServiceResponse.error(message: 'unauthorized'))
+ end
+ end
+
+ it 'displays the errors' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ expect(subject[:text]).to eq('unauthorized')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb
index 114a18cf99a..a2524314458 100644
--- a/spec/lib/gitlab/ssh_public_key_spec.rb
+++ b/spec/lib/gitlab/ssh_public_key_spec.rb
@@ -88,12 +88,12 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true, fips_mode: false do
it 'returns all supported algorithms' do
expect(described_class.supported_algorithms).to eq(
%w(
- ssh-rsa
- ssh-dss
- ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521
- ssh-ed25519
- sk-ecdsa-sha2-nistp256@openssh.com
- sk-ssh-ed25519@openssh.com
+ ssh-rsa
+ ssh-dss
+ ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521
+ ssh-ed25519
+ sk-ecdsa-sha2-nistp256@openssh.com
+ sk-ssh-ed25519@openssh.com
)
)
end
@@ -102,12 +102,12 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true, fips_mode: false do
it 'returns all supported algorithms' do
expect(described_class.supported_algorithms).to eq(
%w(
- ssh-rsa
- ssh-dss
- ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521
- ssh-ed25519
- sk-ecdsa-sha2-nistp256@openssh.com
- sk-ssh-ed25519@openssh.com
+ ssh-rsa
+ ssh-dss
+ ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521
+ ssh-ed25519
+ sk-ecdsa-sha2-nistp256@openssh.com
+ sk-ssh-ed25519@openssh.com
)
)
end
diff --git a/spec/lib/gitlab/tracking/service_ping_context_spec.rb b/spec/lib/gitlab/tracking/service_ping_context_spec.rb
new file mode 100644
index 00000000000..d70dfaa4e0b
--- /dev/null
+++ b/spec/lib/gitlab/tracking/service_ping_context_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::ServicePingContext do
+ describe '#init' do
+ it 'does not accept unsupported data sources' do
+ expect { described_class.new(data_source: :random, event: 'event a') }.to raise_error(ArgumentError)
+ end
+ end
+
+ describe '#to_context' do
+ let(:subject) { described_class.new(data_source: :redis_hll, event: 'sample_event') }
+
+ it 'contains event_name' do
+ expect(subject.to_context.to_json.dig(:data, :event_name)).to eq('sample_event')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
index 8e02f4f562c..76eec2755df 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
@@ -235,10 +235,27 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
end
end
- it 'allows for YAML aliases in aggregated metrics configs' do
- expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true).at_least(:once)
+ context 'legacy aggregated metrics configuration' do
+ let(:temp_dir) { Dir.mktmpdir }
+ let(:temp_file) { Tempfile.new(%w[common .yml], temp_dir) }
+
+ before do
+ stub_const("#{namespace}::AGGREGATED_METRICS_PATH", File.expand_path('*.yml', temp_dir))
+ File.open(temp_file.path, "w+b") do |file|
+ file.write [aggregated_metric(name: "gmau_1", time_frame: '7d')].to_yaml
+ end
+ end
+
+ after do
+ temp_file.unlink
+ FileUtils.remove_entry(temp_dir) if Dir.exist?(temp_dir)
+ end
- described_class.new(recorded_at)
+ it 'allows for YAML aliases in aggregated metrics configs' do
+ expect(YAML).to receive(:safe_load).with(kind_of(String), aliases: true).at_least(:once)
+
+ described_class.new(recorded_at)
+ end
end
describe '.aggregated_metrics_weekly_data' do
@@ -260,5 +277,132 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
it_behaves_like 'database_sourced_aggregated_metrics'
it_behaves_like 'redis_sourced_aggregated_metrics'
end
+
+ describe '.calculate_count_for_aggregation' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'with valid configuration' do
+ where(:number_of_days, :operator, :datasource, :expected_method) do
+ 28 | 'AND' | 'redis' | :calculate_metrics_intersections
+ 7 | 'AND' | 'redis' | :calculate_metrics_intersections
+ 28 | 'AND' | 'database' | :calculate_metrics_intersections
+ 7 | 'AND' | 'database' | :calculate_metrics_intersections
+ 28 | 'OR' | 'redis' | :calculate_metrics_union
+ 7 | 'OR' | 'redis' | :calculate_metrics_union
+ 28 | 'OR' | 'database' | :calculate_metrics_union
+ 7 | 'OR' | 'database' | :calculate_metrics_union
+ end
+
+ with_them do
+ let(:time_frame) { "#{number_of_days}d" }
+ let(:start_date) { number_of_days.days.ago.to_date }
+ let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
+ let(:aggregate) do
+ {
+ source: datasource,
+ operator: operator,
+ events: %w[event1 event2]
+ }
+ end
+
+ subject(:calculate_count_for_aggregation) do
+ described_class
+ .new(recorded_at)
+ .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
+ end
+
+ it 'returns the number of unique events for aggregation', :aggregate_failures do
+ expect(namespace::SOURCES[datasource])
+ .to receive(expected_method)
+ .with(params.merge(metric_names: %w[event1 event2]))
+ .and_return(5)
+ expect(calculate_count_for_aggregation).to eq(5)
+ end
+ end
+ end
+
+ context 'with invalid configuration' do
+ where(:time_frame, :operator, :datasource, :expected_error) do
+ '28d' | 'SUM' | 'redis' | namespace::UnknownAggregationOperator
+ '7d' | 'AND' | 'mongodb' | namespace::UnknownAggregationSource
+ 'all' | 'AND' | 'redis' | namespace::DisallowedAggregationTimeFrame
+ end
+
+ with_them do
+ let(:aggregate) do
+ {
+ source: datasource,
+ operator: operator,
+ events: %w[event1 event2]
+ }
+ end
+
+ subject(:calculate_count_for_aggregation) do
+ described_class
+ .new(recorded_at)
+ .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
+ end
+
+ context 'with non prod environment' do
+ it 'raises error' do
+ expect { calculate_count_for_aggregation }.to raise_error expected_error
+ end
+ end
+
+ context 'with prod environment' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'returns fallback value' do
+ expect(calculate_count_for_aggregation).to be(-1)
+ end
+ end
+ end
+ end
+
+ context 'when union data is not available' do
+ subject(:calculate_count_for_aggregation) do
+ described_class
+ .new(recorded_at)
+ .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
+ end
+
+ where(:time_frame, :operator, :datasource) do
+ '28d' | 'OR' | 'redis'
+ '7d' | 'OR' | 'database'
+ end
+
+ with_them do
+ before do
+ allow(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).and_raise(sources::UnionNotAvailable)
+ end
+
+ let(:aggregate) do
+ {
+ source: datasource,
+ operator: operator,
+ events: %w[event1 event2]
+ }
+ end
+
+ context 'with non prod environment' do
+ it 'raises error' do
+ expect { calculate_count_for_aggregation }.to raise_error sources::UnionNotAvailable
+ end
+ end
+
+ context 'with prod environment' do
+ before do
+ stub_rails_env('production')
+ end
+
+ it 'returns fallback value' do
+ expect(calculate_count_for_aggregation).to be(-1)
+ end
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/aggregated_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/aggregated_metric_spec.rb
new file mode 100644
index 00000000000..3e7b13e21c1
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/aggregated_metric_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::AggregatedMetric, :clean_gitlab_redis_shared_state do
+ using RSpec::Parameterized::TableSyntax
+ before do
+ # weekly AND 1 weekly OR 2
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 1.week.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_unapprove, values: 1, time: 1.week.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_unapprove, values: 2, time: 1.week.ago)
+
+ # monthly AND 2 monthly OR 3
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.weeks.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_unapprove, values: 3, time: 2.weeks.ago)
+
+ # out of date range
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 3, time: 2.months.ago)
+
+ # database events
+ Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll
+ .save_aggregated_metrics(
+ metric_name: :i_quickactions_approve,
+ time_period: { created_at: (1.week.ago..Date.current) },
+ recorded_at_timestamp: Time.current,
+ data: ::Gitlab::Database::PostgresHll::Buckets.new(141 => 1, 56 => 1)
+ )
+ Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll
+ .save_aggregated_metrics(
+ metric_name: :i_quickactions_unapprove,
+ time_period: { created_at: (1.week.ago..Date.current) },
+ recorded_at_timestamp: Time.current,
+ data: ::Gitlab::Database::PostgresHll::Buckets.new(10 => 1, 56 => 1)
+ )
+ end
+
+ where(:data_source, :time_frame, :operator, :expected_value) do
+ 'redis_hll' | '28d' | 'AND' | 2
+ 'redis_hll' | '28d' | 'OR' | 3
+ 'redis_hll' | '7d' | 'AND' | 1
+ 'redis_hll' | '7d' | 'OR' | 2
+ 'database' | '7d' | 'OR' | 3.0
+ 'database' | '7d' | 'AND' | 1.0
+ end
+
+ with_them do
+ let(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE }
+ let(:metric_definition) do
+ {
+ data_source: data_source,
+ time_frame: time_frame,
+ options: {
+ aggregate: {
+ operator: operator
+ },
+ events: %w[
+ i_quickactions_approve
+ i_quickactions_unapprove
+ ]
+ }
+ }
+ end
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ it 'has correct value' do
+ expect(described_class.new(metric_definition).value).to be_within(error_rate).percent_of(expected_value)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb
new file mode 100644
index 00000000000..757adee6117
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/distinct_count_projects_with_expiration_policy_disabled_metric_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DistinctCountProjectsWithExpirationPolicyDisabledMetric do
+ before_all do
+ create(:container_expiration_policy, enabled: false)
+ create(:container_expiration_policy, enabled: false, created_at: 29.days.ago)
+ create(:container_expiration_policy, enabled: true)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d' } do
+ let(:expected_value) { 1 }
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' } do
+ let(:expected_value) { 2 }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_direct_installations_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_direct_installations_count_metric_spec.rb
new file mode 100644
index 00000000000..061558085a1
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_direct_installations_count_metric_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GitlabForJiraAppDirectInstallationsCountMetric do
+ before do
+ create(:jira_connect_subscription)
+ end
+
+ let(:expected_value) { 1 }
+ let(:expected_query) do
+ 'SELECT COUNT("jira_connect_installations"."id") FROM "jira_connect_installations"'\
+ ' INNER JOIN "jira_connect_subscriptions" ON "jira_connect_subscriptions"."jira_connect_installation_id"'\
+ ' = "jira_connect_installations"."id"'
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_proxy_installations_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_proxy_installations_count_metric_spec.rb
new file mode 100644
index 00000000000..4535bab7702
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/gitlab_for_jira_app_proxy_installations_count_metric_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GitlabForJiraAppProxyInstallationsCountMetric do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'http://self-managed-gitlab.com') }
+
+ before do
+ create(:jira_connect_subscription, installation: installation)
+ end
+
+ let(:expected_value) { 1 }
+ let(:expected_query) do
+ 'SELECT COUNT("jira_connect_installations"."id") FROM "jira_connect_installations"'\
+ ' WHERE "jira_connect_installations"."instance_url" IS NOT NULL'
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/merge_request_widget_extension_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/merge_request_widget_extension_metric_spec.rb
new file mode 100644
index 00000000000..c0ac00c9cdd
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/merge_request_widget_extension_metric_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::MergeRequestWidgetExtensionMetric,
+ :clean_gitlab_redis_shared_state do
+ before do
+ 4.times do
+ Gitlab::UsageDataCounters::MergeRequestWidgetExtensionCounter.count(:terraform_count_expand)
+ end
+ end
+
+ let(:expected_value) { 4 }
+
+ it_behaves_like 'a correct instrumented metric value', {
+ options: { event: 'expand', widget: 'terraform' },
+ time_frame: 'all'
+ }
+
+ it 'raises an exception if widget option is not present' do
+ expect do
+ described_class.new(options: { event: 'expand' }, time_frame: 'all')
+ end.to raise_error(ArgumentError, /'widget' option is required/)
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb
index 80ae5c6fd21..c4d6edd43e1 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/redis_metric_spec.rb
@@ -11,14 +11,21 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::RedisMetric, :clean_git
let(:expected_value) { 4 }
- it_behaves_like 'a correct instrumented metric value', { options: { event: 'pushes', prefix: 'source_code' } }
+ it_behaves_like 'a correct instrumented metric value', {
+ options: { event: 'pushes', prefix: 'source_code' },
+ time_frame: 'all'
+ }
it 'raises an exception if event option is not present' do
- expect { described_class.new(prefix: 'source_code') }.to raise_error(ArgumentError)
+ expect do
+ described_class.new(options: { prefix: 'source_code' }, time_frame: 'all')
+ end.to raise_error(ArgumentError, /'event' option is required/)
end
it 'raises an exception if prefix option is not present' do
- expect { described_class.new(event: 'pushes') }.to raise_error(ArgumentError)
+ expect do
+ described_class.new(options: { event: 'pushes' }, time_frame: 'all')
+ end.to raise_error(ArgumentError, /'prefix' option is required/)
end
describe 'children classes' do
@@ -55,7 +62,22 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::RedisMetric, :clean_git
end
it_behaves_like 'a correct instrumented metric value', {
- options: { event: 'merge_requests_count', prefix: 'web_ide', include_usage_prefix: false }
+ options: { event: 'merge_requests_count', prefix: 'web_ide', include_usage_prefix: false },
+ time_frame: 'all'
+ }
+ end
+
+ context "with prefix disabled" do
+ let(:expected_value) { 3 }
+
+ before do
+ 3.times do
+ Gitlab::UsageDataCounters::SearchCounter.count(:all_searches)
+ end
+ end
+
+ it_behaves_like 'a correct instrumented metric value', {
+ options: { event: 'all_searches_count', prefix: nil, include_usage_prefix: false }, time_frame: 'all'
}
end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb
new file mode 100644
index 00000000000..3e315692d0a
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/work_items_activity_aggregated_metric_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::WorkItemsActivityAggregatedMetric do
+ let(:metric_definition) do
+ {
+ data_source: 'redis_hll',
+ time_frame: time_frame,
+ options: {
+ aggregate: {
+ operator: 'OR'
+ },
+ events: %w[
+ users_creating_work_items
+ users_updating_work_item_title
+ users_updating_work_item_dates
+ users_updating_work_item_iteration
+ ]
+ }
+ }
+ end
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ where(:time_frame) { [['28d'], ['7d']] }
+
+ with_them do
+ describe '#available?' do
+ it 'returns false without track_work_items_activity feature' do
+ stub_feature_flags(track_work_items_activity: false)
+
+ expect(described_class.new(metric_definition).available?).to eq(false)
+ end
+
+ it 'returns true with track_work_items_activity feature' do
+ stub_feature_flags(track_work_items_activity: true)
+
+ expect(described_class.new(metric_definition).available?).to eq(true)
+ end
+ end
+
+ describe '#value', :clean_gitlab_redis_shared_state do
+ let(:counter) { Gitlab::UsageDataCounters::HLLRedisCounter }
+
+ before do
+ counter.track_event(:users_creating_work_items, values: 1, time: 1.week.ago)
+ counter.track_event(:users_updating_work_item_title, values: 1, time: 1.week.ago)
+ counter.track_event(:users_updating_work_item_dates, values: 2, time: 1.week.ago)
+ counter.track_event(:users_updating_work_item_iteration, values: 2, time: 1.week.ago)
+ end
+
+ it 'has correct value' do
+ expect(described_class.new(metric_definition).value).to eq 2
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data/topology_spec.rb b/spec/lib/gitlab/usage_data/topology_spec.rb
index dfdf8eaabe8..3fb87e77457 100644
--- a/spec/lib/gitlab/usage_data/topology_spec.rb
+++ b/spec/lib/gitlab/usage_data/topology_spec.rb
@@ -523,210 +523,210 @@ RSpec.describe Gitlab::UsageData::Topology do
receive(:query)
.with(/gitlab_usage_ping:ops:rate/)
.and_return(result || [
- {
- 'metric' => { 'component' => 'http_requests', 'service' => 'workhorse' },
- 'value' => [1000, '0.01']
- }
- ])
+ {
+ 'metric' => { 'component' => 'http_requests', 'service' => 'workhorse' },
+ 'value' => [1000, '0.01']
+ }
+ ])
end
def receive_query_apdex_ratio_query(result: nil)
receive(:query)
.with(/gitlab_usage_ping:sql_duration_apdex:ratio_rate5m/)
.and_return(result || [
- {
- 'metric' => {},
- 'value' => [1000, '0.996']
- }
- ])
+ {
+ 'metric' => {},
+ 'value' => [1000, '0.996']
+ }
+ ])
end
def receive_node_memory_query(result: nil)
receive(:query)
.with(/node_memory_total_bytes/, an_instance_of(Hash))
.and_return(result || [
- {
- 'metric' => { 'instance' => 'instance1:8080' },
- 'value' => [1000, '512']
- },
- {
- 'metric' => { 'instance' => 'instance2:8090' },
- 'value' => [1000, '1024']
- }
- ])
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '512']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '1024']
+ }
+ ])
end
def receive_node_memory_utilization_query(result: nil)
receive(:query)
.with(/node_memory_utilization/, an_instance_of(Hash))
.and_return(result || [
- {
- 'metric' => { 'instance' => 'instance1:8080' },
- 'value' => [1000, '0.45']
- },
- {
- 'metric' => { 'instance' => 'instance2:8090' },
- 'value' => [1000, '0.25']
- }
- ])
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '0.45']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '0.25']
+ }
+ ])
end
def receive_node_cpu_count_query(result: nil)
receive(:query)
.with(/node_cpus/, an_instance_of(Hash))
.and_return(result || [
- {
- 'metric' => { 'instance' => 'instance2:8090' },
- 'value' => [1000, '16']
- },
- {
- 'metric' => { 'instance' => 'instance1:8080' },
- 'value' => [1000, '8']
- }
- ])
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '16']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '8']
+ }
+ ])
end
def receive_node_cpu_utilization_query(result: nil)
receive(:query)
.with(/node_cpu_utilization/, an_instance_of(Hash))
.and_return(result || [
- {
- 'metric' => { 'instance' => 'instance2:8090' },
- 'value' => [1000, '0.2']
- },
- {
- 'metric' => { 'instance' => 'instance1:8080' },
- 'value' => [1000, '0.1']
- }
- ])
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '0.2']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '0.1']
+ }
+ ])
end
def receive_node_uname_info_query(result: nil)
receive(:query)
.with('node_uname_info')
.and_return(result || [
- {
- "metric" => {
- "__name__" => "node_uname_info",
- "domainname" => "(none)",
- "instance" => "instance1:9100",
- "job" => "node_exporter",
- "machine" => "x86_64",
- "nodename" => "instance1",
- "release" => "4.19.76-linuxkit",
- "sysname" => "Linux"
- },
- "value" => [1592463033.359, "1"]
- },
- {
- "metric" => {
- "__name__" => "node_uname_info",
- "domainname" => "(none)",
- "instance" => "instance2:9100",
- "job" => "node_exporter",
- "machine" => "x86_64",
- "nodename" => "instance2",
- "release" => "4.15.0-101-generic",
- "sysname" => "Linux"
- },
- "value" => [1592463033.359, "1"]
- }
- ])
+ {
+ "metric" => {
+ "__name__" => "node_uname_info",
+ "domainname" => "(none)",
+ "instance" => "instance1:9100",
+ "job" => "node_exporter",
+ "machine" => "x86_64",
+ "nodename" => "instance1",
+ "release" => "4.19.76-linuxkit",
+ "sysname" => "Linux"
+ },
+ "value" => [1592463033.359, "1"]
+ },
+ {
+ "metric" => {
+ "__name__" => "node_uname_info",
+ "domainname" => "(none)",
+ "instance" => "instance2:9100",
+ "job" => "node_exporter",
+ "machine" => "x86_64",
+ "nodename" => "instance2",
+ "release" => "4.15.0-101-generic",
+ "sysname" => "Linux"
+ },
+ "value" => [1592463033.359, "1"]
+ }
+ ])
end
def receive_node_service_memory_rss_query(result: nil)
receive(:query)
.with(/process_resident_memory_bytes/, an_instance_of(Hash))
.and_return(result || [
- {
- 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
- 'value' => [1000, '300']
- },
- {
- 'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq' },
- 'value' => [1000, '303']
- },
- # instance 2: runs a dedicated Sidekiq + Redis (which uses a different metric name)
- {
- 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
- 'value' => [1000, '400']
- },
- {
- 'metric' => { 'instance' => 'instance2:9121', 'job' => 'redis' },
- 'value' => [1000, '402']
- }
- ])
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
+ 'value' => [1000, '300']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq' },
+ 'value' => [1000, '303']
+ },
+ # instance 2: runs a dedicated Sidekiq + Redis (which uses a different metric name)
+ {
+ 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
+ 'value' => [1000, '400']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:9121', 'job' => 'redis' },
+ 'value' => [1000, '402']
+ }
+ ])
end
def receive_node_service_memory_uss_query(result: nil)
receive(:query)
.with(/process_unique_memory_bytes/, an_instance_of(Hash))
.and_return(result || [
- {
- 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
- 'value' => [1000, '301']
- }
- ])
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
+ 'value' => [1000, '301']
+ }
+ ])
end
def receive_node_service_memory_pss_query(result: nil)
receive(:query)
.with(/process_proportional_memory_bytes/, an_instance_of(Hash))
.and_return(result || [
- {
- 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
- 'value' => [1000, '302']
- },
- {
- 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
- 'value' => [1000, '401']
- }
- ])
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
+ 'value' => [1000, '302']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
+ 'value' => [1000, '401']
+ }
+ ])
end
def receive_node_service_process_count_query(result: nil)
receive(:query)
.with(/service_process:count/, an_instance_of(Hash))
.and_return(result || [
- # instance 1
- {
- 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
- 'value' => [1000, '10']
- },
- {
- 'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq' },
- 'value' => [1000, '5']
- },
- # instance 2
- {
- 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
- 'value' => [1000, '15']
- },
- {
- 'metric' => { 'instance' => 'instance2:9121', 'job' => 'redis' },
- 'value' => [1000, '1']
- },
- {
- 'metric' => { 'instance' => 'instance2:8080', 'job' => 'registry' },
- 'value' => [1000, '1']
- }
- ])
+ # instance 1
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
+ 'value' => [1000, '10']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq' },
+ 'value' => [1000, '5']
+ },
+ # instance 2
+ {
+ 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
+ 'value' => [1000, '15']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:9121', 'job' => 'redis' },
+ 'value' => [1000, '1']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:8080', 'job' => 'registry' },
+ 'value' => [1000, '1']
+ }
+ ])
end
def receive_node_service_app_server_workers_query(result: nil)
receive(:query)
.with(/app_server_workers/, an_instance_of(Hash))
.and_return(result || [
- # instance 1
- {
- 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails', 'server' => 'puma' },
- 'value' => [1000, '2']
- },
- # instance 2
- {
- 'metric' => { 'instance' => 'instance2:8080', 'job' => 'gitlab-rails', 'server' => 'puma' },
- 'value' => [1000, '1']
- }
- ])
+ # instance 1
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails', 'server' => 'puma' },
+ 'value' => [1000, '2']
+ },
+ # instance 2
+ {
+ 'metric' => { 'instance' => 'instance2:8080', 'job' => 'gitlab-rails', 'server' => 'puma' },
+ 'value' => [1000, '1']
+ }
+ ])
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index 6a37bfd106d..1ca0bb0e9ea 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -65,17 +65,11 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
context 'with implicit includes', :snowplow do
let(:config_source) { :auto_devops_source }
- [
- ['', ['Auto-DevOps.gitlab-ci.yml']],
- ['Jobs', described_class.ci_templates('lib/gitlab/ci/templates/Jobs')],
- ['Security', described_class.ci_templates('lib/gitlab/ci/templates/Security')]
- ].each do |directory, templates|
- templates.each do |template|
- context "for #{template}" do
- let(:template_path) { File.join(directory, template) }
-
- include_examples 'tracks template'
- end
+ described_class.all_included_templates('Auto-DevOps.gitlab-ci.yml').each do |template_name|
+ context "for #{template_name}" do
+ let(:template_path) { Gitlab::Template::GitlabCiYmlTemplate.find(template_name.delete_suffix('.gitlab-ci.yml')).full_name }
+
+ include_examples 'tracks template'
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
index 01396602f29..e122d9a3026 100644
--- a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
@@ -6,19 +6,22 @@ require 'spec_helper'
# NOTE: ONLY user related metrics to be added to the aggregates - otherwise add it to the exception list
RSpec.describe 'Code review events' do
it 'the aggregated metrics contain all the code review metrics' do
- path = Rails.root.join('config/metrics/aggregates/code_review.yml')
- aggregated_events = YAML.safe_load(File.read(path), aliases: true)&.map(&:with_indifferent_access)
-
- code_review_aggregated_events = aggregated_events
- .map { |event| event['events'] }
- .flatten
- .uniq
-
code_review_events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category("code_review")
+ code_review_aggregated_events = Gitlab::Usage::MetricDefinition.all.flat_map do |definition|
+ next [] unless code_review_aggregated_metric?(definition.attributes)
+
+ definition.attributes.dig(:options, :events)
+ end.uniq
exceptions = %w[i_code_review_mr_diffs i_code_review_mr_with_invalid_approvers i_code_review_mr_single_file_diffs i_code_review_total_suggestions_applied i_code_review_total_suggestions_added i_code_review_create_note_in_ipynb_diff i_code_review_create_note_in_ipynb_diff_mr i_code_review_create_note_in_ipynb_diff_commit]
code_review_aggregated_events += exceptions
expect(code_review_events - code_review_aggregated_events).to be_empty
end
+
+ def code_review_aggregated_metric?(attributes)
+ return false unless attributes[:product_group] == 'code_review' && attributes[:status] == 'active'
+
+ attributes[:instrumentation_class] == 'AggregatedMetric'
+ end
end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 3fb2532521a..d0b935d59dd 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -24,8 +24,10 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
context 'migration to instrumentation classes data collection' do
let_it_be(:instrumented_events) do
+ instrumentation_classes = %w[AggregatedMetric RedisHLLMetric]
::Gitlab::Usage::MetricDefinition.all.map do |definition|
- next unless definition.attributes[:instrumentation_class] == 'RedisHLLMetric' && definition.available?
+ next unless definition.available?
+ next unless instrumentation_classes.include?(definition.attributes[:instrumentation_class])
definition.attributes.dig(:options, :events)&.sort
end.compact.to_set
@@ -96,21 +98,17 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'source_code',
'incident_management',
'incident_management_alerts',
- 'incident_management_oncall',
'testing',
'issues_edit',
- 'ci_secrets_management',
'snippets',
'code_review',
'terraform',
'ci_templates',
'quickactions',
'pipeline_authoring',
- 'epics_usage',
'secure',
'importer',
'geo',
- 'growth',
'work_items',
'ci_users',
'error_tracking',
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
index 0bcdbe82a7a..2d251017c87 100644
--- a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -28,4 +28,12 @@ RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_
it_behaves_like 'work item unique counter'
end
+
+ describe '.track_work_item_labels_changed_action' do
+ subject(:track_event) { described_class.track_work_item_labels_changed_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_LABELS_CHANGED }
+
+ it_behaves_like 'work item unique counter'
+ end
end
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
index ed0eabf1b4d..5d58933f1fd 100644
--- a/spec/lib/gitlab/usage_data_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -31,6 +31,8 @@ RSpec.describe Gitlab::UsageDataMetrics do
it 'includes counts keys', :aggregate_failures do
expect(subject[:counts]).to include(:boards)
expect(subject[:counts]).to include(:issues)
+ expect(subject[:counts]).to include(:gitlab_for_jira_app_direct_installations)
+ expect(subject[:counts]).to include(:gitlab_for_jira_app_proxy_installations)
end
it 'includes usage_activity_by_stage keys' do
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 46ed4b57d3a..cb645ae3e53 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -624,7 +624,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'gathers usage data' do
expect(subject[:projects_with_expiration_policy_enabled]).to eq 19
- expect(subject[:projects_with_expiration_policy_disabled]).to eq 5
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1
@@ -758,13 +757,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- describe '.usage_counters' do
- subject { described_class.usage_counters }
-
- it { is_expected.to include(:kubernetes_agent_gitops_sync) }
- it { is_expected.to include(:kubernetes_agent_k8s_api_proxy_request) }
- end
-
describe '.usage_data_counters' do
subject { described_class.usage_data_counters }
@@ -1057,12 +1049,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
before do
allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
- expect(prometheus_client).to receive(:query).with(/gitlab_usage_ping:gitaly_apdex:ratio_avg_over_time_5m/).and_return([
- {
- 'metric' => {},
- 'value' => [1616016381.473, '0.95']
- }
- ])
+ expect(prometheus_client).to receive(:query)
+ .with(/gitlab_usage_ping:gitaly_apdex:ratio_avg_over_time_5m/)
+ .and_return(
+ [
+ { 'metric' => {},
+ 'value' => [1616016381.473, '0.95'] }
+ ])
expect(described_class).to receive(:with_prometheus_client).and_yield(prometheus_client)
end
diff --git a/spec/lib/gitlab/user_access_snippet_spec.rb b/spec/lib/gitlab/user_access_snippet_spec.rb
index 4143a3017e8..916e920e2ac 100644
--- a/spec/lib/gitlab/user_access_snippet_spec.rb
+++ b/spec/lib/gitlab/user_access_snippet_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::UserAccessSnippet do
end
describe '#can_push_to_branch?' do
- include ProjectHelpers
+ include UserHelpers
[:anonymous, :non_member, :guest, :reporter, :maintainer, :admin, :author].each do |membership|
context membership.to_s do
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 61323f0646b..d1fdaf7a9db 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -582,11 +582,12 @@ RSpec.describe Gitlab::Utils do
end
it 'sorts items like the regular sort_by' do
- expect(sorted_list).to eq([
- { name: 'obj 2', priority: 1 },
- { name: 'obj 1', priority: 2 },
- { name: 'obj 3', priority: 3 }
- ])
+ expect(sorted_list).to eq(
+ [
+ { name: 'obj 2', priority: 1 },
+ { name: 'obj 1', priority: 2 },
+ { name: 'obj 3', priority: 3 }
+ ])
end
end
end
diff --git a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
index 8d4629bf48b..7d96adf95e8 100644
--- a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
+++ b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
@@ -150,29 +150,6 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Terminal do
}
)
end
-
- context 'when the FF ci_variables_refactoring_to_variable is disabled' do
- let(:entry_without_ff) { described_class.new(config, with_image_ports: true) }
-
- before do
- stub_feature_flags(ci_variables_refactoring_to_variable: false)
- entry_without_ff.compose!
- end
-
- it 'returns correct value' do
- expect(entry_without_ff.value)
- .to eq(
- tag_list: ['webide'],
- job_variables: [{ key: 'KEY', value: 'value', public: true }],
- options: {
- image: { name: "image:1.0" },
- services: [{ name: "mysql" }],
- before_script: %w[ls pwd],
- script: ['sleep 100']
- }
- )
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/webpack/manifest_spec.rb b/spec/lib/gitlab/webpack/manifest_spec.rb
index 08b4774dd67..24a36258379 100644
--- a/spec/lib/gitlab/webpack/manifest_spec.rb
+++ b/spec/lib/gitlab/webpack/manifest_spec.rb
@@ -66,10 +66,11 @@ RSpec.describe Gitlab::Webpack::Manifest do
describe "webpack errors" do
context "when webpack has 'Module build failed' errors in its manifest" do
it "errors" do
- error_manifest = Gitlab::Json.parse(manifest).merge("errors" => [
- "somethingModule build failed something",
- "I am an error"
- ]).to_json
+ error_manifest = Gitlab::Json.parse(manifest).merge("errors" =>
+ [
+ "somethingModule build failed something",
+ "I am an error"
+ ]).to_json
stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: error_manifest, status: 200)
expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.to raise_error(Gitlab::Webpack::Manifest::WebpackError)
diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb
index 5626e49bfe1..31f66232f38 100644
--- a/spec/lib/gitlab/x509/signature_spec.rb
+++ b/spec/lib/gitlab/x509/signature_spec.rb
@@ -30,6 +30,20 @@ RSpec.describe Gitlab::X509::Signature do
expect(signature.verification_status).to eq(:verified)
end
+ it 'returns a verified signature if email does match, case-insensitively' do
+ signature = described_class.new(
+ X509Helpers::User1.signed_commit_signature,
+ X509Helpers::User1.signed_commit_base_data,
+ X509Helpers::User1.certificate_email.upcase,
+ X509Helpers::User1.signed_commit_time
+ )
+
+ expect(signature.x509_certificate).to have_attributes(certificate_attributes)
+ expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes)
+ expect(signature.verified_signature).to be_truthy
+ expect(signature.verification_status).to eq(:verified)
+ end
+
context "if the email matches but isn't confirmed" do
let!(:user) { create(:user, :unconfirmed, email: X509Helpers::User1.certificate_email) }
diff --git a/spec/lib/gitlab/x509/tag_spec.rb b/spec/lib/gitlab/x509/tag_spec.rb
index f52880cfc52..e20ef688db5 100644
--- a/spec/lib/gitlab/x509/tag_spec.rb
+++ b/spec/lib/gitlab/x509/tag_spec.rb
@@ -5,8 +5,8 @@ RSpec.describe Gitlab::X509::Tag do
subject(:signature) { described_class.new(project.repository, tag).signature }
describe '#signature' do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
describe 'signed tag' do
let(:tag) { project.repository.find_tag('v1.1.1') }
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 0f117f495d1..0c207161927 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -17,17 +17,19 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
let(:tier) { 'mock-tier' }
let(:database_list) do
- Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: [
- Google::Apis::SqladminV1beta4::Database.new(name: 'db_01', instance: database_instance),
- Google::Apis::SqladminV1beta4::Database.new(name: 'db_02', instance: database_instance)
- ])
+ Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(
+ items: [
+ Google::Apis::SqladminV1beta4::Database.new(name: 'db_01', instance: database_instance),
+ Google::Apis::SqladminV1beta4::Database.new(name: 'db_02', instance: database_instance)
+ ])
end
let(:user_list) do
- Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: [
- Google::Apis::SqladminV1beta4::User.new(name: 'user_01', instance: database_instance),
- Google::Apis::SqladminV1beta4::User.new(name: 'user_02', instance: database_instance)
- ])
+ Google::Apis::SqladminV1beta4::ListUsersResponse.new(
+ items: [
+ Google::Apis::SqladminV1beta4::User.new(name: 'user_01', instance: database_instance),
+ Google::Apis::SqladminV1beta4::User.new(name: 'user_02', instance: database_instance)
+ ])
end
describe '.session_key_for_redirect_uri' do
diff --git a/spec/lib/object_storage/config_spec.rb b/spec/lib/object_storage/config_spec.rb
index 9a0e83bfd5e..2a81142ea44 100644
--- a/spec/lib/object_storage/config_spec.rb
+++ b/spec/lib/object_storage/config_spec.rb
@@ -136,7 +136,6 @@ RSpec.describe ObjectStorage::Config do
let(:credentials) do
{
provider: 'Google',
- google_client_email: 'foo@gcp-project.example.com',
google_json_key_location: '/path/to/gcp.json'
}
end
diff --git a/spec/lib/peek/views/bullet_detailed_spec.rb b/spec/lib/peek/views/bullet_detailed_spec.rb
index ec2f798a320..6eaf8c50cc0 100644
--- a/spec/lib/peek/views/bullet_detailed_spec.rb
+++ b/spec/lib/peek/views/bullet_detailed_spec.rb
@@ -44,10 +44,11 @@ RSpec.describe Peek::Views::BulletDetailed do
expect(subject.key).to eq('bullet')
expect(subject.results[:calls]).to eq(2)
expect(subject.results[:warnings]).to eq([Peek::Views::BulletDetailed::WARNING_MESSAGE])
- expect(subject.results[:details]).to eq([
- { notification: 'Title 1: Body 1', backtrace: "first\nsecond\n" },
- { notification: 'Title 2: Body 2', backtrace: "first\nsecond\n" }
- ])
+ expect(subject.results[:details]).to eq(
+ [
+ { notification: 'Title 1: Body 1', backtrace: "first\nsecond\n" },
+ { notification: 'Title 2: Body 2', backtrace: "first\nsecond\n" }
+ ])
end
end
end
diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
index c5666724acf..ce368ad5bd6 100644
--- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb
@@ -207,6 +207,16 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do
it_behaves_like 'the menu entry is available'
end
+
+ context 'when config harbor registry setting is not activated' do
+ before do
+ harbor_integration.update!(active: false)
+ end
+
+ let(:harbor_registry_enabled) { true }
+
+ it_behaves_like 'the menu entry is not available'
+ end
end
end
diff --git a/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb b/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
index 25a65015847..878da747abe 100644
--- a/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be_with_refind(:project) { create(:project, :repository) }
let_it_be(:guest) do
create(:user).tap { |u| project.add_guest(u) }
end
@@ -125,6 +125,34 @@ RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
specify { is_expected.to be_nil }
end
+
+ describe 'when issues are disabled' do
+ before do
+ project.issues_enabled = false
+ project.save!
+ end
+
+ specify { is_expected.not_to be_nil }
+ end
+
+ describe 'when merge requests are disabled' do
+ before do
+ project.merge_requests_enabled = false
+ project.save!
+ end
+
+ specify { is_expected.not_to be_nil }
+ end
+
+ describe 'when the issues and merge requests are disabled' do
+ before do
+ project.issues_enabled = false
+ project.merge_requests_enabled = false
+ project.save!
+ end
+
+ specify { is_expected.to be_nil }
+ end
end
end
end
diff --git a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
index 90ff04a2064..685ba0c31c7 100644
--- a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
@@ -45,30 +45,30 @@ RSpec.describe Sidebars::Projects::Menus::DeploymentsMenu do
it { is_expected.to be_nil }
end
+ end
+
+ shared_examples 'split_operations_visibility_permissions FF disabled' do
+ before do
+ stub_feature_flags(split_operations_visibility_permissions: false)
+ end
- describe 'when split_operations_visibility_permissions FF is disabled' do
+ it { is_expected.not_to be_nil }
+
+ context 'and the feature is disabled' do
before do
- stub_feature_flags(split_operations_visibility_permissions: false)
+ project.update_attribute("#{item_id}_access_level", 'disabled')
end
it { is_expected.not_to be_nil }
+ end
- context 'and the feature is disabled' do
- before do
- project.update_attribute("#{item_id}_access_level", 'disabled')
- end
-
- it { is_expected.not_to be_nil }
+ context 'and operations is disabled' do
+ before do
+ project.update_attribute(:operations_access_level, 'disabled')
end
- context 'and operations is disabled' do
- before do
- project.update_attribute(:operations_access_level, 'disabled')
- end
-
- it do
- is_expected.to be_nil if [:environments, :feature_flags].include?(item_id)
- end
+ it do
+ is_expected.to be_nil if [:environments, :feature_flags].include?(item_id)
end
end
end
@@ -77,12 +77,14 @@ RSpec.describe Sidebars::Projects::Menus::DeploymentsMenu do
let(:item_id) { :feature_flags }
it_behaves_like 'access rights checks'
+ it_behaves_like 'split_operations_visibility_permissions FF disabled'
end
describe 'Environments' do
let(:item_id) { :environments }
it_behaves_like 'access rights checks'
+ it_behaves_like 'split_operations_visibility_permissions FF disabled'
end
describe 'Releases' do
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
index 6491ef823e9..b03269c424a 100644
--- a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
describe 'Packages Registry' do
let(:item_id) { :packages_registry }
- shared_examples 'when user can read packages' do
+ context 'when user can read packages' do
context 'when config package setting is disabled' do
it 'the menu item is not added to list of menu items' do
stub_config(packages: { enabled: false })
@@ -85,25 +85,13 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
end
end
- shared_examples 'when user cannot read packages' do
+ context 'when user cannot read packages' do
let(:user) { nil }
it 'the menu item is not added to list of menu items' do
is_expected.to be_nil
end
end
-
- it_behaves_like 'when user can read packages'
- it_behaves_like 'when user cannot read packages'
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(read_package_policy_rule: false)
- end
-
- it_behaves_like 'when user can read packages'
- it_behaves_like 'when user cannot read packages'
- end
end
describe 'Container Registry' do
@@ -178,6 +166,15 @@ RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
is_expected.not_to be_nil
end
end
+
+ context 'when config harbor registry setting is not activated' do
+ it 'does not add the menu item to the list' do
+ stub_feature_flags(harbor_registry_integration: true)
+ project.harbor_integration.update!(active: false)
+
+ is_expected.to be_nil
+ end
+ end
end
end
end
diff --git a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
index fc181947e60..f26433306b6 100644
--- a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
@@ -34,5 +34,29 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu do
end
end
end
+
+ context 'for menu items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ describe 'Contributors' do
+ let_it_be(:item_id) { :contributors }
+
+ context 'when analytics is disabled' do
+ before do
+ project.project_feature.update!(analytics_access_level: ProjectFeature::DISABLED)
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when analytics is enabled' do
+ before do
+ project.project_feature.update!(analytics_access_level: ProjectFeature::ENABLED)
+ end
+
+ it { is_expected.not_to be_nil }
+ end
+ end
+ end
end
end
diff --git a/spec/lib/system_check/incoming_email_check_spec.rb b/spec/lib/system_check/incoming_email_check_spec.rb
index 5d93b810045..cf3fd3b7967 100644
--- a/spec/lib/system_check/incoming_email_check_spec.rb
+++ b/spec/lib/system_check/incoming_email_check_spec.rb
@@ -26,11 +26,12 @@ RSpec.describe SystemCheck::IncomingEmailCheck do
end
it 'runs IMAP and mailroom checks' do
- expect(SystemCheck).to receive(:run).with('Reply by email', [
- SystemCheck::IncomingEmail::ImapAuthenticationCheck,
- SystemCheck::IncomingEmail::MailRoomEnabledCheck,
- SystemCheck::IncomingEmail::MailRoomRunningCheck
- ])
+ expect(SystemCheck).to receive(:run).with('Reply by email',
+ [
+ SystemCheck::IncomingEmail::ImapAuthenticationCheck,
+ SystemCheck::IncomingEmail::MailRoomEnabledCheck,
+ SystemCheck::IncomingEmail::MailRoomRunningCheck
+ ])
subject.multi_check
end
@@ -42,10 +43,11 @@ RSpec.describe SystemCheck::IncomingEmailCheck do
end
it 'runs mailroom checks' do
- expect(SystemCheck).to receive(:run).with('Reply by email', [
- SystemCheck::IncomingEmail::MailRoomEnabledCheck,
- SystemCheck::IncomingEmail::MailRoomRunningCheck
- ])
+ expect(SystemCheck).to receive(:run).with('Reply by email',
+ [
+ SystemCheck::IncomingEmail::MailRoomEnabledCheck,
+ SystemCheck::IncomingEmail::MailRoomRunningCheck
+ ])
subject.multi_check
end
diff --git a/spec/lib/unnested_in_filters/rewriter_spec.rb b/spec/lib/unnested_in_filters/rewriter_spec.rb
index a808aec7728..21bab42c95c 100644
--- a/spec/lib/unnested_in_filters/rewriter_spec.rb
+++ b/spec/lib/unnested_in_filters/rewriter_spec.rb
@@ -41,14 +41,15 @@ RSpec.describe UnnestedInFilters::Rewriter do
context 'when the order is a Keyset order' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'user_type',
- order_expression: User.arel_table['user_type'].desc,
- nullable: :not_nullable,
- distinct: false
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'user_type',
+ order_expression: User.arel_table['user_type'].desc,
+ nullable: :not_nullable,
+ distinct: false
+ )
+ ])
end
it { is_expected.to be_truthy }
@@ -152,14 +153,15 @@ RSpec.describe UnnestedInFilters::Rewriter do
context 'when the order is a Keyset order' do
let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'user_type',
- order_expression: User.arel_table['user_type'].desc,
- nullable: :not_nullable,
- distinct: false
- )
- ])
+ Gitlab::Pagination::Keyset::Order.build(
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'user_type',
+ order_expression: User.arel_table['user_type'].desc,
+ nullable: :not_nullable,
+ distinct: false
+ )
+ ])
end
it 'changes the query' do
diff --git a/spec/lib/version_check_spec.rb b/spec/lib/version_check_spec.rb
index 736a8f9595e..1803dd66ba7 100644
--- a/spec/lib/version_check_spec.rb
+++ b/spec/lib/version_check_spec.rb
@@ -9,6 +9,20 @@ RSpec.describe VersionCheck do
end
end
+ context 'reactive cache properties' do
+ describe '.reactive_cache_refresh_interval' do
+ it 'returns 12.hours' do
+ expect(described_class.reactive_cache_refresh_interval).to eq(12.hours)
+ end
+ end
+
+ describe '.reactive_cache_lifetime' do
+ it 'returns 7.days' do
+ expect(described_class.reactive_cache_lifetime).to eq(7.days)
+ end
+ end
+ end
+
describe '#calculate_reactive_cache' do
context 'response code is 200' do
before do
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index fce55256922..767eddb7f98 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -246,6 +246,35 @@ RSpec.describe Emails::Profile do
end
end
+ describe 'user personal access token has been revoked' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+
+ context 'when valid' do
+ subject { Notify.access_token_revoked_email(user, token.name) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'is sent to the user' do
+ is_expected.to deliver_to user.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject /^A personal access token has been revoked$/i
+ end
+
+ it 'provides the names of the token' do
+ is_expected.to have_body_text /#{token.name}/
+ end
+
+ it 'includes the email reason' do
+ is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost<\/a>}
+ end
+ end
+ end
+
describe 'SSH key notification' do
let_it_be_with_reload(:user) { create(:user) }
let_it_be(:fingerprints) { ["aa:bb:cc:dd:ee:zz"] }
@@ -375,7 +404,7 @@ RSpec.describe Emails::Profile do
end
it 'includes a link to the change password documentation' do
- is_expected.to have_body_text 'https://docs.gitlab.com/ee/user/profile/#changing-your-password'
+ is_expected.to have_body_text 'https://docs.gitlab.com/ee/user/profile/user_passwords.html#change-your-password'
end
it 'mentions two factor authentication when two factor is not enabled' do
@@ -396,6 +425,39 @@ RSpec.describe Emails::Profile do
end
end
+ describe 'user attempted sign in with wrong 2FA OTP email' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:ip) { '169.0.0.1' }
+ let_it_be(:current_time) { Time.current }
+ let_it_be(:email) { Notify.two_factor_otp_attempt_failed_email(user, ip, current_time) }
+
+ subject { email }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'is sent to the user' do
+ is_expected.to deliver_to user.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject "Attempted sign in to #{Gitlab.config.gitlab.host} using a wrong two-factor authentication code"
+ end
+
+ it 'mentions the IP address' do
+ is_expected.to have_body_text ip
+ end
+
+ it 'mentioned the time' do
+ is_expected.to have_body_text current_time.strftime('%Y-%m-%d %H:%M:%S %Z')
+ end
+
+ it 'includes a link to the change password documentation' do
+ is_expected.to have_body_text 'https://docs.gitlab.com/ee/user/profile/user_passwords.html#change-your-password'
+ end
+ end
+
describe 'disabled two-factor authentication email' do
let_it_be(:user) { create(:user) }
diff --git a/spec/migrations/20220901035725_schedule_destroy_invalid_project_members_spec.rb b/spec/migrations/20220920124709_backfill_internal_on_notes_spec.rb
index ed9f7e3cd44..f4ac6e6fc8e 100644
--- a/spec/migrations/20220901035725_schedule_destroy_invalid_project_members_spec.rb
+++ b/spec/migrations/20220920124709_backfill_internal_on_notes_spec.rb
@@ -3,19 +3,19 @@
require 'spec_helper'
require_migration!
-RSpec.describe ScheduleDestroyInvalidProjectMembers, :migration do
- let_it_be(:migration) { described_class::MIGRATION }
+RSpec.describe BackfillInternalOnNotes, :migration do
+ let(:migration) { described_class::MIGRATION }
describe '#up' do
- it 'schedules background jobs for each batch of members' do
+ it 'schedules background jobs for each batch of issues' do
migrate!
expect(migration).to have_scheduled_batched_migration(
- table_name: :members,
+ table_name: :notes,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
- max_batch_size: described_class::MAX_BATCH_SIZE
+ sub_batch_size: described_class::SUB_BATCH_SIZE
)
end
end
diff --git a/spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb b/spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb
new file mode 100644
index 00000000000..61e4af3d10c
--- /dev/null
+++ b/spec/migrations/20220921093355_schedule_backfill_namespace_details_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleBackfillNamespaceDetails, schema: 20220921093355 do
+ context 'when on gitlab.com' do
+ let(:background_migration) { described_class::MIGRATION }
+ let(:migration) { described_class.new }
+
+ before do
+ migration.up
+ end
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of projects' do
+ expect(background_migration).to(
+ have_scheduled_batched_migration(
+ table_name: :namespaces,
+ column_name: :id,
+ interval: described_class::INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migration.down
+
+ expect(described_class::MIGRATION).not_to have_scheduled_batched_migration
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb b/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb
new file mode 100644
index 00000000000..614044657ec
--- /dev/null
+++ b/spec/migrations/20220921144258_remove_orphan_group_token_users_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RemoveOrphanGroupTokenUsers, :migration, :sidekiq_inline do
+ subject(:migration) { described_class.new }
+
+ let(:users) { table(:users) }
+ let!(:orphan_bot) do
+ create_bot(username: 'orphan_bot', email: 'orphan_bot@bot.com').tap do |bot|
+ namespaces.create!(type: 'User', path: 'n1', name: 'n1', owner_id: bot.id)
+ end
+ end
+
+ let!(:valid_used_bot) do
+ create_bot(username: 'used_bot', email: 'used_bot@bot.com').tap do |bot|
+ group = namespaces.create!(type: 'Group', path: 'used_bot_group', name: 'used_bot_group')
+ members.create!(user_id: bot.id,
+ source_id: group.id,
+ member_namespace_id: group.id,
+ source_type: 'Group',
+ access_level: 10,
+ notification_level: 0)
+ end
+ end
+
+ let!(:different_bot) do
+ create_bot(username: 'other_bot', email: 'other_bot@bot.com', user_type: 5)
+ end
+
+ let(:personal_access_tokens) { table(:personal_access_tokens) }
+ let(:members) { table(:members) }
+ let(:namespaces) { table(:namespaces) }
+
+ before do
+ stub_feature_flags(user_destroy_with_limited_execution_time_worker: false)
+ end
+
+ it 'removes orphan project bot and its tokens', :aggregate_failures do
+ expect(DeleteUserWorker)
+ .to receive(:perform_async)
+ .with(orphan_bot.id, orphan_bot.id, skip_authorization: true)
+ .and_call_original
+
+ migrate!
+
+ expect(users.count).to eq 2
+ expect(personal_access_tokens.count).to eq 2
+ expect(personal_access_tokens.find_by(user_id: orphan_bot.id)).to eq nil
+ end
+
+ context "when DeleteUserWorker doesn't fit anymore" do
+ it 'removes project bot tokens only', :aggregate_failures do
+ allow(DeleteUserWorker).to receive(:respond_to?).and_call_original
+ allow(DeleteUserWorker).to receive(:respond_to?).with(:perform_async).and_return(false)
+
+ migrate!
+
+ expect(users.count).to eq 3
+ expect(personal_access_tokens.count).to eq 2
+ expect(personal_access_tokens.find_by(user_id: orphan_bot.id)).to eq nil
+ end
+ end
+
+ private
+
+ def create_bot(**params)
+ users.create!({ projects_limit: 0, state: 'active', user_type: 6 }.merge(params)).tap do |bot|
+ personal_access_tokens.create!(user_id: bot.id, name: "BOT##{bot.id}")
+ end
+ end
+end
diff --git a/spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb b/spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb
new file mode 100644
index 00000000000..409f7d544ee
--- /dev/null
+++ b/spec/migrations/20220922143143_schedule_reset_duplicate_ci_runners_token_values_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleResetDuplicateCiRunnersTokenValues, migration: :gitlab_ci do
+ let(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of runners' do
+ migrate!
+
+ expect(migration).to(
+ have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_runners,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb b/spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb
new file mode 100644
index 00000000000..4f3103927d5
--- /dev/null
+++ b/spec/migrations/20220922143634_schedule_reset_duplicate_ci_runners_token_encrypted_values_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleResetDuplicateCiRunnersTokenEncryptedValues, migration: :gitlab_ci do
+ let(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of runners' do
+ migrate!
+
+ expect(migration).to(
+ have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_runners,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb b/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb
new file mode 100644
index 00000000000..7e3f8caa966
--- /dev/null
+++ b/spec/migrations/20220928225711_schedule_update_ci_pipeline_artifacts_locked_status_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleUpdateCiPipelineArtifactsLockedStatus, migration: :gitlab_ci do
+ let_it_be(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of ci_pipeline_artifacts' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ gitlab_schema: :gitlab_ci,
+ table_name: :ci_pipeline_artifacts,
+ column_name: :id,
+ batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb b/spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..9220b5e8a95
--- /dev/null
+++ b/spec/migrations/20220929213730_schedule_delete_orphaned_operational_vulnerabilities_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleDeleteOrphanedOperationalVulnerabilities do
+ let_it_be(:migration) { described_class.new }
+ let_it_be(:post_migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of vulnerabilities' do
+ migration.up
+
+ expect(post_migration).to(
+ have_scheduled_batched_migration(
+ table_name: :vulnerabilities,
+ column_name: :id,
+ interval: described_class::INTERVAL,
+ batch_size: described_class::BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migration.down
+
+ expect(post_migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20220809002011_schedule_destroy_invalid_group_members_spec.rb b/spec/migrations/20221004094814_schedule_destroy_invalid_members_spec.rb
index 31dd4344d9f..73fdfa78eb4 100644
--- a/spec/migrations/20220809002011_schedule_destroy_invalid_group_members_spec.rb
+++ b/spec/migrations/20221004094814_schedule_destroy_invalid_members_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe ScheduleDestroyInvalidGroupMembers, :migration do
+RSpec.describe ScheduleDestroyInvalidMembers, :migration do
let_it_be(:migration) { described_class::MIGRATION }
describe '#up' do
diff --git a/spec/migrations/20221008032350_add_password_expiration_migration_spec.rb b/spec/migrations/20221008032350_add_password_expiration_migration_spec.rb
new file mode 100644
index 00000000000..05e557f1f52
--- /dev/null
+++ b/spec/migrations/20221008032350_add_password_expiration_migration_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddPasswordExpirationMigration do
+ let(:application_setting) { table(:application_settings).create! }
+
+ describe "#up" do
+ it 'allows to read password expiration fields' do
+ migrate!
+
+ expect(application_setting.password_expiration_enabled).to eq false
+ expect(application_setting.password_expires_in_days).to eq 90
+ expect(application_setting.password_expires_notice_before_days).to eq 7
+ end
+ end
+end
diff --git a/spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb b/spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb
new file mode 100644
index 00000000000..46a7b097d02
--- /dev/null
+++ b/spec/migrations/20221012033107_add_password_last_changed_at_to_user_details_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddPasswordLastChangedAtToUserDetails do
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let_it_be(:users) { table(:users) }
+ let_it_be(:user) { create_user! }
+ let(:user_detail) { table(:user_details).create!(user_id: user.id, provisioned_by_group_id: namespace.id) }
+
+ describe "#up" do
+ it 'allows to read password_last_changed_at' do
+ migrate!
+
+ expect(user_detail.password_last_changed_at).to eq nil
+ end
+ end
+
+ private
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: Time.current
+ )
+ end
+end
diff --git a/spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb b/spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb
new file mode 100644
index 00000000000..eac71e428be
--- /dev/null
+++ b/spec/migrations/20221013154159_update_invalid_dormant_user_setting_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe UpdateInvalidDormantUserSetting, :migration do
+ let(:settings) { table(:application_settings) }
+
+ context 'with no rows in the application_settings table' do
+ it 'does not insert a row' do
+ expect { migrate! }.to not_change { settings.count }
+ end
+ end
+
+ context 'with a row in the application_settings table' do
+ before do
+ settings.create!(deactivate_dormant_users_period: days)
+ end
+
+ context 'with deactivate_dormant_users_period set to a value greater than or equal to 90' do
+ let(:days) { 90 }
+
+ it 'does not update the row' do
+ expect { migrate! }
+ .to not_change { settings.count }
+ .and not_change { settings.first.deactivate_dormant_users_period }
+ end
+ end
+
+ context 'with deactivate_dormant_users_period set to a value less than or equal to 90' do
+ let(:days) { 1 }
+
+ it 'updates the existing row' do
+ expect { migrate! }
+ .to not_change { settings.count }
+ .and change { settings.first.deactivate_dormant_users_period }
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb b/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb
index fb62fc3ca02..0ae4559ca9f 100644
--- a/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb
+++ b/spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb
@@ -72,12 +72,14 @@ RSpec.describe AddPremiumAndUltimatePlanLimits, :migration do
it 'creates plan limits from the source plan' do
migrate!
- expect(AddPremiumAndUltimatePlanLimits::PlanLimits.pluck(:plan_id, :storage_size_limit)).to match_array([
- [silver.id, silver_limits.storage_size_limit],
- [gold.id, gold_limits.storage_size_limit],
- [premium.id, silver_limits.storage_size_limit],
- [ultimate.id, gold_limits.storage_size_limit]
- ])
+ expect(AddPremiumAndUltimatePlanLimits::PlanLimits.pluck(:plan_id, :storage_size_limit))
+ .to match_array(
+ [
+ [silver.id, silver_limits.storage_size_limit],
+ [gold.id, gold_limits.storage_size_limit],
+ [premium.id, silver_limits.storage_size_limit],
+ [ultimate.id, gold_limits.storage_size_limit]
+ ])
end
end
end
diff --git a/spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb b/spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb
new file mode 100644
index 00000000000..422d0655e36
--- /dev/null
+++ b/spec/migrations/adjust_task_note_rename_background_migration_values_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe AdjustTaskNoteRenameBackgroundMigrationValues, :migration do
+ let(:finished_status) { 3 }
+ let(:failed_status) { described_class::MIGRATION_FAILED_STATUS }
+ let(:active_status) { described_class::MIGRATION_ACTIVE_STATUS }
+
+ shared_examples 'task note migration with failing batches' do
+ it 'updates batch sizes and resets failed batches' do
+ migration = create_background_migration(status: initial_status)
+ batches = []
+
+ batches << create_failed_batched_job(migration)
+ batches << create_failed_batched_job(migration)
+
+ migrate!
+
+ expect(described_class::JOB_CLASS_NAME).to have_scheduled_batched_migration(
+ table_name: :system_note_metadata,
+ column_name: :id,
+ interval: 2.minutes,
+ batch_size: described_class::NEW_BATCH_SIZE,
+ max_batch_size: 20_000,
+ sub_batch_size: described_class::NEW_SUB_BATCH_SIZE
+ )
+ expect(migration.reload.status).to eq(active_status)
+
+ updated_batches = batches.map { |b| b.reload.attributes.slice('attempts', 'sub_batch_size') }
+ expect(updated_batches).to all(eq("attempts" => 0, "sub_batch_size" => 10))
+ end
+ end
+
+ describe '#up' do
+ context 'when migration was already finished' do
+ it 'does not update batch sizes' do
+ create_background_migration(status: finished_status)
+
+ migrate!
+
+ expect(described_class::JOB_CLASS_NAME).to have_scheduled_batched_migration(
+ table_name: :system_note_metadata,
+ column_name: :id,
+ interval: 2.minutes,
+ batch_size: described_class::OLD_BATCH_SIZE,
+ max_batch_size: 20_000,
+ sub_batch_size: described_class::OLD_SUB_BATCH_SIZE
+ )
+ end
+ end
+
+ context 'when the migration had failing batches' do
+ context 'when migration had a failed status' do
+ it_behaves_like 'task note migration with failing batches' do
+ let(:initial_status) { failed_status }
+ end
+
+ it 'updates started_at timestamp' do
+ migration = create_background_migration(status: failed_status)
+ now = Time.zone.now
+
+ travel_to now do
+ migrate!
+ migration.reload
+ end
+
+ expect(migration.started_at).to be_like_time(now)
+ end
+ end
+
+ context 'when migration had an active status' do
+ it_behaves_like 'task note migration with failing batches' do
+ let(:initial_status) { active_status }
+ end
+
+ it 'does not update started_at timestamp' do
+ migration = create_background_migration(status: active_status)
+ original_time = migration.started_at
+
+ migrate!
+ migration.reload
+
+ expect(migration.started_at).to be_like_time(original_time)
+ end
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'reverts to old batch sizes' do
+ create_background_migration(status: finished_status)
+
+ migrate!
+ schema_migrate_down!
+
+ expect(described_class::JOB_CLASS_NAME).to have_scheduled_batched_migration(
+ table_name: :system_note_metadata,
+ column_name: :id,
+ interval: 2.minutes,
+ batch_size: described_class::OLD_BATCH_SIZE,
+ max_batch_size: 20_000,
+ sub_batch_size: described_class::OLD_SUB_BATCH_SIZE
+ )
+ end
+ end
+
+ def create_failed_batched_job(migration)
+ table(:batched_background_migration_jobs).create!(
+ batched_background_migration_id: migration.id,
+ status: described_class::JOB_FAILED_STATUS,
+ min_value: 1,
+ max_value: 10,
+ attempts: 3,
+ batch_size: described_class::OLD_BATCH_SIZE,
+ sub_batch_size: described_class::OLD_SUB_BATCH_SIZE
+ )
+ end
+
+ def create_background_migration(status:)
+ migrations_table = table(:batched_background_migrations)
+ # make sure we only have one migration with that job class name in the specs
+ migrations_table.where(job_class_name: described_class::JOB_CLASS_NAME).delete_all
+
+ migrations_table.create!(
+ job_class_name: described_class::JOB_CLASS_NAME,
+ status: status,
+ max_value: 10,
+ max_batch_size: 20_000,
+ batch_size: described_class::OLD_BATCH_SIZE,
+ sub_batch_size: described_class::OLD_SUB_BATCH_SIZE,
+ interval: 2.minutes,
+ table_name: :system_note_metadata,
+ column_name: :id,
+ total_tuple_count: 100_000,
+ pause_ms: 100,
+ gitlab_schema: :gitlab_main,
+ job_arguments: [],
+ started_at: 2.days.ago
+ )
+ end
+end
diff --git a/spec/migrations/backfill_epic_cache_counts_spec.rb b/spec/migrations/backfill_epic_cache_counts_spec.rb
new file mode 100644
index 00000000000..6084fdad0a6
--- /dev/null
+++ b/spec/migrations/backfill_epic_cache_counts_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillEpicCacheCounts, :migration do
+ let(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules a batched background migration' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :epics,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb b/spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb
index da32e92ebb0..87855285203 100644
--- a/spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb
+++ b/spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb
@@ -57,36 +57,39 @@ RSpec.describe BackfillEscalationPoliciesForOncallSchedules do
expect(new_polices).to all have_attributes(name: 'On-call Escalation Policy')
expect(new_policy_b1.description).to eq('Immediately notify Schedule B1')
expect(new_policy_c1.description).to eq('Immediately notify Schedule C1')
- expect(policies.pluck(:project_id)).to eq([
- project_d.id,
- project_e.id,
- project_f.id,
- project_f.id,
- project_b.id,
- project_c.id
- ])
+ expect(policies.pluck(:project_id)).to eq(
+ [
+ project_d.id,
+ project_e.id,
+ project_f.id,
+ project_f.id,
+ project_b.id,
+ project_c.id
+ ])
expect(new_rules).to all have_attributes(status: 1, elapsed_time_seconds: 0)
- expect(rules.pluck(:policy_id)).to eq([
- rule_d1.policy_id,
- rule_e1.policy_id,
- rule_f1.policy_id,
- rule_f2.policy_id,
- rule_f3.policy_id,
- new_policy_b1.id,
- new_policy_c1.id,
- new_policy_c1.id
- ])
- expect(rules.pluck(:oncall_schedule_id)).to eq([
- rule_d1.oncall_schedule_id,
- rule_e1.oncall_schedule_id,
- rule_f1.oncall_schedule_id,
- rule_f2.oncall_schedule_id,
- rule_f3.oncall_schedule_id,
- schedule_b1.id,
- schedule_c1.id,
- schedule_c2.id
- ])
+ expect(rules.pluck(:policy_id)).to eq(
+ [
+ rule_d1.policy_id,
+ rule_e1.policy_id,
+ rule_f1.policy_id,
+ rule_f2.policy_id,
+ rule_f3.policy_id,
+ new_policy_b1.id,
+ new_policy_c1.id,
+ new_policy_c1.id
+ ])
+ expect(rules.pluck(:oncall_schedule_id)).to eq(
+ [
+ rule_d1.oncall_schedule_id,
+ rule_e1.oncall_schedule_id,
+ rule_f1.oncall_schedule_id,
+ rule_f2.oncall_schedule_id,
+ rule_f3.oncall_schedule_id,
+ schedule_b1.id,
+ schedule_c1.id,
+ schedule_c2.id
+ ])
end
end
diff --git a/spec/migrations/populate_releases_access_level_from_repository_spec.rb b/spec/migrations/populate_releases_access_level_from_repository_spec.rb
new file mode 100644
index 00000000000..2bb97662923
--- /dev/null
+++ b/spec/migrations/populate_releases_access_level_from_repository_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe PopulateReleasesAccessLevelFromRepository, :migration do
+ let(:projects) { table(:projects) }
+ let(:groups) { table(:namespaces) }
+ let(:project_features) { table(:project_features) }
+
+ let(:group) { groups.create!(name: 'test-group', path: 'test-group') }
+ let(:project) { projects.create!(namespace_id: group.id, project_namespace_id: group.id) }
+ let(:project_feature) do
+ project_features.create!(project_id: project.id, pages_access_level: 20, **project_feature_attributes)
+ end
+
+ # repository_access_level and releases_access_level default to ENABLED
+ describe '#up' do
+ context 'when releases_access_level is greater than repository_access_level' do
+ let(:project_feature_attributes) { { repository_access_level: ProjectFeature::PRIVATE } }
+
+ it 'reduces releases_access_level to match repository_access_level' do
+ expect { migrate! }.to change { project_feature.reload.releases_access_level }
+ .from(ProjectFeature::ENABLED)
+ .to(ProjectFeature::PRIVATE)
+ end
+ end
+
+ context 'when releases_access_level is less than repository_access_level' do
+ let(:project_feature_attributes) { { releases_access_level: ProjectFeature::DISABLED } }
+
+ it 'does not change releases_access_level' do
+ expect { migrate! }.not_to change { project_feature.reload.releases_access_level }
+ .from(ProjectFeature::DISABLED)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb b/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
index e03dd73ec8b..b03a5c41a11 100644
--- a/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
+++ b/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
@@ -49,12 +49,13 @@ RSpec.describe SliceMergeRequestDiffCommitMigrations, :migration do
.pending
.to_a
- expect(new_jobs.map(&:arguments)).to eq([
- [1, 5_001],
- [5_001, 10_001],
- [10_001, 15_001],
- [15_001, 20_001]
- ])
+ expect(new_jobs.map(&:arguments)).to eq(
+ [
+ [1, 5_001],
+ [5_001, 10_001],
+ [10_001, 15_001],
+ [15_001, 20_001]
+ ])
end
it 'schedules a background migration for the first job' do
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index a67f9fec443..697b7aee022 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -48,10 +48,11 @@ RSpec.describe Analytics::CycleAnalytics::ProjectStage do
subject(:distinct_start_and_end_event_identifiers) { described_class.distinct_stages_within_hierarchy(top_level_group).to_a.pluck(:start_event_identifier, :end_event_identifier) }
it 'returns distinct stages by start and end events (using stage_event_hash_id)' do
- expect(distinct_start_and_end_event_identifiers).to match_array([
- %w[issue_created issue_deployed_to_production],
- %w[merge_request_created merge_request_merged]
- ])
+ expect(distinct_start_and_end_event_identifiers).to match_array(
+ [
+ %w[issue_created issue_deployed_to_production],
+ %w[merge_request_created merge_request_merged]
+ ])
end
end
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index b5f153e7add..77bb6b502b5 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -203,6 +203,17 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value([]).for(:valid_runner_registrars) }
it { is_expected.to allow_value(%w(project group)).for(:valid_runner_registrars) }
+ context 'when deactivate_dormant_users is enabled' do
+ before do
+ stub_application_setting(deactivate_dormant_users: true)
+ end
+
+ it { is_expected.not_to allow_value(nil).for(:deactivate_dormant_users_period) }
+ it { is_expected.to allow_value(90).for(:deactivate_dormant_users_period) }
+ it { is_expected.to allow_value(365).for(:deactivate_dormant_users_period) }
+ it { is_expected.not_to allow_value(89).for(:deactivate_dormant_users_period) }
+ end
+
context 'help_page_documentation_base_url validations' do
it { is_expected.to allow_value(nil).for(:help_page_documentation_base_url) }
it { is_expected.to allow_value('https://docs.gitlab.com').for(:help_page_documentation_base_url) }
@@ -257,11 +268,12 @@ RSpec.describe ApplicationSetting do
subject.grafana_url = ' ' + http
expect(subject.save).to be false
- expect(subject.errors[:grafana_url]).to eq([
- 'must be a valid relative or absolute URL. ' \
- 'Please check your Grafana URL setting in ' \
- 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
- ])
+ expect(subject.errors[:grafana_url]).to eq(
+ [
+ 'must be a valid relative or absolute URL. ' \
+ 'Please check your Grafana URL setting in ' \
+ 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
+ ])
end
end
@@ -270,11 +282,12 @@ RSpec.describe ApplicationSetting do
subject.grafana_url = javascript
expect(subject.save).to be false
- expect(subject.errors[:grafana_url]).to eq([
- 'is blocked: Only allowed schemes are http, https. Please check your ' \
- 'Grafana URL setting in ' \
- 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
- ])
+ expect(subject.errors[:grafana_url]).to eq(
+ [
+ 'is blocked: Only allowed schemes are http, https. Please check your ' \
+ 'Grafana URL setting in ' \
+ 'Admin Area > Settings > Metrics and profiling > Metrics - Grafana'
+ ])
end
end
end
@@ -1453,4 +1466,10 @@ RSpec.describe ApplicationSetting do
expect(setting.personal_access_token_prefix).to eql('glpat-')
end
end
+
+ describe '.personal_access_tokens_disabled?' do
+ it 'is false' do
+ expect(setting.personal_access_tokens_disabled?).to eq(false)
+ end
+ end
end
diff --git a/spec/models/award_emoji_spec.rb b/spec/models/award_emoji_spec.rb
index 4da19267b1c..2593c9b3595 100644
--- a/spec/models/award_emoji_spec.rb
+++ b/spec/models/award_emoji_spec.rb
@@ -290,4 +290,13 @@ RSpec.describe AwardEmoji do
end
end
end
+
+ describe '#to_ability_name' do
+ let(:merge_request) { create(:merge_request) }
+ let(:award_emoji) { build(:award_emoji, user: build(:user), awardable: merge_request) }
+
+ it 'returns correct ability name' do
+ expect(award_emoji.to_ability_name).to be('emoji')
+ end
+ end
end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 874009d552a..f4f2b174a7b 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -46,6 +46,8 @@ RSpec.describe BulkImports::Entity, type: :model do
end
it 'is invalid as a project_entity' do
+ stub_feature_flags(bulk_import_projects: true)
+
entity = build(:bulk_import_entity, :project_entity, group: build(:group), project: nil)
expect(entity).not_to be_valid
@@ -55,6 +57,8 @@ RSpec.describe BulkImports::Entity, type: :model do
context 'when associated with a project and no group' do
it 'is valid' do
+ stub_feature_flags(bulk_import_projects: true)
+
entity = build(:bulk_import_entity, :project_entity, group: nil, project: build(:project))
expect(entity).to be_valid
@@ -84,6 +88,8 @@ RSpec.describe BulkImports::Entity, type: :model do
context 'when the parent is a project import' do
it 'is invalid' do
+ stub_feature_flags(bulk_import_projects: true)
+
entity = build(:bulk_import_entity, parent: build(:bulk_import_entity, :project_entity))
expect(entity).not_to be_valid
@@ -124,6 +130,39 @@ RSpec.describe BulkImports::Entity, type: :model do
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
end
end
+
+ context 'when bulk_import_projects feature flag is disabled and source_type is a project_entity' do
+ it 'is invalid' do
+ stub_feature_flags(bulk_import_projects: false)
+
+ entity = build(:bulk_import_entity, :project_entity)
+
+ expect(entity).not_to be_valid
+ expect(entity.errors[:base]).to include('invalid entity source type')
+ end
+ end
+
+ context 'when bulk_import_projects feature flag is enabled and source_type is a project_entity' do
+ it 'is valid' do
+ stub_feature_flags(bulk_import_projects: true)
+
+ entity = build(:bulk_import_entity, :project_entity)
+
+ expect(entity).to be_valid
+ end
+ end
+
+ context 'when bulk_import_projects feature flag is enabled on root ancestor level and source_type is a project_entity' do
+ it 'is valid' do
+ top_level_namespace = create(:group)
+
+ stub_feature_flags(bulk_import_projects: top_level_namespace)
+
+ entity = build(:bulk_import_entity, :project_entity, destination_namespace: top_level_namespace.full_path)
+
+ expect(entity).to be_valid
+ end
+ end
end
describe '#encoded_source_full_path' do
@@ -209,7 +248,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns group export relations url' do
entity = build(:bulk_import_entity, :group_entity)
- expect(entity.export_relations_url_path).to eq("/groups/#{entity.encoded_source_full_path}/export_relations")
+ expect(entity.export_relations_url_path).to eq("/groups/#{entity.source_xid}/export_relations")
end
end
@@ -217,7 +256,7 @@ RSpec.describe BulkImports::Entity, type: :model do
it 'returns project export relations url' do
entity = build(:bulk_import_entity, :project_entity)
- expect(entity.export_relations_url_path).to eq("/projects/#{entity.encoded_source_full_path}/export_relations")
+ expect(entity.export_relations_url_path).to eq("/projects/#{entity.source_xid}/export_relations")
end
end
end
@@ -227,7 +266,7 @@ RSpec.describe BulkImports::Entity, type: :model do
entity = build(:bulk_import_entity)
expect(entity.relation_download_url_path('test'))
- .to eq("/groups/#{entity.encoded_source_full_path}/export_relations/download?relation=test")
+ .to eq("/groups/#{entity.source_xid}/export_relations/download?relation=test")
end
end
@@ -263,15 +302,15 @@ RSpec.describe BulkImports::Entity, type: :model do
describe '#base_resource_url_path' do
it 'returns base entity url path' do
- entity = build(:bulk_import_entity)
+ entity = build(:bulk_import_entity, source_xid: nil)
- expect(entity.base_resource_url_path).to eq("/groups/#{entity.encoded_source_full_path}")
+ expect(entity.base_resource_path).to eq("/groups/#{entity.encoded_source_full_path}")
end
end
describe '#wiki_url_path' do
it 'returns entity wiki url path' do
- entity = build(:bulk_import_entity)
+ entity = build(:bulk_import_entity, source_xid: nil)
expect(entity.wikis_url_path).to eq("/groups/#{entity.encoded_source_full_path}/wikis")
end
diff --git a/spec/models/bulk_imports/export_status_spec.rb b/spec/models/bulk_imports/export_status_spec.rb
index 6ade82409dc..0921c3bdce2 100644
--- a/spec/models/bulk_imports/export_status_spec.rb
+++ b/spec/models/bulk_imports/export_status_spec.rb
@@ -157,12 +157,36 @@ RSpec.describe BulkImports::ExportStatus do
end
context 'when something goes wrong during export status fetch' do
- it 'returns exception class as error' do
+ let(:exception) { BulkImports::NetworkError.new('Error!') }
+
+ before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
- allow(client).to receive(:get).and_raise(StandardError, 'Error!')
+ allow(client).to receive(:get).once.and_raise(exception)
end
+ end
+
+ it 'raises RetryPipelineError' do
+ allow(exception).to receive(:retriable?).with(tracker).and_return(true)
+
+ expect { subject.failed? }.to raise_error(BulkImports::RetryPipelineError)
+ end
- expect(subject.error).to eq('Error!')
+ context 'when error is not retriable' do
+ it 'returns exception class as error' do
+ expect(subject.error).to eq('Error!')
+ expect(subject.failed?).to eq(true)
+ end
+ end
+
+ context 'when error raised is not a network error' do
+ it 'returns exception class as error' do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:get).once.and_raise(StandardError, 'Standard Error!')
+ end
+
+ expect(subject.error).to eq('Standard Error!')
+ expect(subject.failed?).to eq(true)
+ end
end
end
end
diff --git a/spec/models/bulk_imports/failure_spec.rb b/spec/models/bulk_imports/failure_spec.rb
index cde62659a48..b3fd60ba348 100644
--- a/spec/models/bulk_imports/failure_spec.rb
+++ b/spec/models/bulk_imports/failure_spec.rb
@@ -3,15 +3,45 @@
require 'spec_helper'
RSpec.describe BulkImports::Failure, type: :model do
+ let(:failure) { create(:bulk_import_failure) }
+
describe 'associations' do
it { is_expected.to belong_to(:entity).required }
end
describe 'validations' do
- before do
- create(:bulk_import_failure)
+ it { is_expected.to validate_presence_of(:entity) }
+ end
+
+ describe '#relation' do
+ context 'when pipeline class is valid' do
+ it 'returns pipeline defined relation' do
+ failure.update!(pipeline_class: 'BulkImports::Common::Pipelines::WikiPipeline')
+
+ expect(failure.relation).to eq('wiki')
+ end
end
- it { is_expected.to validate_presence_of(:entity) }
+ context 'when pipeline class is invalid' do
+ it 'returns default relation' do
+ failure.update!(pipeline_class: 'foobar')
+
+ expect(failure.relation).to eq('foobar')
+ end
+
+ context 'when pipeline class is outside of BulkImports namespace' do
+ it 'returns default relation' do
+ failure.update!(pipeline_class: 'Gitlab::ImportExport::Importer')
+
+ expect(failure.relation).to eq('importer')
+ end
+ end
+
+ it 'returns demodulized, underscored, chomped string' do
+ failure.update!(pipeline_class: 'BulkImports::Pipelines::Test::TestRelationPipeline')
+
+ expect(failure.relation).to eq('test_relation')
+ end
+ end
end
end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 40c2d62c465..44a6bec0130 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -86,9 +86,9 @@ RSpec.describe Ci::Bridge do
describe '#scoped_variables' do
it 'returns a hash representing variables' do
variables = %w[
- CI_JOB_NAME CI_JOB_STAGE CI_COMMIT_SHA CI_COMMIT_SHORT_SHA
- CI_COMMIT_BEFORE_SHA CI_COMMIT_REF_NAME CI_COMMIT_REF_SLUG
- CI_PROJECT_ID CI_PROJECT_NAME CI_PROJECT_PATH
+ CI_JOB_NAME CI_JOB_NAME_SLUG CI_JOB_STAGE CI_COMMIT_SHA
+ CI_COMMIT_SHORT_SHA CI_COMMIT_BEFORE_SHA CI_COMMIT_REF_NAME
+ CI_COMMIT_REF_SLUG CI_PROJECT_ID CI_PROJECT_NAME CI_PROJECT_PATH
CI_PROJECT_PATH_SLUG CI_PROJECT_NAMESPACE CI_PROJECT_ROOT_NAMESPACE
CI_PIPELINE_IID CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE
CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION CI_COMMIT_REF_PROTECTED
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
index e904463a5ca..16cff72db64 100644
--- a/spec/models/ci/build_metadata_spec.rb
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -14,8 +14,8 @@ RSpec.describe Ci::BuildMetadata do
status: 'success')
end
- let(:build) { create(:ci_build, pipeline: pipeline) }
- let(:metadata) { build.metadata }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
+ let(:metadata) { job.metadata }
it_behaves_like 'having unique enum values'
@@ -35,7 +35,7 @@ RSpec.describe Ci::BuildMetadata do
context 'when project timeout is set' do
context 'when runner is assigned to the job' do
before do
- build.update!(runner: runner)
+ job.update!(runner: runner)
end
context 'when runner timeout is not set' do
@@ -59,13 +59,13 @@ RSpec.describe Ci::BuildMetadata do
context 'when job timeout is set' do
context 'when job timeout is higher than project timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
it_behaves_like 'sets timeout', 'job_timeout_source', 3000
end
context 'when job timeout is lower than project timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1000 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1000 }) }
it_behaves_like 'sets timeout', 'job_timeout_source', 1000
end
@@ -73,18 +73,18 @@ RSpec.describe Ci::BuildMetadata do
context 'when both runner and job timeouts are set' do
before do
- build.update!(runner: runner)
+ job.update!(runner: runner)
end
context 'when job timeout is higher than runner timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 3000 }) }
let(:runner) { create(:ci_runner, maximum_timeout: 2100) }
it_behaves_like 'sets timeout', 'runner_timeout_source', 2100
end
context 'when job timeout is lower than runner timeout' do
- let(:build) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1900 }) }
+ let(:job) { create(:ci_build, pipeline: pipeline, options: { job_timeout: 1900 }) }
let(:runner) { create(:ci_runner, maximum_timeout: 2100) }
it_behaves_like 'sets timeout', 'job_timeout_source', 1900
@@ -135,20 +135,51 @@ RSpec.describe Ci::BuildMetadata do
describe 'set_cancel_gracefully' do
it 'sets cancel_gracefully' do
- build.set_cancel_gracefully
+ job.set_cancel_gracefully
- expect(build.cancel_gracefully?).to be true
+ expect(job.cancel_gracefully?).to be true
end
it 'returns false' do
- expect(build.cancel_gracefully?).to be false
+ expect(job.cancel_gracefully?).to be false
end
end
context 'loose foreign key on ci_builds_metadata.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
- let!(:model) { create(:ci_build_metadata, project: parent) }
+ let!(:parent) { project }
+ let!(:model) { metadata }
+ end
+ end
+
+ describe 'partitioning' do
+ context 'with job' do
+ let(:status) { build(:commit_status, partition_id: 123) }
+ let(:metadata) { build(:ci_build_metadata, build: status) }
+
+ it 'copies the partition_id from job' do
+ expect { metadata.valid? }.to change(metadata, :partition_id).to(123)
+ end
+
+ context 'when it is already set' do
+ let(:metadata) { build(:ci_build_metadata, build: status, partition_id: 125) }
+
+ it 'does not change the partition_id value' do
+ expect { metadata.valid? }.not_to change(metadata, :partition_id)
+ end
+ end
+ end
+
+ context 'without job' do
+ subject(:metadata) do
+ build(:ci_build_metadata, build: nil)
+ end
+
+ it { is_expected.to validate_presence_of(:partition_id) }
+
+ it 'does not change the partition_id value' do
+ expect { metadata.valid? }.not_to change(metadata, :partition_id)
+ end
end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 7ee381b29ea..9713734e97a 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -160,6 +160,42 @@ RSpec.describe Ci::Build do
end
end
+ describe '.with_erasable_artifacts' do
+ subject { described_class.with_erasable_artifacts }
+
+ context 'when job does not have any artifacts' do
+ let!(:job) { create(:ci_build) }
+
+ it 'does not return the job' do
+ is_expected.not_to include(job)
+ end
+ end
+
+ ::Ci::JobArtifact.erasable_file_types.each do |type|
+ context "when job has a #{type} artifact" do
+ it 'returns the job' do
+ job = create(:ci_build)
+ create(
+ :ci_job_artifact,
+ file_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[type.to_sym],
+ file_type: type,
+ job: job
+ )
+
+ is_expected.to include(job)
+ end
+ end
+ end
+
+ context 'when job has a non-erasable artifact' do
+ let!(:job) { create(:ci_build, :trace_artifact) }
+
+ it 'does not return the job' do
+ is_expected.not_to include(job)
+ end
+ end
+ end
+
describe '.with_live_trace' do
subject { described_class.with_live_trace }
@@ -284,10 +320,10 @@ RSpec.describe Ci::Build do
let(:artifact_scope) { Ci::JobArtifact.where(file_type: 'archive') }
- let!(:build_1) { create(:ci_build, :artifacts) }
- let!(:build_2) { create(:ci_build, :codequality_reports) }
- let!(:build_3) { create(:ci_build, :test_reports) }
- let!(:build_4) { create(:ci_build, :artifacts) }
+ let!(:build_1) { create(:ci_build, :artifacts, pipeline: pipeline) }
+ let!(:build_2) { create(:ci_build, :codequality_reports, pipeline: pipeline) }
+ let!(:build_3) { create(:ci_build, :test_reports, pipeline: pipeline) }
+ let!(:build_4) { create(:ci_build, :artifacts, pipeline: pipeline) }
it 'returns artifacts matching the given scope' do
expect(builds).to contain_exactly(build_1, build_4)
@@ -596,15 +632,6 @@ RSpec.describe Ci::Build do
it { expect(subject).to be_falsey }
end
- context 'when prevent_outdated_deployment_jobs FF is disabled' do
- before do
- stub_feature_flags(prevent_outdated_deployment_jobs: false)
- expect(build.deployment).not_to receive(:rollback?)
- end
-
- it { expect(subject).to be_falsey }
- end
-
context 'when build can prevent rollback deployment' do
before do
expect(build.deployment).to receive(:older_than_last_successful_deployment?).and_return(true)
@@ -2668,6 +2695,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_JOB_JWT_V1', value: 'ci.job.jwt', public: false, masked: true },
{ key: 'CI_JOB_JWT_V2', value: 'ci.job.jwtv2', public: false, masked: true },
{ key: 'CI_JOB_NAME', value: 'test', public: true, masked: false },
+ { key: 'CI_JOB_NAME_SLUG', value: 'test', public: true, masked: false },
{ key: 'CI_JOB_STAGE', value: 'test', public: true, masked: false },
{ key: 'CI_NODE_TOTAL', value: '1', public: true, masked: false },
{ key: 'CI_BUILD_NAME', value: 'test', public: true, masked: false },
@@ -2780,6 +2808,14 @@ RSpec.describe Ci::Build do
end
end
+ context 'when the opt_in_jwt project setting is true' do
+ it 'does not include the JWT variables' do
+ project.ci_cd_settings.update!(opt_in_jwt: true)
+
+ expect(subject.pluck(:key)).not_to include('CI_JOB_JWT', 'CI_JOB_JWT_V1', 'CI_JOB_JWT_V2')
+ end
+ end
+
describe 'variables ordering' do
context 'when variables hierarchy is stubbed' do
let(:build_pre_var) { { key: 'build', value: 'value', public: true, masked: false } }
@@ -3069,8 +3105,24 @@ RSpec.describe Ci::Build do
end
context 'when build is for tag' do
+ let(:tag_name) { project.repository.tags.first.name }
+ let(:tag_message) { project.repository.tags.first.message }
+
+ let!(:pipeline) do
+ create(:ci_pipeline, project: project,
+ sha: project.commit.id,
+ ref: tag_name,
+ status: 'success')
+ end
+
+ let!(:build) { create(:ci_build, pipeline: pipeline, ref: tag_name) }
+
let(:tag_variable) do
- { key: 'CI_COMMIT_TAG', value: 'master', public: true, masked: false }
+ { key: 'CI_COMMIT_TAG', value: tag_name, public: true, masked: false }
+ end
+
+ let(:tag_message_variable) do
+ { key: 'CI_COMMIT_TAG_MESSAGE', value: tag_message, public: true, masked: false }
end
before do
@@ -3081,7 +3133,7 @@ RSpec.describe Ci::Build do
it do
build.reload
- expect(subject).to include(tag_variable)
+ expect(subject).to include(tag_variable, tag_message_variable)
end
end
@@ -3474,6 +3526,49 @@ RSpec.describe Ci::Build do
it { is_expected.to include(key: job_variable.key, value: job_variable.value, public: false, masked: false) }
end
+
+ context 'when ID tokens are defined on the build' do
+ before do
+ rsa_key = OpenSSL::PKey::RSA.generate(3072).to_s
+ stub_application_setting(ci_jwt_signing_key: rsa_key)
+ build.metadata.update!(id_tokens: {
+ 'ID_TOKEN_1' => { id_token: { aud: 'developers' } },
+ 'ID_TOKEN_2' => { id_token: { aud: 'maintainers' } }
+ })
+ end
+
+ subject(:runner_vars) { build.variables.to_runner_variables }
+
+ it 'includes the ID token variables' do
+ expect(runner_vars).to include(
+ a_hash_including(key: 'ID_TOKEN_1', public: false, masked: true),
+ a_hash_including(key: 'ID_TOKEN_2', public: false, masked: true)
+ )
+
+ id_token_var_1 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_1' }
+ id_token_var_2 = runner_vars.find { |var| var[:key] == 'ID_TOKEN_2' }
+ id_token_1 = JWT.decode(id_token_var_1[:value], nil, false).first
+ id_token_2 = JWT.decode(id_token_var_2[:value], nil, false).first
+ expect(id_token_1['aud']).to eq('developers')
+ expect(id_token_2['aud']).to eq('maintainers')
+ end
+
+ context 'when a NoSigningKeyError is raised' do
+ it 'does not include the ID token variables' do
+ allow(::Gitlab::Ci::JwtV2).to receive(:for_build).and_raise(::Gitlab::Ci::Jwt::NoSigningKeyError)
+
+ expect(runner_vars.map { |var| var[:key] }).not_to include('ID_TOKEN_1', 'ID_TOKEN_2')
+ end
+ end
+
+ context 'when a RSAError is raised' do
+ it 'does not include the ID token variables' do
+ allow(::Gitlab::Ci::JwtV2).to receive(:for_build).and_raise(::OpenSSL::PKey::RSAError)
+
+ expect(runner_vars.map { |var| var[:key] }).not_to include('ID_TOKEN_1', 'ID_TOKEN_2')
+ end
+ end
+ end
end
describe '#scoped_variables' do
@@ -5171,10 +5266,11 @@ RSpec.describe Ci::Build do
it { expect(matchers.size).to eq(2) }
it 'groups build ids' do
- expect(matchers.map(&:build_ids)).to match_array([
- [build_without_tags.id],
- match_array([build_with_tags.id, other_build_with_tags.id])
- ])
+ expect(matchers.map(&:build_ids)).to match_array(
+ [
+ [build_without_tags.id],
+ match_array([build_with_tags.id, other_build_with_tags.id])
+ ])
end
it { expect(matchers.map(&:tag_list)).to match_array([[], %w[tag1 tag2]]) }
@@ -5362,7 +5458,7 @@ RSpec.describe Ci::Build do
end
describe '#clone' do
- let_it_be(:user) { FactoryBot.build(:user) }
+ let_it_be(:user) { create(:user) }
context 'when given new job variables' do
context 'when the cloned build has an action' do
@@ -5371,10 +5467,11 @@ RSpec.describe Ci::Build do
create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
create(:ci_job_variable, job: build, key: 'OLD_KEY', value: 'i will not live for long')
- new_build = build.clone(current_user: user, new_job_variables_attributes: [
- { key: 'TEST_KEY', value: 'new value' },
- { key: 'NEW_KEY', value: 'exciting new value' }
- ])
+ new_build = build.clone(current_user: user, new_job_variables_attributes:
+ [
+ { key: 'TEST_KEY', value: 'new value' },
+ { key: 'NEW_KEY', value: 'exciting new value' }
+ ])
new_build.save!
expect(new_build.job_variables.count).to be(2)
@@ -5388,9 +5485,10 @@ RSpec.describe Ci::Build do
build = create(:ci_build)
create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
- new_build = build.clone(current_user: user, new_job_variables_attributes: [
- { key: 'TEST_KEY', value: 'new value' }
- ])
+ new_build = build.clone(
+ current_user: user,
+ new_job_variables_attributes: [{ key: 'TEST_KEY', value: 'new value' }]
+ )
new_build.save!
expect(new_build.job_variables.count).to be(1)
diff --git a/spec/models/ci/build_trace_chunks/redis_spec.rb b/spec/models/ci/build_trace_chunks/redis_spec.rb
index c004887d609..0d8cda7b3d8 100644
--- a/spec/models/ci/build_trace_chunks/redis_spec.rb
+++ b/spec/models/ci/build_trace_chunks/redis_spec.rb
@@ -211,15 +211,15 @@ RSpec.describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
it 'deletes multiple data' do
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_truthy
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_truthy
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:0")).to eq(true)
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:1")).to eq(true)
end
subject
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:0")).to be_falsy
- expect(redis.exists("gitlab:ci:trace:#{build.id}:chunks:1")).to be_falsy
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:0")).to eq(false)
+ expect(redis.exists?("gitlab:ci:trace:#{build.id}:chunks:1")).to eq(false)
end
end
end
diff --git a/spec/models/ci/build_trace_spec.rb b/spec/models/ci/build_trace_spec.rb
index bd24e8be1ac..f2df4874b84 100644
--- a/spec/models/ci/build_trace_spec.rb
+++ b/spec/models/ci/build_trace_spec.rb
@@ -28,9 +28,10 @@ RSpec.describe Ci::BuildTrace do
it_behaves_like 'delegates methods'
it 'returns formatted trace' do
- expect(subject.lines).to eq([
- { offset: 0, content: [{ text: 'the-stream' }] }
- ])
+ expect(subject.lines).to eq(
+ [
+ { offset: 0, content: [{ text: 'the-stream' }] }
+ ])
end
context 'with invalid UTF-8 data' do
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index d0141a1469e..cd55817243f 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -41,24 +41,25 @@ RSpec.describe Ci::DailyBuildGroupReportResult do
let!(:new_pipeline) { create(:ci_pipeline) }
it 'creates or updates matching report results' do
- described_class.upsert_reports([
- {
- project_id: rspec_coverage.project_id,
- ref_path: rspec_coverage.ref_path,
- last_pipeline_id: new_pipeline.id,
- date: rspec_coverage.date,
- group_name: 'rspec',
- data: { 'coverage' => 81.0 }
- },
- {
- project_id: rspec_coverage.project_id,
- ref_path: rspec_coverage.ref_path,
- last_pipeline_id: new_pipeline.id,
- date: rspec_coverage.date,
- group_name: 'karma',
- data: { 'coverage' => 87.0 }
- }
- ])
+ described_class.upsert_reports(
+ [
+ {
+ project_id: rspec_coverage.project_id,
+ ref_path: rspec_coverage.ref_path,
+ last_pipeline_id: new_pipeline.id,
+ date: rspec_coverage.date,
+ group_name: 'rspec',
+ data: { 'coverage' => 81.0 }
+ },
+ {
+ project_id: rspec_coverage.project_id,
+ ref_path: rspec_coverage.ref_path,
+ last_pipeline_id: new_pipeline.id,
+ date: rspec_coverage.date,
+ group_name: 'karma',
+ data: { 'coverage' => 87.0 }
+ }
+ ])
rspec_coverage.reload
diff --git a/spec/models/ci/job_token/project_scope_link_spec.rb b/spec/models/ci/job_token/project_scope_link_spec.rb
index c000a3e29f7..92ed86b55b2 100644
--- a/spec/models/ci/job_token/project_scope_link_spec.rb
+++ b/spec/models/ci/job_token/project_scope_link_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do
it { is_expected.to belong_to(:target_project) }
it { is_expected.to belong_to(:added_by) }
+ let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
it_behaves_like 'cleanup by a loose foreign key' do
@@ -89,16 +90,22 @@ RSpec.describe Ci::JobToken::ProjectScopeLink do
end
end
+ describe 'enums' do
+ let(:directions) { { outbound: 0, inbound: 1 } }
+
+ it { is_expected.to define_enum_for(:direction).with_values(directions) }
+ end
+
context 'loose foreign key on ci_job_token_project_scope_links.source_project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, namespace: group) }
let!(:model) { create(:ci_job_token_project_scope_link, source_project: parent) }
end
end
context 'loose foreign key on ci_job_token_project_scope_links.target_project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, namespace: group) }
let!(:model) { create(:ci_job_token_project_scope_link, target_project: parent) }
end
end
diff --git a/spec/models/ci/job_token/scope_spec.rb b/spec/models/ci/job_token/scope_spec.rb
index 4b95adf8476..1e3f6d044d2 100644
--- a/spec/models/ci/job_token/scope_spec.rb
+++ b/spec/models/ci/job_token/scope_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::JobToken::Scope do
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let(:scope) { described_class.new(project) }
@@ -53,7 +53,7 @@ RSpec.describe Ci::JobToken::Scope do
context 'when project scope setting is disabled' do
before do
- project.ci_job_token_scope_enabled = false
+ project.ci_outbound_job_token_scope_enabled = false
end
it 'considers any project to be part of the scope' do
diff --git a/spec/models/ci/pipeline_metadata_spec.rb b/spec/models/ci/pipeline_metadata_spec.rb
new file mode 100644
index 00000000000..0704cbc8ec1
--- /dev/null
+++ b/spec/models/ci/pipeline_metadata_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineMetadata do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:pipeline) }
+
+ describe 'validations' do
+ it { is_expected.to validate_length_of(:title).is_at_least(1).is_at_most(255) }
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:pipeline) }
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index ec03030a4b8..b2316949497 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -43,12 +43,14 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to have_one(:triggered_by_pipeline) }
it { is_expected.to have_one(:source_job) }
it { is_expected.to have_one(:pipeline_config) }
+ it { is_expected.to have_one(:pipeline_metadata) }
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :git_author_full_text }
it { is_expected.to respond_to :short_sha }
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
+ it { is_expected.to delegate_method(:title).to(:pipeline_metadata).allow_nil }
describe 'validations' do
it { is_expected.to validate_presence_of(:sha) }
@@ -2981,6 +2983,24 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
+ it 'logs the event' do
+ allow(Gitlab::AppJsonLogger).to receive(:info)
+
+ pipeline.cancel_running
+
+ expect(Gitlab::AppJsonLogger)
+ .to have_received(:info)
+ .with(
+ a_hash_including(
+ event: 'pipeline_cancel_running',
+ pipeline_id: pipeline.id,
+ auto_canceled_by_pipeline_id: nil,
+ cascade_to_children: true,
+ execute_async: true
+ )
+ )
+ end
+
context 'when there is a running external job and a regular job' do
before do
create(:ci_build, :running, pipeline: pipeline)
@@ -3813,7 +3833,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe '#upstream_root' do
subject { pipeline.upstream_root }
- let_it_be(:pipeline) { create(:ci_pipeline) }
+ let_it_be_with_refind(:pipeline) { create(:ci_pipeline) }
context 'when pipeline is child of child pipeline' do
let!(:root_ancestor) { create(:ci_pipeline) }
@@ -4529,10 +4549,11 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
it 'returns accessibility report with collected data' do
- expect(subject.urls.keys).to match_array([
- "https://pa11y.org/",
- "https://about.gitlab.com/"
- ])
+ expect(subject.urls.keys).to match_array(
+ [
+ "https://pa11y.org/",
+ "https://about.gitlab.com/"
+ ])
end
context 'when builds are retried' do
@@ -5316,19 +5337,18 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe '#authorized_cluster_agents' do
+ describe '#cluster_agent_authorizations' do
let(:pipeline) { create(:ci_empty_pipeline, :created) }
- let(:agent) { instance_double(Clusters::Agent) }
- let(:authorization) { instance_double(Clusters::Agents::GroupAuthorization, agent: agent) }
+ let(:authorization) { instance_double(Clusters::Agents::GroupAuthorization) }
let(:finder) { double(execute: [authorization]) }
- it 'retrieves agent records from the finder and caches the result' do
+ it 'retrieves authorization records from the finder and caches the result' do
expect(Clusters::AgentAuthorizationsFinder).to receive(:new).once
.with(pipeline.project)
.and_return(finder)
- expect(pipeline.authorized_cluster_agents).to contain_exactly(agent)
- expect(pipeline.authorized_cluster_agents).to contain_exactly(agent) # cached
+ expect(pipeline.cluster_agent_authorizations).to contain_exactly(authorization)
+ expect(pipeline.cluster_agent_authorizations).to contain_exactly(authorization) # cached
end
end
@@ -5486,7 +5506,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe 'partitioning' do
- let(:pipeline) { build(:ci_pipeline) }
+ let(:pipeline) { build(:ci_pipeline, partition_id: nil) }
before do
allow(described_class).to receive(:current_partition_value) { 123 }
@@ -5516,4 +5536,73 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
end
+
+ describe '#notes=' do
+ context 'when notes already exist' do
+ it 'does not create duplicate notes', :aggregate_failures do
+ time = Time.zone.now
+ pipeline = create(:ci_pipeline, user: user, project: project)
+ note = Note.new(
+ note: 'note',
+ noteable_type: 'Commit',
+ noteable_id: pipeline.id,
+ commit_id: pipeline.id,
+ author_id: user.id,
+ project_id: pipeline.project_id,
+ created_at: time
+ )
+ another_note = note.dup.tap { |note| note.note = 'another note' }
+
+ expect(project.notes.for_commit_id(pipeline.sha).count).to eq(0)
+
+ pipeline.notes = [note]
+
+ expect(project.notes.for_commit_id(pipeline.sha).count).to eq(1)
+
+ pipeline.notes = [note, note, another_note]
+
+ expect(project.notes.for_commit_id(pipeline.sha).count).to eq(2)
+ expect(project.notes.for_commit_id(pipeline.sha).pluck(:note)).to contain_exactly(note.note, another_note.note)
+ end
+ end
+ end
+
+ describe '#has_erasable_artifacts?' do
+ subject { pipeline.has_erasable_artifacts? }
+
+ context 'when pipeline is not complete' do
+ let(:pipeline) { create(:ci_pipeline, :running, :with_job) }
+
+ context 'and has erasable artifacts' do
+ before do
+ create(:ci_job_artifact, :archive, job: pipeline.builds.first)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when pipeline is complete' do
+ let(:pipeline) { create(:ci_pipeline, :success, :with_job) }
+
+ context 'and has no artifacts' do
+ it { is_expected.to be_falsey }
+ end
+
+ Ci::JobArtifact.erasable_file_types.each do |type|
+ context "and has an artifact of type #{type}" do
+ before do
+ create(
+ :ci_job_artifact,
+ file_format: ::Ci::JobArtifact::TYPE_AND_FORMAT_PAIRS[type.to_sym],
+ file_type: type,
+ job: pipeline.builds.first
+ )
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 61e2864a518..a199111b1e3 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -177,7 +177,7 @@ RSpec.describe Ci::Processable do
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
- [:tag_list, :needs_attributes, :job_variables_attributes] -
+ [:tag_list, :needs_attributes, :job_variables_attributes, :id_tokens] -
# ToDo: Move EE accessors to ee/
::Ci::Build.extra_accessors -
[:dast_site_profiles_build, :dast_scanner_profiles_build]
diff --git a/spec/models/ci/resource_group_spec.rb b/spec/models/ci/resource_group_spec.rb
index 76e74f3193c..e8eccc233db 100644
--- a/spec/models/ci/resource_group_spec.rb
+++ b/spec/models/ci/resource_group_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe Ci::ResourceGroup do
+ let_it_be(:group) { create(:group) }
+
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, group: group) }
let!(:model) { create(:ci_resource_group, project: parent) }
end
@@ -94,7 +96,7 @@ RSpec.describe Ci::ResourceGroup do
describe '#upcoming_processables' do
subject { resource_group.upcoming_processables }
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:pipeline_1) { create(:ci_pipeline, project: project) }
let_it_be(:pipeline_2) { create(:ci_pipeline, project: project) }
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 181351222c1..13eb7086586 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -1186,7 +1186,7 @@ RSpec.describe Ci::Runner do
end
end
- context 'Project-related queries' do
+ describe 'Project-related queries' do
let_it_be(:project1) { create(:project) }
let_it_be(:project2) { create(:project) }
@@ -1206,14 +1206,14 @@ RSpec.describe Ci::Runner do
end
end
- describe "belongs_to_one_project?" do
+ describe '#belongs_to_one_project?' do
it "returns false if there are two projects runner is assigned to" do
runner = create(:ci_runner, :project, projects: [project1, project2])
expect(runner.belongs_to_one_project?).to be_falsey
end
- it "returns true if there is only one project runner is assigned to" do
+ it 'returns true if there is only one project runner is assigned to' do
runner = create(:ci_runner, :project, projects: [project1])
expect(runner.belongs_to_one_project?).to be_truthy
@@ -1537,47 +1537,155 @@ RSpec.describe Ci::Runner do
it { is_expected.to eq(contacted_at_stored) }
end
- describe '.belonging_to_group' do
- it 'returns the specific group runner' do
- group = create(:group)
- runner = create(:ci_runner, :group, groups: [group])
- unrelated_group = create(:group)
- create(:ci_runner, :group, groups: [unrelated_group])
+ describe 'Group-related queries' do
+ # Groups
+ let_it_be(:top_level_group) { create(:group) }
+ let_it_be(:child_group) { create(:group, parent: top_level_group) }
+ let_it_be(:child_group2) { create(:group, parent: top_level_group) }
+ let_it_be(:other_top_level_group) { create(:group) }
+
+ # Projects
+ let_it_be(:top_level_group_project) { create(:project, group: top_level_group) }
+ let_it_be(:child_group_project) { create(:project, group: child_group) }
+ let_it_be(:other_top_level_group_project) { create(:project, group: other_top_level_group) }
- expect(described_class.belonging_to_group(group.id)).to contain_exactly(runner)
+ # Runners
+ let_it_be(:instance_runner) { create(:ci_runner, :instance) }
+ let_it_be(:top_level_group_runner) { create(:ci_runner, :group, groups: [top_level_group]) }
+ let_it_be(:child_group_runner) { create(:ci_runner, :group, groups: [child_group]) }
+ let_it_be(:child_group2_runner) { create(:ci_runner, :group, groups: [child_group2]) }
+ let_it_be(:other_top_level_group_runner) do
+ create(:ci_runner, :group, groups: [other_top_level_group])
end
- end
- describe '.belonging_to_group_and_ancestors' do
- let_it_be(:parent_group) { create(:group) }
- let_it_be(:parent_runner) { create(:ci_runner, :group, groups: [parent_group]) }
- let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:top_level_group_project_runner) do
+ create(:ci_runner, :project, projects: [top_level_group_project])
+ end
- it 'returns the group runner from the parent group' do
- expect(described_class.belonging_to_group_and_ancestors(group.id)).to contain_exactly(parent_runner)
+ let_it_be(:child_group_project_runner) do
+ create(:ci_runner, :project, projects: [child_group_project])
end
- end
- describe '.belonging_to_group_or_project_descendants' do
- it 'returns the specific group runners' do
- group1 = create(:group)
- group2 = create(:group, parent: group1)
- group3 = create(:group)
-
- project1 = create(:project, namespace: group1)
- project2 = create(:project, namespace: group2)
- project3 = create(:project, namespace: group3)
-
- runner1 = create(:ci_runner, :group, groups: [group1])
- runner2 = create(:ci_runner, :group, groups: [group2])
- _runner3 = create(:ci_runner, :group, groups: [group3])
- runner4 = create(:ci_runner, :project, projects: [project1])
- runner5 = create(:ci_runner, :project, projects: [project2])
- _runner6 = create(:ci_runner, :project, projects: [project3])
-
- expect(described_class.belonging_to_group_or_project_descendants(group1.id)).to contain_exactly(
- runner1, runner2, runner4, runner5
- )
+ let_it_be(:other_top_level_group_project_runner) do
+ create(:ci_runner, :project, projects: [other_top_level_group_project])
+ end
+
+ let_it_be(:shared_top_level_group_project_runner) do
+ create(:ci_runner, :project, projects: [top_level_group_project, child_group_project])
+ end
+
+ describe '.belonging_to_group' do
+ subject(:relation) { described_class.belonging_to_group(scope.id) }
+
+ context 'with scope set to top_level_group' do
+ let(:scope) { top_level_group }
+
+ it 'returns the group runners from the top_level_group' do
+ is_expected.to contain_exactly(top_level_group_runner)
+ end
+ end
+
+ context 'with scope set to child_group' do
+ let(:scope) { child_group }
+
+ it 'returns the group runners from the child_group' do
+ is_expected.to contain_exactly(child_group_runner)
+ end
+ end
+ end
+
+ describe '.belonging_to_group_and_ancestors' do
+ subject(:relation) { described_class.belonging_to_group_and_ancestors(child_group.id) }
+
+ it 'returns the group runners from the group and parent group' do
+ is_expected.to contain_exactly(child_group_runner, top_level_group_runner)
+ end
+ end
+
+ describe '.belonging_to_group_or_project_descendants' do
+ subject(:relation) { described_class.belonging_to_group_or_project_descendants(scope.id) }
+
+ context 'with scope set to top_level_group' do
+ let(:scope) { top_level_group }
+
+ it 'returns the expected group and project runners without duplicates', :aggregate_failures do
+ expect(relation).to contain_exactly(
+ top_level_group_runner,
+ top_level_group_project_runner,
+ child_group_runner,
+ child_group_project_runner,
+ child_group2_runner,
+ shared_top_level_group_project_runner
+ )
+
+ # Ensure no duplicates are returned
+ expect(relation.distinct).to match_array(relation)
+ end
+ end
+
+ context 'with scope set to child_group' do
+ let(:scope) { child_group }
+
+ it 'returns the expected group and project runners without duplicates', :aggregate_failures do
+ expect(relation).to contain_exactly(
+ child_group_runner,
+ child_group_project_runner,
+ shared_top_level_group_project_runner
+ )
+
+ # Ensure no duplicates are returned
+ expect(relation.distinct).to match_array(relation)
+ end
+ end
+ end
+
+ describe '.usable_from_scope' do
+ subject(:relation) { described_class.usable_from_scope(scope) }
+
+ context 'with scope set to top_level_group' do
+ let(:scope) { top_level_group }
+
+ it 'returns all runners usable from top_level_group without duplicates' do
+ expect(relation).to contain_exactly(
+ instance_runner,
+ top_level_group_runner,
+ top_level_group_project_runner,
+ child_group_runner,
+ child_group_project_runner,
+ child_group2_runner,
+ shared_top_level_group_project_runner
+ )
+
+ # Ensure no duplicates are returned
+ expect(relation.distinct).to match_array(relation)
+ end
+ end
+
+ context 'with scope set to child_group' do
+ let(:scope) { child_group }
+
+ it 'returns all runners usable from child_group' do
+ expect(relation).to contain_exactly(
+ instance_runner,
+ top_level_group_runner,
+ child_group_runner,
+ child_group_project_runner,
+ shared_top_level_group_project_runner
+ )
+ end
+ end
+
+ context 'with scope set to other_top_level_group' do
+ let(:scope) { other_top_level_group }
+
+ it 'returns all runners usable from other_top_level_group' do
+ expect(relation).to contain_exactly(
+ instance_runner,
+ other_top_level_group_runner,
+ other_top_level_group_project_runner
+ )
+ end
+ end
end
end
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index e47efff5dfd..20f64d40865 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -81,4 +81,70 @@ RSpec.describe Ci::SecureFile do
expect(Base64.encode64(subject.file.read)).to eq(Base64.encode64(sample_file))
end
end
+
+ describe '#file_extension' do
+ it 'returns the extension for the file name' do
+ file = build(:ci_secure_file, name: 'file1.cer')
+ expect(file.file_extension).to eq('cer')
+ end
+
+ it 'returns only the last part of the extension for the file name' do
+ file = build(:ci_secure_file, name: 'file1.tar.gz')
+ expect(file.file_extension).to eq('gz')
+ end
+ end
+
+ describe '#metadata_parsable?' do
+ it 'returns true when the file extension has a supported parser' do
+ file = build(:ci_secure_file, name: 'file1.cer')
+ expect(file.metadata_parsable?).to be true
+ end
+
+ it 'returns false when the file extension does not have a supported parser' do
+ file = build(:ci_secure_file, name: 'file1.foo')
+ expect(file.metadata_parsable?).to be false
+ end
+ end
+
+ describe '#metadata_parser' do
+ it 'returns an instance of Gitlab::Ci::SecureFiles::Cer when a .cer file is supplied' do
+ file = build(:ci_secure_file, name: 'file1.cer')
+ expect(file.metadata_parser).to be_an_instance_of(Gitlab::Ci::SecureFiles::Cer)
+ end
+
+ it 'returns an instance of Gitlab::Ci::SecureFiles::P12 when a .p12 file is supplied' do
+ file = build(:ci_secure_file, name: 'file1.p12')
+ expect(file.metadata_parser).to be_an_instance_of(Gitlab::Ci::SecureFiles::P12)
+ end
+
+ it 'returns an instance of Gitlab::Ci::SecureFiles::MobileProvision when a .mobileprovision file is supplied' do
+ file = build(:ci_secure_file, name: 'file1.mobileprovision')
+ expect(file.metadata_parser).to be_an_instance_of(Gitlab::Ci::SecureFiles::MobileProvision)
+ end
+
+ it 'returns nil when the file type is not supported by any parsers' do
+ file = build(:ci_secure_file, name: 'file1.foo')
+ expect(file.metadata_parser).to be nil
+ end
+ end
+
+ describe '#update_metadata!' do
+ it 'assigns the expected metadata when a parsable file is supplied' do
+ file = create(:ci_secure_file, name: 'file1.cer',
+ file: CarrierWaveStringFile.new(fixture_file('ci_secure_files/sample.cer') ))
+ file.update_metadata!
+
+ expect(file.expires_at).to eq(DateTime.parse('2022-04-26 19:20:40'))
+ expect(file.metadata['id']).to eq('33669367788748363528491290218354043267')
+ expect(file.metadata['issuer']['CN']).to eq('Apple Worldwide Developer Relations Certification Authority')
+ expect(file.metadata['subject']['OU']).to eq('N7SYAN8PX8')
+ end
+
+ it 'logs an error when something goes wrong with the file parsing' do
+ corrupt_file = create(:ci_secure_file, name: 'file1.cer', file: CarrierWaveStringFile.new('11111111'))
+ message = 'Validation failed: Metadata must be a valid json schema - not enough data.'
+ expect(Gitlab::AppLogger).to receive(:error).with("Secure File Parser Failure (#{corrupt_file.id}): #{message}")
+ corrupt_file.update_metadata!
+ end
+ end
end
diff --git a/spec/models/ci/unit_test_spec.rb b/spec/models/ci/unit_test_spec.rb
index 556cf93c266..b3180492a36 100644
--- a/spec/models/ci/unit_test_spec.rb
+++ b/spec/models/ci/unit_test_spec.rb
@@ -43,18 +43,19 @@ RSpec.describe Ci::UnitTest do
result = described_class.find_or_create_by_batch(project, attrs)
- expect(result).to match_array([
- have_attributes(
- key_hash: existing_test.key_hash,
- suite_name: 'rspec',
- name: 'Math#sum adds numbers'
- ),
- have_attributes(
- key_hash: new_key,
- suite_name: 'jest',
- name: 'Component works'
- )
- ])
+ expect(result).to match_array(
+ [
+ have_attributes(
+ key_hash: existing_test.key_hash,
+ suite_name: 'rspec',
+ name: 'Math#sum adds numbers'
+ ),
+ have_attributes(
+ key_hash: new_key,
+ suite_name: 'jest',
+ name: 'Component works'
+ )
+ ])
expect(result).to all(be_persisted)
end
@@ -77,13 +78,14 @@ RSpec.describe Ci::UnitTest do
result = described_class.find_or_create_by_batch(project, attrs)
- expect(result).to match_array([
- have_attributes(
- key_hash: new_key,
- suite_name: 'abc...',
- name: 'abc...'
- )
- ])
+ expect(result).to match_array(
+ [
+ have_attributes(
+ key_hash: new_key,
+ suite_name: 'abc...',
+ name: 'abc...'
+ )
+ ])
expect(result).to all(be_persisted)
end
diff --git a/spec/models/ci/variable_spec.rb b/spec/models/ci/variable_spec.rb
index f0af229ff2c..5f2b5971508 100644
--- a/spec/models/ci/variable_spec.rb
+++ b/spec/models/ci/variable_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Ci::Variable do
context 'loose foreign key on ci_variables.project_id' do
it_behaves_like 'cleanup by a loose foreign key' do
- let!(:parent) { create(:project) }
+ let!(:parent) { create(:project, namespace: create(:group)) }
let!(:model) { create(:ci_variable, project: parent) }
end
end
diff --git a/spec/models/clusters/agents/implicit_authorization_spec.rb b/spec/models/clusters/agents/implicit_authorization_spec.rb
index 2d6c3ddb426..1f4c5b1ac9e 100644
--- a/spec/models/clusters/agents/implicit_authorization_spec.rb
+++ b/spec/models/clusters/agents/implicit_authorization_spec.rb
@@ -10,5 +10,5 @@ RSpec.describe Clusters::Agents::ImplicitAuthorization do
it { expect(subject.agent).to eq(agent) }
it { expect(subject.agent_id).to eq(agent.id) }
it { expect(subject.config_project).to eq(agent.project) }
- it { expect(subject.config).to be_nil }
+ it { expect(subject.config).to eq({}) }
end
diff --git a/spec/models/clusters/applications/cert_manager_spec.rb b/spec/models/clusters/applications/cert_manager_spec.rb
index 3044260a000..05ab8c4108e 100644
--- a/spec/models/clusters/applications/cert_manager_spec.rb
+++ b/spec/models/clusters/applications/cert_manager_spec.rb
@@ -49,13 +49,15 @@ RSpec.describe Clusters::Applications::CertManager do
expect(subject.version).to eq('v0.10.1')
expect(subject).to be_rbac
expect(subject.files).to eq(cert_manager.files.merge(cluster_issuer_file))
- expect(subject.preinstall).to eq([
- 'kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.10/deploy/manifests/00-crds.yaml',
- 'kubectl label --overwrite namespace gitlab-managed-apps certmanager.k8s.io/disable-validation=true'
- ])
- expect(subject.postinstall).to eq([
- "for i in $(seq 1 90); do kubectl apply -f /data/helm/certmanager/config/cluster_issuer.yaml && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
- ])
+ expect(subject.preinstall).to eq(
+ [
+ 'kubectl apply -f https://raw.githubusercontent.com/jetstack/cert-manager/release-0.10/deploy/manifests/00-crds.yaml',
+ 'kubectl label --overwrite namespace gitlab-managed-apps certmanager.k8s.io/disable-validation=true'
+ ])
+ expect(subject.postinstall).to eq(
+ [
+ "for i in $(seq 1 90); do kubectl apply -f /data/helm/certmanager/config/cluster_issuer.yaml && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)"
+ ])
end
context 'for a specific user' do
@@ -99,15 +101,16 @@ RSpec.describe Clusters::Applications::CertManager do
end
it 'specifies a post delete command to remove custom resource definitions' do
- expect(subject.postdelete).to eq([
- 'kubectl delete secret -n gitlab-managed-apps letsencrypt-prod --ignore-not-found',
- 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
- ])
+ expect(subject.postdelete).to eq(
+ [
+ 'kubectl delete secret -n gitlab-managed-apps letsencrypt-prod --ignore-not-found',
+ 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
+ ])
end
context 'secret key name is not found' do
@@ -119,14 +122,15 @@ RSpec.describe Clusters::Applications::CertManager do
end
it 'does not try and delete the secret' do
- expect(subject.postdelete).to eq([
- 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
- 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
- ])
+ expect(subject.postdelete).to eq(
+ [
+ 'kubectl delete crd certificates.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd certificaterequests.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd challenges.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd clusterissuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd issuers.certmanager.k8s.io --ignore-not-found',
+ 'kubectl delete crd orders.certmanager.k8s.io --ignore-not-found'
+ ])
end
end
end
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index 7b9ff409edd..4ac2fd022ba 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -601,19 +601,27 @@ RSpec.describe Clusters::Platforms::Kubernetes do
it 'creates a matching RolloutStatus' do
expect(rollout_status).to be_kind_of(::Gitlab::Kubernetes::RolloutStatus)
- expect(rollout_status.deployments.map(&:annotations)).to eq([
- { 'app.gitlab.com/app' => project.full_path_slug, 'app.gitlab.com/env' => 'env-000000' }
- ])
- expect(rollout_status.instances).to eq([{ pod_name: "kube-pod",
- stable: true,
- status: "pending",
- tooltip: "kube-pod (Pending)",
- track: "stable" },
- { pod_name: "Not provided",
- stable: true,
- status: "pending",
- tooltip: "Not provided (Pending)",
- track: "stable" }])
+ expect(rollout_status.deployments.map(&:annotations)).to eq(
+ [
+ { 'app.gitlab.com/app' => project.full_path_slug, 'app.gitlab.com/env' => 'env-000000' }
+ ])
+ expect(rollout_status.instances).to eq(
+ [
+ {
+ pod_name: "kube-pod",
+ stable: true,
+ status: "pending",
+ tooltip: "kube-pod (Pending)",
+ track: "stable"
+ },
+ {
+ pod_name: "Not provided",
+ stable: true,
+ status: "pending",
+ tooltip: "Not provided (Pending)",
+ track: "stable"
+ }
+ ])
end
context 'with canary ingress' do
@@ -720,11 +728,12 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
it 'returns a pending pod for each missing replica' do
- expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status) }).to eq([
- { pod_name: 'pod-a-1', status: 'running' },
- { pod_name: 'Not provided', status: 'pending' },
- { pod_name: 'Not provided', status: 'pending' }
- ])
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status) }).to eq(
+ [
+ { pod_name: 'pod-a-1', status: 'running' },
+ { pod_name: 'Not provided', status: 'pending' },
+ { pod_name: 'Not provided', status: 'pending' }
+ ])
end
end
@@ -743,12 +752,13 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
it 'returns the correct track for the pending pods' do
- expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq([
- { pod_name: 'pod-a-1', status: 'running', track: 'canary' },
- { pod_name: 'Not provided', status: 'pending', track: 'canary' },
- { pod_name: 'Not provided', status: 'pending', track: 'stable' },
- { pod_name: 'Not provided', status: 'pending', track: 'stable' }
- ])
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq(
+ [
+ { pod_name: 'pod-a-1', status: 'running', track: 'canary' },
+ { pod_name: 'Not provided', status: 'pending', track: 'canary' },
+ { pod_name: 'Not provided', status: 'pending', track: 'stable' },
+ { pod_name: 'Not provided', status: 'pending', track: 'stable' }
+ ])
end
end
@@ -765,10 +775,11 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
it 'returns the correct number of pending pods' do
- expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq([
- { pod_name: 'Not provided', status: 'pending', track: 'mytrack' },
- { pod_name: 'Not provided', status: 'pending', track: 'mytrack' }
- ])
+ expect(rollout_status.instances.map { |p| p.slice(:pod_name, :status, :track) }).to eq(
+ [
+ { pod_name: 'Not provided', status: 'pending', track: 'mytrack' },
+ { pod_name: 'Not provided', status: 'pending', track: 'mytrack' }
+ ])
end
end
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
index de9b72c1da2..93c696cae54 100644
--- a/spec/models/commit_collection_spec.rb
+++ b/spec/models/commit_collection_spec.rb
@@ -42,10 +42,7 @@ RSpec.describe CommitCollection do
merge_commit = project.commit("60ecb67744cb56576c30214ff52294f8ce2def98")
expect(merge_commit).to receive(:merge_commit?).and_return(true)
- collection = described_class.new(project, [
- commit,
- merge_commit
- ])
+ collection = described_class.new(project, [commit, merge_commit])
expect(collection.without_merge_commits).to contain_exactly(commit)
end
diff --git a/spec/models/compare_spec.rb b/spec/models/compare_spec.rb
index 0035fb8468a..dc8429fe77e 100644
--- a/spec/models/compare_spec.rb
+++ b/spec/models/compare_spec.rb
@@ -127,13 +127,14 @@ RSpec.describe Compare do
end
it 'returns affected file paths, without duplication' do
- expect(subject.modified_paths).to contain_exactly(*%w{
- foo/for_move.txt
- foo/bar/for_move.txt
- foo/for_create.txt
- foo/for_delete.txt
- foo/for_edit.txt
- })
+ expect(subject.modified_paths).to contain_exactly(
+ *%w{
+ foo/for_move.txt
+ foo/bar/for_move.txt
+ foo/for_create.txt
+ foo/for_delete.txt
+ foo/for_edit.txt
+ })
end
end
diff --git a/spec/models/concerns/approvable_spec.rb b/spec/models/concerns/approvable_spec.rb
index 1ddd9b3edca..25a4f51cd82 100644
--- a/spec/models/concerns/approvable_spec.rb
+++ b/spec/models/concerns/approvable_spec.rb
@@ -32,8 +32,8 @@ RSpec.describe Approvable do
end
end
- describe '#can_be_approved_by?' do
- subject { merge_request.can_be_approved_by?(user) }
+ describe '#eligible_for_approval_by?' do
+ subject { merge_request.eligible_for_approval_by?(user) }
before do
merge_request.project.add_developer(user) if user
@@ -60,8 +60,8 @@ RSpec.describe Approvable do
end
end
- describe '#can_be_unapproved_by?' do
- subject { merge_request.can_be_unapproved_by?(user) }
+ describe '#eligible_for_unapproval_by?' do
+ subject { merge_request.eligible_for_unapproval_by?(user) }
before do
merge_request.project.add_developer(user) if user
diff --git a/spec/models/concerns/atomic_internal_id_spec.rb b/spec/models/concerns/atomic_internal_id_spec.rb
index b803e699b25..5fe3141eb17 100644
--- a/spec/models/concerns/atomic_internal_id_spec.rb
+++ b/spec/models/concerns/atomic_internal_id_spec.rb
@@ -3,10 +3,11 @@
require 'spec_helper'
RSpec.describe AtomicInternalId do
- let(:milestone) { build(:milestone) }
+ let_it_be(:project) { create(:project) }
+ let(:milestone) { build(:milestone, project: project) }
let(:iid) { double('iid', to_i: 42) }
let(:external_iid) { 100 }
- let(:scope_attrs) { { project: milestone.project } }
+ let(:scope_attrs) { { project: project } }
let(:usage) { :milestones }
describe '#save!' do
@@ -248,4 +249,12 @@ RSpec.describe AtomicInternalId do
end.to change { InternalId.find_by(project: milestone.project, usage: :milestones)&.last_value.to_i }.by(4)
end
end
+
+ describe '.track_project_iid!' do
+ it 'tracks the present value' do
+ expect do
+ ::Issue.track_project_iid!(milestone.project, external_iid)
+ end.to change { InternalId.find_by(project: milestone.project, usage: :issues)&.last_value.to_i }.to(external_iid)
+ end
+ end
end
diff --git a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
deleted file mode 100644
index 6be6e3f048f..00000000000
--- a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
+++ /dev/null
@@ -1,347 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
-
- def group_settings
- group.namespace_settings
- end
-
- def subgroup_settings
- subgroup.namespace_settings
- end
-
- describe '#delayed_project_removal' do
- subject(:delayed_project_removal) { subgroup_settings.delayed_project_removal }
-
- context 'when there is no parent' do
- context 'and the value is not nil' do
- before do
- group_settings.update!(delayed_project_removal: true)
- end
-
- it 'returns the local value' do
- expect(group_settings.delayed_project_removal).to eq(true)
- end
- end
-
- context 'and the value is nil' do
- before do
- group_settings.update!(delayed_project_removal: nil)
- stub_application_setting(delayed_project_removal: false)
- end
-
- it 'returns the application settings value' do
- expect(group_settings.delayed_project_removal).to eq(false)
- end
- end
- end
-
- context 'when parent does not lock the attribute' do
- context 'and value is not nil' do
- before do
- group_settings.update!(delayed_project_removal: false)
- end
-
- it 'returns local setting when present' do
- subgroup_settings.update!(delayed_project_removal: true)
-
- expect(delayed_project_removal).to eq(true)
- end
-
- it 'returns the parent value when local value is nil' do
- subgroup_settings.update!(delayed_project_removal: nil)
-
- expect(delayed_project_removal).to eq(false)
- end
-
- it 'returns the correct dirty value' do
- subgroup_settings.delayed_project_removal = true
-
- expect(delayed_project_removal).to eq(true)
- end
-
- it 'does not return the application setting value when parent value is false' do
- stub_application_setting(delayed_project_removal: true)
-
- expect(delayed_project_removal).to eq(false)
- end
- end
-
- context 'and the value is nil' do
- before do
- group_settings.update!(delayed_project_removal: nil, lock_delayed_project_removal: false)
- subgroup_settings.update!(delayed_project_removal: nil)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- end
-
- it 'cascades to the application settings value' do
- expect(delayed_project_removal).to eq(false)
- end
- end
-
- context 'when multiple ancestors set a value' do
- let(:third_level_subgroup) { create(:group, parent: subgroup) }
-
- before do
- group_settings.update!(delayed_project_removal: true)
- subgroup_settings.update!(delayed_project_removal: false)
- end
-
- it 'returns the closest ancestor value' do
- expect(third_level_subgroup.namespace_settings.delayed_project_removal).to eq(false)
- end
- end
- end
-
- context 'when parent locks the attribute' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'returns the parent value' do
- expect(delayed_project_removal).to eq(false)
- end
-
- it 'does not allow the local value to be saved' do
- subgroup_settings.delayed_project_removal = nil
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when the application settings locks the attribute' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- stub_application_setting(lock_delayed_project_removal: true, delayed_project_removal: true)
- end
-
- it 'returns the application setting value' do
- expect(delayed_project_removal).to eq(true)
- end
-
- it 'does not allow the local value to be saved' do
- subgroup_settings.delayed_project_removal = false
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when parent locked the attribute then the application settings locks it' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
- stub_application_setting(lock_delayed_project_removal: true, delayed_project_removal: true)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'returns the application setting value' do
- expect(delayed_project_removal).to eq(true)
- end
- end
- end
-
- describe '#delayed_project_removal?' do
- before do
- subgroup_settings.update!(delayed_project_removal: true)
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'aliases the method when the attribute is a boolean' do
- expect(subgroup_settings.delayed_project_removal?).to eq(subgroup_settings.delayed_project_removal)
- end
- end
-
- describe '#delayed_project_removal=' do
- before do
- subgroup_settings.update!(delayed_project_removal: nil)
- group_settings.update!(delayed_project_removal: true)
- end
-
- it 'does not save the value locally when it matches the cascaded value' do
- subgroup_settings.update!(delayed_project_removal: true)
-
- expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(nil)
- end
- end
-
- describe '#delayed_project_removal_locked?' do
- shared_examples 'not locked' do
- it 'is not locked by an ancestor' do
- expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
- end
-
- it 'is not locked by application setting' do
- expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
- end
-
- it 'does not return a locked namespace' do
- expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
- end
- end
-
- context 'when attribute is locked by self' do
- before do
- subgroup_settings.update!(lock_delayed_project_removal: true)
- end
-
- it 'is not locked by default' do
- expect(subgroup_settings.delayed_project_removal_locked?).to eq(false)
- end
-
- it 'is locked when including self' do
- expect(subgroup_settings.delayed_project_removal_locked?(include_self: true)).to eq(true)
- end
- end
-
- context 'when parent does not lock the attribute' do
- it_behaves_like 'not locked'
- end
-
- context 'when parent locks the attribute' do
- before do
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'is locked by an ancestor' do
- expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(true)
- end
-
- it 'is not locked by application setting' do
- expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
- end
-
- it 'returns a locked namespace settings object' do
- expect(subgroup_settings.delayed_project_removal_locked_ancestor.namespace_id).to eq(group_settings.namespace_id)
- end
- end
-
- context 'when not locked by application settings' do
- before do
- stub_application_setting(lock_delayed_project_removal: false)
- end
-
- it_behaves_like 'not locked'
- end
-
- context 'when locked by application settings' do
- before do
- stub_application_setting(lock_delayed_project_removal: true)
- end
-
- it 'is not locked by an ancestor' do
- expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
- end
-
- it 'is locked by application setting' do
- expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(true)
- end
-
- it 'does not return a locked namespace' do
- expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
- end
- end
- end
-
- describe '#lock_delayed_project_removal=' do
- context 'when parent locks the attribute' do
- before do
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
-
- subgroup_settings.clear_memoization(:delayed_project_removal)
- subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
- end
-
- it 'does not allow the attribute to be saved' do
- subgroup_settings.lock_delayed_project_removal = true
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when parent does not lock the attribute' do
- before do
- group_settings.update!(lock_delayed_project_removal: false)
-
- subgroup_settings.lock_delayed_project_removal = true
- end
-
- it 'allows the lock to be set when the attribute is not nil' do
- subgroup_settings.delayed_project_removal = true
-
- expect(subgroup_settings.save).to eq(true)
- end
-
- it 'does not allow the lock to be saved when the attribute is nil' do
- subgroup_settings.delayed_project_removal = nil
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be nil when locking the attribute/)
- end
-
- it 'copies the cascaded value when locking the attribute if the local value is nil', :aggregate_failures do
- subgroup_settings.delayed_project_removal = nil
- subgroup_settings.lock_delayed_project_removal = true
-
- expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(false)
- end
- end
-
- context 'when application settings locks the attribute' do
- before do
- stub_application_setting(lock_delayed_project_removal: true)
- end
-
- it 'does not allow the attribute to be saved' do
- subgroup_settings.lock_delayed_project_removal = true
-
- expect { subgroup_settings.save! }
- .to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
- end
- end
-
- context 'when application_settings does not lock the attribute' do
- before do
- stub_application_setting(lock_delayed_project_removal: false)
- end
-
- it 'allows the attribute to be saved' do
- subgroup_settings.delayed_project_removal = true
- subgroup_settings.lock_delayed_project_removal = true
-
- expect(subgroup_settings.save).to eq(true)
- end
- end
- end
-
- describe 'after update callback' do
- before do
- subgroup_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
- end
-
- it 'clears descendant locks' do
- group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: true)
-
- expect(subgroup_settings.reload.lock_delayed_project_removal).to eq(false)
- end
- end
-end
diff --git a/spec/models/concerns/ci/partitionable_spec.rb b/spec/models/concerns/ci/partitionable_spec.rb
new file mode 100644
index 00000000000..d53501ccc3d
--- /dev/null
+++ b/spec/models/concerns/ci/partitionable_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Partitionable do
+ describe 'partitionable models inclusion' do
+ let(:ci_model) { Class.new(Ci::ApplicationRecord) }
+
+ subject { ci_model.include(described_class) }
+
+ it 'raises an exception' do
+ expect { subject }
+ .to raise_error(/must be included in PARTITIONABLE_MODELS/)
+ end
+
+ context 'when is included in the models list' do
+ before do
+ stub_const("#{described_class}::Testing::PARTITIONABLE_MODELS", [ci_model.name])
+ end
+
+ it 'does not raise exceptions' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
index 2dd70188740..66ccd4559e5 100644
--- a/spec/models/concerns/counter_attribute_spec.rb
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -73,8 +73,8 @@ RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_
subject
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.exists(increment_key)).to be_falsey
- expect(redis.exists(flushed_key)).to eq(flushed_key_present)
+ expect(redis.exists?(increment_key)).to eq(false)
+ expect(redis.exists?(flushed_key)).to eq(flushed_key_present)
end
end
end
diff --git a/spec/models/concerns/id_in_ordered_spec.rb b/spec/models/concerns/id_in_ordered_spec.rb
index a3b434caac6..15da079f2bc 100644
--- a/spec/models/concerns/id_in_ordered_spec.rb
+++ b/spec/models/concerns/id_in_ordered_spec.rb
@@ -12,9 +12,10 @@ RSpec.describe IdInOrdered do
issue4 = create(:issue)
issue5 = create(:issue)
- expect(Issue.id_in_ordered([issue3.id, issue1.id, issue4.id, issue5.id, issue2.id])).to eq([
- issue3, issue1, issue4, issue5, issue2
- ])
+ expect(Issue.id_in_ordered([issue3.id, issue1.id, issue4.id, issue5.id, issue2.id])).to eq(
+ [
+ issue3, issue1, issue4, issue5, issue2
+ ])
end
context 'when the ids are not an array of integers' do
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 6763cc904b4..8842a36f40a 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -75,6 +75,24 @@ RSpec.describe Issuable do
it_behaves_like 'truncates the description to its allowed maximum length on import'
end
+
+ describe '#validate_assignee_length' do
+ let(:assignee_1) { create(:user) }
+ let(:assignee_2) { create(:user) }
+ let(:assignee_3) { create(:user) }
+
+ subject { create(:merge_request) }
+
+ before do
+ stub_const("Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS", 2)
+ end
+
+ it 'will not exceed the assignee limit' do
+ expect do
+ subject.update!(assignees: [assignee_1, assignee_2, assignee_3])
+ end.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
end
describe "Scope" do
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 9daea3438cb..7bbbd10ec8d 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -225,7 +225,7 @@ RSpec.describe Commit, 'Mentionable' do
end
context 'with external issue tracker' do
- let(:project) { create(:jira_project, :repository) }
+ let(:project) { create(:project, :with_jira_integration, :repository) }
it 'is true if external issues referenced' do
allow(commit.raw).to receive(:message).and_return 'JIRA-123'
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index 81ae30b7116..82aca13c929 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -47,18 +47,19 @@ RSpec.describe Noteable do
let(:discussions) { subject.discussions }
it 'includes discussions for diff notes, commit diff notes, commit notes, and regular notes' do
- expect(discussions).to eq([
- DiffDiscussion.new([active_diff_note1, active_diff_note2], subject),
- DiffDiscussion.new([active_diff_note3], subject),
- DiffDiscussion.new([outdated_diff_note1, outdated_diff_note2], subject),
- Discussion.new([discussion_note1, discussion_note2], subject),
- DiffDiscussion.new([commit_diff_note1, commit_diff_note2], subject),
- OutOfContextDiscussion.new([commit_note1, commit_note2], subject),
- Discussion.new([commit_discussion_note1, commit_discussion_note2], subject),
- Discussion.new([commit_discussion_note3], subject),
- IndividualNoteDiscussion.new([note1], subject),
- IndividualNoteDiscussion.new([note2], subject)
- ])
+ expect(discussions).to eq(
+ [
+ DiffDiscussion.new([active_diff_note1, active_diff_note2], subject),
+ DiffDiscussion.new([active_diff_note3], subject),
+ DiffDiscussion.new([outdated_diff_note1, outdated_diff_note2], subject),
+ Discussion.new([discussion_note1, discussion_note2], subject),
+ DiffDiscussion.new([commit_diff_note1, commit_diff_note2], subject),
+ OutOfContextDiscussion.new([commit_note1, commit_note2], subject),
+ Discussion.new([commit_discussion_note1, commit_discussion_note2], subject),
+ Discussion.new([commit_discussion_note3], subject),
+ IndividualNoteDiscussion.new([note1], subject),
+ IndividualNoteDiscussion.new([note2], subject)
+ ])
end
end
@@ -88,23 +89,24 @@ RSpec.describe Noteable do
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
- expect(discussions).to match([
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id),
- a_hash_including(table_name: 'resource_label_events', id: label_event.id),
- a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
- a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
- a_hash_including(table_name: 'resource_state_events', id: state_event.id)
- ])
+ expect(discussions).to match(
+ [
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id),
+ a_hash_including(table_name: 'resource_label_events', id: label_event.id),
+ a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
+ a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
+ a_hash_including(table_name: 'resource_state_events', id: state_event.id)
+ ])
end
it 'filters by comments only' do
@@ -112,19 +114,20 @@ RSpec.describe Noteable do
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
- expect(discussions).to match([
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
- a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id)
- ])
+ expect(discussions).to match(
+ [
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: active_diff_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: outdated_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_diff_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_note2.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: commit_discussion_note3.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note1.discussion_id),
+ a_hash_including(table_name: 'notes', discussion_id: note2.discussion_id)
+ ])
end
it 'filters by system notes only' do
@@ -132,12 +135,13 @@ RSpec.describe Noteable do
{ table_name: n.table_name, discussion_id: n.discussion_id, id: n.id }
end
- expect(discussions).to match([
- a_hash_including(table_name: 'resource_label_events', id: label_event.id),
- a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
- a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
- a_hash_including(table_name: 'resource_state_events', id: state_event.id)
- ])
+ expect(discussions).to match(
+ [
+ a_hash_including(table_name: 'resource_label_events', id: label_event.id),
+ a_hash_including(table_name: 'notes', discussion_id: system_note.discussion_id),
+ a_hash_including(table_name: 'resource_milestone_events', id: milestone_event.id),
+ a_hash_including(table_name: 'resource_state_events', id: state_event.id)
+ ])
end
end
diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb
index f7f68cb38d8..58a44fec3aa 100644
--- a/spec/models/concerns/participable_spec.rb
+++ b/spec/models/concerns/participable_spec.rb
@@ -186,6 +186,9 @@ RSpec.describe Participable do
expect(instance.visible_participants(user1)).to match_array [user1, user2]
end
end
+
+ it_behaves_like 'visible participants for issuable with read ability', :issue
+ it_behaves_like 'visible participants for issuable with read ability', :merge_request
end
describe '#participant?' do
diff --git a/spec/models/concerns/prometheus_adapter_spec.rb b/spec/models/concerns/prometheus_adapter_spec.rb
index 4158e8a0a4c..d3a44ac8403 100644
--- a/spec/models/concerns/prometheus_adapter_spec.rb
+++ b/spec/models/concerns/prometheus_adapter_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe PrometheusAdapter, :use_clean_rails_memory_store_caching do
include PrometheusHelpers
include ReactiveCachingHelpers
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
let(:integration) { project.prometheus_integration }
let(:described_class) do
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index cf66ba83e87..dc1002f3560 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -23,6 +23,12 @@ RSpec.shared_examples 'routable resource' do
end.not_to exceed_all_query_limit(control_count)
end
+ context 'when path is a negative number' do
+ it 'returns nil' do
+ expect(described_class.find_by_full_path(-1)).to be_nil
+ end
+ end
+
context 'with redirect routes' do
let_it_be(:redirect_route) { create(:redirect_route, source: record) }
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 3f6bbe795cc..e8db83b7144 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -314,52 +314,22 @@ RSpec.describe Ci::Runner, 'TokenAuthenticatable', :freeze_time do
describe '#token_expired?' do
subject { runner.token_expired? }
- context 'when enforce_runner_token_expires_at feature flag is disabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: false)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(false) }
- end
-
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
+ context 'when runner has no token expiration' do
+ let(:runner) { non_expirable_runner }
- it { is_expected.to eq(false) }
- end
-
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
-
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(false) }
end
- context 'when enforce_runner_token_expires_at feature flag is enabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: true)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(false) }
- end
+ context 'when runner token is not expired' do
+ let(:runner) { non_expired_runner }
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
-
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(false) }
+ end
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
+ context 'when runner token is expired' do
+ let(:runner) { expired_runner }
- it { is_expected.to eq(true) }
- end
+ it { is_expected.to eq(true) }
end
end
@@ -386,52 +356,22 @@ RSpec.describe Ci::Runner, 'TokenAuthenticatable', :freeze_time do
describe '.find_by_token' do
subject { Ci::Runner.find_by_token(runner.token) }
- context 'when enforce_runner_token_expires_at feature flag is disabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: false)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(non_expirable_runner) }
- end
-
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
-
- it { is_expected.to eq(non_expired_runner) }
- end
-
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
+ context 'when runner has no token expiration' do
+ let(:runner) { non_expirable_runner }
- it { is_expected.to eq(expired_runner) }
- end
+ it { is_expected.to eq(non_expirable_runner) }
end
- context 'when enforce_runner_token_expires_at feature flag is enabled' do
- before do
- stub_feature_flags(enforce_runner_token_expires_at: true)
- end
-
- context 'when runner has no token expiration' do
- let(:runner) { non_expirable_runner }
-
- it { is_expected.to eq(non_expirable_runner) }
- end
-
- context 'when runner token is not expired' do
- let(:runner) { non_expired_runner }
+ context 'when runner token is not expired' do
+ let(:runner) { non_expired_runner }
- it { is_expected.to eq(non_expired_runner) }
- end
+ it { is_expected.to eq(non_expired_runner) }
+ end
- context 'when runner token is expired' do
- let(:runner) { expired_runner }
+ context 'when runner token is expired' do
+ let(:runner) { expired_runner }
- it { is_expected.to be_nil }
- end
+ it { is_expected.to be_nil }
end
end
end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index a4329993e91..0033e9bbd08 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
api_url: 'http://registry.gitlab',
host_port: 'registry.gitlab')
- stub_request(:get, 'http://registry.gitlab/v2/group/test/my_image/tags/list')
+ stub_request(:get, "http://registry.gitlab/v2/group/test/my_image/tags/list?n=#{::ContainerRegistry::Client::DEFAULT_TAGS_PAGE_SIZE}")
.with(headers: { 'Accept' => ContainerRegistry::Client::ACCEPTED_TYPES.join(', ') })
.to_return(
status: 200,
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 87fa5289795..b91d836f82f 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Deployment do
let(:deployment) { create(:deployment) }
it 'delegates to environment_manual_actions' do
- expect(deployment.deployable).to receive(:environment_manual_actions).and_call_original
+ expect(deployment.deployable).to receive(:other_manual_actions).and_call_original
deployment.manual_actions
end
@@ -38,7 +38,7 @@ RSpec.describe Deployment do
let(:deployment) { create(:deployment) }
it 'delegates to environment_scheduled_actions' do
- expect(deployment.deployable).to receive(:environment_scheduled_actions).and_call_original
+ expect(deployment.deployable).to receive(:other_scheduled_actions).and_call_original
deployment.scheduled_actions
end
@@ -171,11 +171,22 @@ RSpec.describe Deployment do
end
it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
+ stub_feature_flags(prevent_outdated_deployment_jobs: false)
+
expect(Deployments::DropOlderDeploymentsWorker)
.to receive(:perform_async).once.with(deployment.id)
deployment.run!
end
+
+ it 'does not execute Deployments::DropOlderDeploymentsWorker when FF enabled' do
+ stub_feature_flags(prevent_outdated_deployment_jobs: true)
+
+ expect(Deployments::DropOlderDeploymentsWorker)
+ .not_to receive(:perform_async).with(deployment.id)
+
+ deployment.run!
+ end
end
context 'when deployment succeeded' do
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index d379ffeee02..a526f91ddc1 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -541,11 +541,12 @@ RSpec.describe DiffNote do
describe '#shas' do
it 'returns list of SHAs based on original_position' do
- expect(subject.shas).to match_array([
- position.base_sha,
- position.start_sha,
- position.head_sha
- ])
+ expect(subject.shas).to match_array(
+ [
+ position.base_sha,
+ position.start_sha,
+ position.head_sha
+ ])
end
context 'when position changes' do
@@ -554,14 +555,15 @@ RSpec.describe DiffNote do
end
it 'includes the new position SHAs' do
- expect(subject.shas).to match_array([
- position.base_sha,
- position.start_sha,
- position.head_sha,
- new_position.base_sha,
- new_position.start_sha,
- new_position.head_sha
- ])
+ expect(subject.shas).to match_array(
+ [
+ position.base_sha,
+ position.start_sha,
+ position.head_sha,
+ new_position.base_sha,
+ new_position.start_sha,
+ new_position.head_sha
+ ])
end
end
end
diff --git a/spec/models/diff_viewer/server_side_spec.rb b/spec/models/diff_viewer/server_side_spec.rb
index 28660b0d4b9..db0814af422 100644
--- a/spec/models/diff_viewer/server_side_spec.rb
+++ b/spec/models/diff_viewer/server_side_spec.rb
@@ -17,10 +17,30 @@ RSpec.describe DiffViewer::ServerSide do
subject { viewer_class.new(diff_file) }
describe '#prepare!' do
- it 'loads all diff file data' do
- expect(Blob).to receive(:lazy).at_least(:twice)
+ before do
+ stub_feature_flags(disable_load_entire_blob_for_diff_viewer: feature_flag_enabled)
+ end
+
+ context 'when the disable_load_entire_blob_for_diff_viewer flag is disabled' do
+ let(:feature_flag_enabled) { false }
- subject.prepare!
+ it 'loads all diff file data' do
+ subject
+ expect(diff_file).to receive_message_chain(:old_blob, :load_all_data!)
+ expect(diff_file).to receive_message_chain(:new_blob, :load_all_data!)
+ subject.prepare!
+ end
+ end
+
+ context 'when the disable_load_entire_blob_for_diff_viewer flag is enabled' do
+ let(:feature_flag_enabled) { true }
+
+ it 'does not load file data' do
+ subject
+ expect(diff_file).not_to receive(:old_blob)
+ expect(diff_file).not_to receive(:new_blob)
+ subject.prepare!
+ end
end
end
diff --git a/spec/models/discussion_spec.rb b/spec/models/discussion_spec.rb
index 212619a1c3d..7bd3c5743a6 100644
--- a/spec/models/discussion_spec.rb
+++ b/spec/models/discussion_spec.rb
@@ -37,10 +37,11 @@ RSpec.describe Discussion do
describe '.build_collection' do
it 'returns an array of discussions of the right type' do
discussions = described_class.build_collection([first_note, second_note, third_note], merge_request)
- expect(discussions).to eq([
- DiffDiscussion.new([first_note, second_note], merge_request),
- DiffDiscussion.new([third_note], merge_request)
- ])
+ expect(discussions).to eq(
+ [
+ DiffDiscussion.new([first_note, second_note], merge_request),
+ DiffDiscussion.new([third_note], merge_request)
+ ])
end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 1e15b09a069..a442856d993 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -390,7 +390,10 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'staging' | described_class.tiers[:staging]
'pre-prod' | described_class.tiers[:staging]
'blue-kit-stage' | described_class.tiers[:staging]
- 'pre-prod' | described_class.tiers[:staging]
+ 'nonprod' | described_class.tiers[:staging]
+ 'nonlive' | described_class.tiers[:staging]
+ 'non-prod' | described_class.tiers[:staging]
+ 'non-live' | described_class.tiers[:staging]
'gprd' | described_class.tiers[:production]
'gprd-cny' | described_class.tiers[:production]
'production' | described_class.tiers[:production]
@@ -1291,7 +1294,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
context 'when the environment is available' do
context 'with a deployment service' do
- let(:project) { create(:prometheus_project, :repository) }
+ let(:project) { create(:project, :with_prometheus_integration, :repository) }
context 'and a deployment' do
let!(:deployment) { create(:deployment, environment: environment) }
@@ -1364,7 +1367,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when the environment is unavailable' do
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
before do
environment.stop
@@ -1391,7 +1394,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
describe '#metrics' do
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
subject { environment.metrics }
@@ -1427,7 +1430,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
describe '#additional_metrics' do
- let(:project) { create(:prometheus_project) }
+ let(:project) { create(:project, :with_prometheus_integration) }
let(:metric_params) { [] }
subject { environment.additional_metrics(*metric_params) }
@@ -1617,44 +1620,30 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
nil | nil
'never' | nil
end
- with_them do
- it 'sets correct auto_stop_in' do
- freeze_time do
- if expected_result.is_a?(Integer) || expected_result.nil?
- subject
- expect(environment.auto_stop_in).to eq(expected_result)
- else
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- an_instance_of(expected_result),
- project_id: environment.project_id,
- environment_id: environment.id
- )
+ with_them do
+ shared_examples 'for given values expected result is set' do
+ it do
+ freeze_time do
+ if expected_result.is_a?(Integer) || expected_result.nil?
+ subject
- expect { subject }.to raise_error(expected_result)
+ expect(environment.auto_stop_in).to eq(expected_result)
+ else
+ expect { subject }.to raise_error(expected_result)
+ end
end
end
end
- end
- context 'resets earlier value' do
- let(:environment) { create(:environment, auto_stop_at: 1.day.since.round) }
-
- where(:value, :expected_result) do
- '2 days' | 2.days.to_i
- '1 week' | 1.week.to_i
- '2h20min' | 2.hours.to_i + 20.minutes.to_i
- '' | nil
- 'never' | nil
+ context 'new assignment sets correct auto_stop_in' do
+ include_examples 'for given values expected result is set'
end
- with_them do
- it 'assigns new value' do
- freeze_time do
- subject
- expect(environment.auto_stop_in).to eq(expected_result)
- end
- end
+ context 'resets older value' do
+ let(:environment) { create(:environment, auto_stop_at: 1.day.since.round) }
+
+ include_examples 'for given values expected result is set'
end
end
end
diff --git a/spec/models/factories_spec.rb b/spec/models/factories_spec.rb
index 2993b2aee58..c931c96bafd 100644
--- a/spec/models/factories_spec.rb
+++ b/spec/models/factories_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe 'factories' do
[:issue_customer_relations_contact, :for_contact],
[:issue_customer_relations_contact, :for_issue],
[:package_file, :object_storage],
+ [:rpm_repository_file, :object_storage],
[:pages_domain, :without_certificate],
[:pages_domain, :without_key],
[:pages_domain, :with_missing_chain],
@@ -79,7 +80,6 @@ RSpec.describe 'factories' do
member_task
milestone_release
namespace
- project_broken_repo
project_namespace
project_repository
prometheus_alert
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 2ce75fb1290..68c2d1d3995 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2293,10 +2293,11 @@ RSpec.describe Group do
it 'clears both self and descendant cache when the parent value is updated' do
expect(Rails.cache).to receive(:delete_multi)
.with(
- match_array([
- start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{parent.id}"),
- start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{group.id}")
- ])
+ match_array(
+ [
+ start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{parent.id}"),
+ start_with("namespaces:{#{parent.traversal_ids.first}}:first_auto_devops_config:#{group.id}")
+ ])
)
parent.update!(auto_devops_enabled: true)
@@ -3312,16 +3313,6 @@ RSpec.describe Group do
expect(group.packages_policy_subject).to be_a(Packages::Policies::Group)
expect(group.packages_policy_subject.group).to eq(group)
end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(read_package_policy_rule: false)
- end
-
- it 'returns group' do
- expect(group.packages_policy_subject).to eq(group)
- end
- end
end
describe '#gitlab_deploy_token' do
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 036d2effc0f..da8c10b67a6 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -139,6 +139,16 @@ RSpec.describe WebHook do
it { is_expected.to contain_exactly(:token, :url, :url_variables) }
end
+ describe '.web_hooks_disable_failed?' do
+ it 'returns true when feature is enabled for parent' do
+ second_hook = build(:project_hook, project: create(:project))
+ stub_feature_flags(web_hooks_disable_failed: [false, second_hook.project])
+
+ expect(described_class.web_hooks_disable_failed?(hook)).to eq(false)
+ expect(described_class.web_hooks_disable_failed?(second_hook)).to eq(true)
+ end
+ end
+
describe 'execute' do
let(:data) { { key: 'value' } }
let(:hook_name) { 'project hook' }
@@ -170,7 +180,7 @@ RSpec.describe WebHook do
end
it 'does not async execute non-executable hooks' do
- hook.update!(disabled_until: 1.day.from_now)
+ allow(hook).to receive(:executable?).and_return(false)
expect(WebHookService).not_to receive(:new)
@@ -238,17 +248,18 @@ RSpec.describe WebHook do
[
[0, :not_set, true],
[0, :past, true],
- [0, :future, false],
- [0, :now, false],
+ [0, :future, true],
+ [0, :now, true],
[1, :not_set, true],
[1, :past, true],
- [1, :future, false],
+ [1, :future, true],
[3, :not_set, true],
[3, :past, true],
- [3, :future, false],
+ [3, :future, true],
[4, :not_set, false],
- [4, :past, false],
- [4, :future, false]
+ [4, :past, true], # expired suspension
+ [4, :now, false], # active suspension
+ [4, :future, false] # active suspension
]
end
@@ -315,7 +326,7 @@ RSpec.describe WebHook do
end
it 'is twice the initial value' do
- expect(hook.next_backoff).to eq(20.minutes)
+ expect(hook.next_backoff).to eq(2 * described_class::INITIAL_BACKOFF)
end
end
@@ -325,7 +336,7 @@ RSpec.describe WebHook do
end
it 'grows exponentially' do
- expect(hook.next_backoff).to eq(80.minutes)
+ expect(hook.next_backoff).to eq(2 * 2 * 2 * described_class::INITIAL_BACKOFF)
end
end
@@ -357,6 +368,7 @@ RSpec.describe WebHook do
end
it 'makes a hook executable if it is currently backed off' do
+ hook.recent_failures = 1000
hook.disabled_until = 1.hour.from_now
expect { hook.enable! }.to change(hook, :executable?).from(false).to(true)
@@ -378,55 +390,71 @@ RSpec.describe WebHook do
end
describe 'backoff!' do
- it 'sets disabled_until to the next backoff' do
- expect { hook.backoff! }.to change(hook, :disabled_until).to(hook.next_backoff.from_now)
- end
+ context 'when we have not backed off before' do
+ it 'does not disable the hook' do
+ expect { hook.backoff! }.not_to change(hook, :executable?).from(true)
+ end
- it 'increments the backoff count' do
- expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
+ it 'increments the recent_failures count' do
+ expect { hook.backoff! }.to change(hook, :recent_failures).by(1)
+ end
end
- context 'when the hook is permanently disabled' do
+ context 'when we have exhausted the grace period' do
before do
- allow(hook).to receive(:permanently_disabled?).and_return(true)
- end
-
- it 'does not set disabled_until' do
- expect { hook.backoff! }.not_to change(hook, :disabled_until)
+ hook.update!(recent_failures: described_class::FAILURE_THRESHOLD)
end
- it 'does not increment the backoff count' do
- expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ it 'sets disabled_until to the next backoff' do
+ expect { hook.backoff! }.to change(hook, :disabled_until).to(hook.next_backoff.from_now)
end
- end
- context 'when we have backed off MAX_FAILURES times' do
- before do
- stub_const("#{described_class}::MAX_FAILURES", 5)
- 5.times { hook.backoff! }
+ it 'increments the backoff count' do
+ expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
end
- it 'does not let the backoff count exceed the maximum failure count' do
- expect { hook.backoff! }.not_to change(hook, :backoff_count)
- end
+ context 'when the hook is permanently disabled' do
+ before do
+ allow(hook).to receive(:permanently_disabled?).and_return(true)
+ end
- it 'does not change disabled_until', :skip_freeze_time do
- travel_to(hook.disabled_until - 1.minute) do
+ it 'does not set disabled_until' do
expect { hook.backoff! }.not_to change(hook, :disabled_until)
end
+
+ it 'does not increment the backoff count' do
+ expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ end
end
- it 'changes disabled_until when it has elapsed', :skip_freeze_time do
- travel_to(hook.disabled_until + 1.minute) do
- expect { hook.backoff! }.to change { hook.disabled_until }
- expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
+ context 'when we have backed off MAX_FAILURES times' do
+ before do
+ stub_const("#{described_class}::MAX_FAILURES", 5)
+ (described_class::FAILURE_THRESHOLD + 5).times { hook.backoff! }
+ end
+
+ it 'does not let the backoff count exceed the maximum failure count' do
+ expect { hook.backoff! }.not_to change(hook, :backoff_count)
+ end
+
+ it 'does not change disabled_until', :skip_freeze_time do
+ travel_to(hook.disabled_until - 1.minute) do
+ expect { hook.backoff! }.not_to change(hook, :disabled_until)
+ end
+ end
+
+ it 'changes disabled_until when it has elapsed', :skip_freeze_time do
+ travel_to(hook.disabled_until + 1.minute) do
+ expect { hook.backoff! }.to change { hook.disabled_until }
+ expect(hook.backoff_count).to eq(described_class::MAX_FAILURES)
+ end
end
end
- end
- include_examples 'is tolerant of invalid records' do
- def run_expectation
- expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
+ include_examples 'is tolerant of invalid records' do
+ def run_expectation
+ expect { hook.backoff! }.to change(hook, :backoff_count).by(1)
+ end
end
end
end
@@ -468,8 +496,19 @@ RSpec.describe WebHook do
expect(hook).not_to be_temporarily_disabled
end
+ it 'allows FAILURE_THRESHOLD initial failures before we back-off' do
+ described_class::FAILURE_THRESHOLD.times do
+ hook.backoff!
+ expect(hook).not_to be_temporarily_disabled
+ end
+
+ hook.backoff!
+ expect(hook).to be_temporarily_disabled
+ end
+
context 'when hook has been told to back off' do
before do
+ hook.update!(recent_failures: described_class::FAILURE_THRESHOLD)
hook.backoff!
end
@@ -550,6 +589,7 @@ RSpec.describe WebHook do
context 'when hook has been backed off' do
before do
+ hook.update!(recent_failures: described_class::FAILURE_THRESHOLD + 1)
hook.disabled_until = 1.hour.from_now
end
diff --git a/spec/models/incident_management/timeline_event_spec.rb b/spec/models/incident_management/timeline_event_spec.rb
index fea391acda3..d288cc1a75d 100644
--- a/spec/models/incident_management/timeline_event_spec.rb
+++ b/spec/models/incident_management/timeline_event_spec.rb
@@ -13,6 +13,12 @@ RSpec.describe IncidentManagement::TimelineEvent do
it { is_expected.to belong_to(:incident) }
it { is_expected.to belong_to(:updated_by_user) }
it { is_expected.to belong_to(:promoted_from_note) }
+ it { is_expected.to have_many(:timeline_event_tag_links).class_name('IncidentManagement::TimelineEventTagLink') }
+
+ it do
+ is_expected.to have_many(:timeline_event_tags)
+ .class_name('IncidentManagement::TimelineEventTag').through(:timeline_event_tag_links)
+ end
end
describe 'validations' do
@@ -22,7 +28,6 @@ RSpec.describe IncidentManagement::TimelineEvent do
it { is_expected.to validate_presence_of(:incident) }
it { is_expected.to validate_presence_of(:note) }
it { is_expected.to validate_length_of(:note).is_at_most(10_000) }
- it { is_expected.to validate_presence_of(:note_html) }
it { is_expected.to validate_length_of(:note_html).is_at_most(10_000) }
it { is_expected.to validate_presence_of(:occurred_at) }
it { is_expected.to validate_presence_of(:action) }
diff --git a/spec/models/incident_management/timeline_event_tag_link_spec.rb b/spec/models/incident_management/timeline_event_tag_link_spec.rb
new file mode 100644
index 00000000000..fe31a6604c1
--- /dev/null
+++ b/spec/models/incident_management/timeline_event_tag_link_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::TimelineEventTagLink do
+ describe 'associations' do
+ it { is_expected.to belong_to(:timeline_event) }
+ it { is_expected.to belong_to(:timeline_event_tag) }
+ end
+end
diff --git a/spec/models/incident_management/timeline_event_tag_spec.rb b/spec/models/incident_management/timeline_event_tag_spec.rb
new file mode 100644
index 00000000000..cff8ad8469f
--- /dev/null
+++ b/spec/models/incident_management/timeline_event_tag_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::TimelineEventTag do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:timeline_event_tag_links).class_name('IncidentManagement::TimelineEventTagLink') }
+
+ it {
+ is_expected.to have_many(:timeline_events)
+ .class_name('IncidentManagement::TimelineEvent').through(:timeline_event_tag_links)
+ }
+ end
+
+ describe 'validations' do
+ subject { build(:incident_management_timeline_event_tag) }
+
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to([:project_id]) }
+
+ it { is_expected.to allow_value('Test tag 1').for(:name) }
+ it { is_expected.not_to allow_value('Test tag, 1').for(:name) }
+ it { is_expected.not_to allow_value('').for(:name) }
+ it { is_expected.not_to allow_value('s' * 256).for(:name) }
+ end
+end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 950f2c639fb..baa3443b4c5 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -971,11 +971,12 @@ RSpec.describe Integration do
describe '#secret_fields' do
it 'returns all fields with type `password`' do
- allow(subject).to receive(:fields).and_return([
- { name: 'password', type: 'password' },
- { name: 'secret', type: 'password' },
- { name: 'public', type: 'text' }
- ])
+ allow(subject).to receive(:fields).and_return(
+ [
+ { name: 'password', type: 'password' },
+ { name: 'secret', type: 'password' },
+ { name: 'public', type: 'text' }
+ ])
expect(subject.secret_fields).to match_array(%w[password secret])
end
diff --git a/spec/models/integrations/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index 4a86322cdaf..ff9f30efdca 100644
--- a/spec/models/integrations/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -47,14 +47,15 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
it 'returns a message regarding opening of issues' do
expect(subject.pretext).to eq(
'[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> opened by Test User (test.user)')
- expect(subject.attachments).to eq([
- {
- title: "#100 Issue title",
- title_link: "http://url.com",
- text: "issue description",
- color: color
- }
- ])
+ expect(subject.attachments).to eq(
+ [
+ {
+ title: "#100 Issue title",
+ title_link: "http://url.com",
+ text: "issue description",
+ color: color
+ }
+ ])
end
end
diff --git a/spec/models/integrations/chat_message/wiki_page_message_spec.rb b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
index 16659311c52..dae8d293354 100644
--- a/spec/models/integrations/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
@@ -71,12 +71,13 @@ RSpec.describe Integrations::ChatMessage::WikiPageMessage do
end
it 'returns the commit message for a new wiki page' do
- expect(subject.attachments).to eq([
- {
- text: commit_message,
- color: color
- }
- ])
+ expect(subject.attachments).to eq(
+ [
+ {
+ text: commit_message,
+ color: color
+ }
+ ])
end
end
@@ -86,12 +87,13 @@ RSpec.describe Integrations::ChatMessage::WikiPageMessage do
end
it 'returns the commit message for an updated wiki page' do
- expect(subject.attachments).to eq([
- {
- text: commit_message,
- color: color
- }
- ])
+ expect(subject.attachments).to eq(
+ [
+ {
+ text: commit_message,
+ color: color
+ }
+ ])
end
end
end
diff --git a/spec/models/integrations/datadog_spec.rb b/spec/models/integrations/datadog_spec.rb
index b7da6a79e44..71a5bbc4db1 100644
--- a/spec/models/integrations/datadog_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -47,6 +47,10 @@ RSpec.describe Integrations::Datadog do
Gitlab::DataBuilder::ArchiveTrace.build(build)
end
+ it_behaves_like Integrations::ResetSecretFields do
+ let(:integration) { instance }
+ end
+
it_behaves_like Integrations::HasWebHook do
let(:integration) { instance }
let(:hook_url) { "#{described_class::URL_TEMPLATE % { datadog_domain: dd_site }}?dd-api-key={api_key}&env=#{dd_env}&service=#{dd_service}" }
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
index 26b43fa3313..9ab37a92e89 100644
--- a/spec/models/integrations/harbor_spec.rb
+++ b/spec/models/integrations/harbor_spec.rb
@@ -7,7 +7,11 @@ RSpec.describe Integrations::Harbor do
let(:project_name) { 'testproject' }
let(:username) { 'harborusername' }
let(:password) { 'harborpassword' }
- let(:harbor_integration) { create(:harbor_integration) }
+ let(:harbor_integration) { build(:harbor_integration) }
+
+ it_behaves_like Integrations::ResetSecretFields do
+ let(:integration) { described_class.new }
+ end
describe "masked password" do
subject { build(:harbor_integration) }
@@ -66,6 +70,8 @@ RSpec.describe Integrations::Harbor do
end
context 'ci variables' do
+ let(:harbor_integration) { create(:harbor_integration) }
+
it 'returns vars when harbor_integration is activated' do
ci_vars = [
{ key: 'HARBOR_URL', value: url },
@@ -94,66 +100,4 @@ RSpec.describe Integrations::Harbor do
end
end
end
-
- describe 'before_validation :reset_username_and_password' do
- context 'when username/password was previously set' do
- it 'resets username and password if url changed' do
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to be_nil
- expect(harbor_integration.username).to be_nil
- end
-
- it 'does not reset password if username changed' do
- harbor_integration.username = 'newusername'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to eq('harborpassword')
- end
-
- it 'does not reset username if password changed' do
- harbor_integration.password = 'newpassword'
- harbor_integration.valid?
-
- expect(harbor_integration.username).to eq('harborusername')
- end
-
- it "does not reset password if new url is set together with password, even if it's the same password" do
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.password = 'harborpassword'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to eq('harborpassword')
- expect(harbor_integration.username).to be_nil
- expect(harbor_integration.url).to eq('https://anotherharbor.com')
- end
-
- it "does not reset username if new url is set together with username, even if it's the same username" do
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.username = 'harborusername'
- harbor_integration.valid?
-
- expect(harbor_integration.password).to be_nil
- expect(harbor_integration.username).to eq('harborusername')
- expect(harbor_integration.url).to eq('https://anotherharbor.com')
- end
- end
-
- it 'saves password if new url is set together with password when no password was previously set' do
- harbor_integration.password = nil
- harbor_integration.username = nil
-
- harbor_integration.url = 'https://anotherharbor.com'
- harbor_integration.password = 'newpassword'
- harbor_integration.username = 'newusername'
- harbor_integration.save!
-
- expect(harbor_integration).to have_attributes(
- url: 'https://anotherharbor.com',
- password: 'newpassword',
- username: 'newusername'
- )
- end
- end
end
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index a52a4514ebe..9f928442b28 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -81,9 +81,10 @@ RSpec.describe Integrations::Jira do
jira_integration.jira_issue_transition_id = 'foo bar'
expect(jira_integration).not_to be_valid
- expect(jira_integration.errors.full_messages).to eq([
- 'Jira issue transition IDs must be a list of numbers that can be split with , or ;'
- ])
+ expect(jira_integration.errors.full_messages).to eq(
+ [
+ 'Jira issue transition IDs must be a list of numbers that can be split with , or ;'
+ ])
end
end
end
@@ -213,6 +214,8 @@ RSpec.describe Integrations::Jira do
'EXT_EXT-1234' | 'EXT_EXT-1234'
'EXT3_EXT-1234' | 'EXT3_EXT-1234'
'3EXT_EXT-1234' | ''
+ 'CVE-2022-123' | ''
+ 'CVE-123' | 'CVE-123'
end
with_them do
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index b1b3e42b5e9..b6de2bb7176 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -81,10 +81,14 @@ RSpec.describe Integrations::MicrosoftTeams do
let(:opts) { { title: 'Awesome issue', description: 'please fix' } }
let(:issues_sample_data) do
service = Issues::CreateService.new(project: project, current_user: user, params: opts, spam_params: nil)
- issue = service.execute
+ issue = service.execute[:issue]
service.hook_data(issue, 'open')
end
+ before do
+ project.add_developer(user)
+ end
+
it "calls Microsoft Teams API" do
chat_integration.execute(issues_sample_data)
diff --git a/spec/models/integrations/prometheus_spec.rb b/spec/models/integrations/prometheus_spec.rb
index ae965ed78d1..3971511872b 100644
--- a/spec/models/integrations/prometheus_spec.rb
+++ b/spec/models/integrations/prometheus_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
include PrometheusHelpers
include ReactiveCachingHelpers
- let_it_be_with_reload(:project) { create(:prometheus_project) }
+ let_it_be_with_reload(:project) { create(:project, :with_prometheus_integration) }
let(:integration) { project.prometheus_integration }
@@ -318,7 +318,7 @@ RSpec.describe Integrations::Prometheus, :use_clean_rails_memory_store_caching,
context 'cluster belongs to projects group' do
let_it_be(:group) { create(:group) }
- let(:project) { create(:prometheus_project, group: group) }
+ let(:project) { create(:project, :with_prometheus_integration, group: group) }
let(:cluster) { create(:cluster_for_group, groups: [group]) }
it 'returns true' do
diff --git a/spec/models/jira_connect/public_key_spec.rb b/spec/models/jira_connect/public_key_spec.rb
new file mode 100644
index 00000000000..2e79a3ca4d2
--- /dev/null
+++ b/spec/models/jira_connect/public_key_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::PublicKey do
+ describe '.create!' do
+ let(:key) { 'key123' }
+
+ subject(:create_public_key) { described_class.create!(key: key) }
+
+ it 'only accepts valid public keys' do
+ expect { create_public_key }.to raise_error(ArgumentError, 'Invalid public key')
+ end
+
+ shared_examples 'creates a jira connect public key' do
+ it 'generates a Uuid' do
+ expect(create_public_key.uuid).to match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/)
+ end
+
+ it 'sets the key attribute' do
+ expect(create_public_key.key).to eq(expected_key)
+ end
+
+ it 'persists the values' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with(anything, expected_key, anything)
+ end
+
+ create_public_key
+ end
+ end
+
+ context 'with OpenSSL::PKey::RSA object' do
+ let(:key) { OpenSSL::PKey::RSA.generate(3072).public_key }
+ let(:expected_key) { key.to_s }
+
+ it_behaves_like 'creates a jira connect public key'
+ end
+
+ context 'with string public key' do
+ let(:key) { OpenSSL::PKey::RSA.generate(3072).public_key.to_s }
+ let(:expected_key) { key }
+
+ it_behaves_like 'creates a jira connect public key'
+ end
+ end
+
+ describe '.find' do
+ let(:uuid) { '1234' }
+
+ subject(:find_public_key) { described_class.find(uuid) }
+
+ it 'raises an error' do
+ expect { find_public_key }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ context 'when the public key exists' do
+ let_it_be(:key) { OpenSSL::PKey::RSA.generate(3072).public_key }
+ let_it_be(:public_key) { described_class.create!(key: key) }
+
+ let(:uuid) { public_key.uuid }
+
+ it 'loads the public key', :aggregate_failures do
+ expect(find_public_key).to be_kind_of(described_class)
+ expect(find_public_key.uuid).to eq(public_key.uuid)
+ expect(find_public_key.key).to eq(key.to_s)
+ end
+ end
+ end
+
+ describe '#save!' do
+ let(:key) { OpenSSL::PKey::RSA.generate(3072).public_key }
+ let(:public_key) { described_class.new(key: key, uuid: '123') }
+ let(:jira_connect_installation) { build(:jira_connect_installation) }
+
+ subject(:save_public_key) { public_key.save! }
+
+ it 'persists the values' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis).to receive(:set).with(anything, key.to_s, ex: 5.minutes.to_i)
+ end
+
+ save_public_key
+ end
+
+ it 'returns itself' do
+ expect(save_public_key).to eq(public_key)
+ end
+ end
+end
diff --git a/spec/models/jira_connect_installation_spec.rb b/spec/models/jira_connect_installation_spec.rb
index 9c1f7c678a9..e57d3e78a4e 100644
--- a/spec/models/jira_connect_installation_spec.rb
+++ b/spec/models/jira_connect_installation_spec.rb
@@ -20,29 +20,53 @@ RSpec.describe JiraConnectInstallation do
it { is_expected.not_to allow_value('not/a/url').for(:instance_url) }
end
- describe '.for_project' do
- let(:other_group) { create(:group) }
- let(:parent_group) { create(:group) }
- let(:group) { create(:group, parent: parent_group) }
- let(:project) { create(:project, group: group) }
+ describe 'scopes' do
+ let_it_be(:jira_connect_subscription) { create(:jira_connect_subscription) }
- subject { described_class.for_project(project) }
+ describe '.for_project' do
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:project) { create(:project, group: group) }
- it 'returns installations with subscriptions for project' do
- sub_on_project_namespace = create(:jira_connect_subscription, namespace: group)
- sub_on_ancestor_namespace = create(:jira_connect_subscription, namespace: parent_group)
+ subject { described_class.for_project(project) }
- # Subscription on other group that shouldn't be returned
- create(:jira_connect_subscription, namespace: other_group)
+ it 'returns installations with subscriptions for project' do
+ sub_on_project_namespace = create(:jira_connect_subscription, namespace: group)
+ sub_on_ancestor_namespace = create(:jira_connect_subscription, namespace: parent_group)
- expect(subject).to contain_exactly(sub_on_project_namespace.installation, sub_on_ancestor_namespace.installation)
+ # Subscription on other group that shouldn't be returned
+ create(:jira_connect_subscription, namespace: other_group)
+
+ expect(subject).to contain_exactly(
+ sub_on_project_namespace.installation, sub_on_ancestor_namespace.installation
+ )
+ end
+
+ it 'returns distinct installations' do
+ subscription = create(:jira_connect_subscription, namespace: group)
+ create(:jira_connect_subscription, namespace: parent_group, installation: subscription.installation)
+
+ expect(subject).to contain_exactly(subscription.installation)
+ end
end
- it 'returns distinct installations' do
- subscription = create(:jira_connect_subscription, namespace: group)
- create(:jira_connect_subscription, namespace: parent_group, installation: subscription.installation)
+ describe '.direct_installations' do
+ subject { described_class.direct_installations }
+
+ it { is_expected.to contain_exactly(jira_connect_subscription.installation) }
+ end
+
+ describe '.proxy_installations' do
+ subject { described_class.proxy_installations }
+
+ it { is_expected.to be_empty }
- expect(subject).to contain_exactly(subscription.installation)
+ context 'with an installation on a self-managed instance' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'http://self-managed-gitlab.com') }
+
+ it { is_expected.to contain_exactly(installation) }
+ end
end
end
@@ -71,4 +95,46 @@ RSpec.describe JiraConnectInstallation do
end
end
end
+
+ describe 'audience_url' do
+ let_it_be(:installation) { create(:jira_connect_installation) }
+
+ subject(:audience) { installation.audience_url }
+
+ it { is_expected.to eq(nil) }
+
+ context 'when proxy installation' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'https://example.com') }
+
+ it { is_expected.to eq('https://example.com/-/jira_connect') }
+ end
+ end
+
+ describe 'audience_installed_event_url' do
+ let_it_be(:installation) { create(:jira_connect_installation) }
+
+ subject(:audience) { installation.audience_installed_event_url }
+
+ it { is_expected.to eq(nil) }
+
+ context 'when proxy installation' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'https://example.com') }
+
+ it { is_expected.to eq('https://example.com/-/jira_connect/events/installed') }
+ end
+ end
+
+ describe 'proxy?' do
+ let_it_be(:installation) { create(:jira_connect_installation) }
+
+ subject { installation.proxy? }
+
+ it { is_expected.to eq(false) }
+
+ context 'when instance_url is present' do
+ let_it_be(:installation) { create(:jira_connect_installation, instance_url: 'https://example.com') }
+
+ it { is_expected.to eq(true) }
+ end
+ end
end
diff --git a/spec/models/label_note_spec.rb b/spec/models/label_note_spec.rb
index 145ddd44834..6658d42f25e 100644
--- a/spec/models/label_note_spec.rb
+++ b/spec/models/label_note_spec.rb
@@ -18,9 +18,10 @@ RSpec.describe LabelNote do
it_behaves_like 'label note created from events'
it 'includes a link to the list of issues filtered by the label' do
- note = described_class.from_events([
- create(:resource_label_event, label: label, issue: resource)
- ])
+ note = described_class.from_events(
+ [
+ create(:resource_label_event, label: label, issue: resource)
+ ])
expect(note.note_html).to include(project_issues_path(project, label_name: label.title))
end
@@ -32,9 +33,10 @@ RSpec.describe LabelNote do
it_behaves_like 'label note created from events'
it 'includes a link to the list of merge requests filtered by the label' do
- note = described_class.from_events([
- create(:resource_label_event, label: label, merge_request: resource)
- ])
+ note = described_class.from_events(
+ [
+ create(:resource_label_event, label: label, merge_request: resource)
+ ])
expect(note.note_html).to include(project_merge_requests_path(project, label_name: label.title))
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 7b75a6ee1c2..04df8ecc882 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -178,8 +178,8 @@ RSpec.describe Member do
end
context 'member role is associated' do
- let_it_be(:member_role) do
- create(:member_role, members: [member])
+ let!(:member_role) do
+ create(:member_role, members: [member], base_access_level: Gitlab::Access::DEVELOPER)
end
context 'member role matches access level' do
@@ -201,7 +201,9 @@ RSpec.describe Member do
member.access_level = Gitlab::Access::MAINTAINER
expect(member).not_to be_valid
- expect(member.errors.full_messages).to include( "Access level cannot be changed since member is associated with a custom role")
+ expect(member.errors.full_messages).to include(
+ "Access level cannot be changed since member is associated with a custom role"
+ )
end
end
end
@@ -824,22 +826,6 @@ RSpec.describe Member do
expect(user.authorized_projects.reload).to include(project)
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(allow_non_blocking_member_refresh: false)
- end
-
- it 'successfully completes a blocking refresh', :delete, :sidekiq_inline do
- member.blocking_refresh = false
-
- expect(member).to receive(:refresh_member_authorized_projects).with(blocking: true).and_call_original
-
- member.accept_invite!(user)
-
- expect(user.authorized_projects.reload).to include(project)
- end
- end
end
it 'does not accept the invite if saving a new user fails' do
diff --git a/spec/models/members/member_role_spec.rb b/spec/models/members/member_role_spec.rb
index e8993491918..e2691e2e78c 100644
--- a/spec/models/members/member_role_spec.rb
+++ b/spec/models/members/member_role_spec.rb
@@ -11,7 +11,39 @@ RSpec.describe MemberRole do
describe 'validation' do
subject { described_class.new }
- it { is_expected.to validate_presence_of(:namespace_id) }
+ it { is_expected.to validate_presence_of(:namespace) }
it { is_expected.to validate_presence_of(:base_access_level) }
+
+ context 'for namespace' do
+ subject { build(:member_role) }
+
+ let_it_be(:root_group) { create(:group) }
+
+ context 'when namespace is a subgroup' do
+ it 'is invalid' do
+ subgroup = create(:group, parent: root_group)
+ subject.namespace = subgroup
+
+ expect(subject).to be_invalid
+ end
+ end
+
+ context 'when namespace is a root group' do
+ it 'is valid' do
+ subject.namespace = root_group
+
+ expect(subject).to be_valid
+ end
+ end
+
+ context 'when namespace is not present' do
+ it 'is invalid with a different error message' do
+ subject.namespace = nil
+
+ expect(subject).to be_invalid
+ expect(subject.errors.full_messages).to eq(["Namespace can't be blank"])
+ end
+ end
+ end
end
end
diff --git a/spec/models/merge_request/cleanup_schedule_spec.rb b/spec/models/merge_request/cleanup_schedule_spec.rb
index 9c50b64f2bd..1f1f33db5ed 100644
--- a/spec/models/merge_request/cleanup_schedule_spec.rb
+++ b/spec/models/merge_request/cleanup_schedule_spec.rb
@@ -111,12 +111,13 @@ RSpec.describe MergeRequest::CleanupSchedule do
let!(:cleanup_schedule_7) { create(:merge_request_cleanup_schedule, :failed, scheduled_at: 5.days.ago) }
it 'returns records that are scheduled before or on current time and unstarted (ordered by scheduled first)' do
- expect(described_class.scheduled_and_unstarted).to eq([
- cleanup_schedule_2,
- cleanup_schedule_1,
- cleanup_schedule_5,
- cleanup_schedule_4
- ])
+ expect(described_class.scheduled_and_unstarted).to eq(
+ [
+ cleanup_schedule_2,
+ cleanup_schedule_1,
+ cleanup_schedule_5,
+ cleanup_schedule_4
+ ])
end
end
diff --git a/spec/models/merge_request_diff_file_spec.rb b/spec/models/merge_request_diff_file_spec.rb
index 7dc550a6c93..f107a56c1b6 100644
--- a/spec/models/merge_request_diff_file_spec.rb
+++ b/spec/models/merge_request_diff_file_spec.rb
@@ -220,5 +220,25 @@ RSpec.describe MergeRequestDiffFile do
file.utf8_diff
end
end
+
+ context 'when exception is raised' do
+ it 'logs exception and returns an empty string' do
+ allow(file).to receive(:diff).and_raise(StandardError, 'Error!')
+
+ expect(Gitlab::AppLogger)
+ .to receive(:warn)
+ .with(
+ a_hash_including(
+ :message => 'Failed fetching merge request diff',
+ :merge_request_diff_file_id => file.id,
+ :merge_request_diff_id => file.merge_request_diff.id,
+ 'exception.class' => 'StandardError',
+ 'exception.message' => 'Error!'
+ )
+ )
+
+ expect(file.utf8_diff).to eq('')
+ end
+ end
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 007e84164a8..e9e8bd9bfea 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -466,19 +466,20 @@ RSpec.describe MergeRequestDiff do
diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
# There will be 11 returned, as we have to take into account for new and old paths
- expect(diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options).diff_paths).to eq([
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/images/wm.svg',
- 'files/js/commit.js.coffee',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/.DS_Store',
- 'files/whitespace'
- ])
+ expect(diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options).diff_paths).to eq(
+ [
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.js.coffee',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace'
+ ])
end
context 'when diff_options include ignore_whitespace_change' do
@@ -555,29 +556,30 @@ RSpec.describe MergeRequestDiff do
it 'sorts diff files directory first' do
diff_with_commits.update!(sorted: false) # Mark as unsorted so it'll re-order
- expect(diff_with_commits.diffs(diff_options).diff_paths).to eq([
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/images/wm.svg',
- 'files/js/commit.js.coffee',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/.DS_Store',
- 'files/whitespace',
- 'foo/bar/.gitkeep',
- 'with space/README.md',
- '.DS_Store',
- '.gitattributes',
- '.gitignore',
- '.gitmodules',
- 'CHANGELOG',
- 'README',
- 'gitlab-grack',
- 'gitlab-shell'
- ])
+ expect(diff_with_commits.diffs(diff_options).diff_paths).to eq(
+ [
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.js.coffee',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'with space/README.md',
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'gitlab-grack',
+ 'gitlab-shell'
+ ])
end
end
end
@@ -661,28 +663,29 @@ RSpec.describe MergeRequestDiff do
mr_diff = create(:merge_request).merge_request_diff
diff_files_paths = mr_diff.merge_request_diff_files.map { |file| file.new_path.presence || file.old_path }
- expect(diff_files_paths).to eq([
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/images/wm.svg',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/.DS_Store',
- 'files/whitespace',
- 'foo/bar/.gitkeep',
- 'with space/README.md',
- '.DS_Store',
- '.gitattributes',
- '.gitignore',
- '.gitmodules',
- 'CHANGELOG',
- 'README',
- 'gitlab-grack',
- 'gitlab-shell'
- ])
+ expect(diff_files_paths).to eq(
+ [
+ 'bar/branch-test.txt',
+ 'custom-highlighting/test.gitlab-custom',
+ 'encoding/iso8859.txt',
+ 'files/images/wm.svg',
+ 'files/js/commit.coffee',
+ 'files/lfs/lfs_object.iso',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/.DS_Store',
+ 'files/whitespace',
+ 'foo/bar/.gitkeep',
+ 'with space/README.md',
+ '.DS_Store',
+ '.gitattributes',
+ '.gitignore',
+ '.gitmodules',
+ 'CHANGELOG',
+ 'README',
+ 'gitlab-grack',
+ 'gitlab-shell'
+ ])
end
it 'expands collapsed diffs before saving' do
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index f27f3b749b1..32518b867cb 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1837,9 +1837,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'persisted merge request' do
context 'with a limit' do
it 'returns a limited number of commit shas' do
- expect(subject.commit_shas(limit: 2)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
- ])
+ expect(subject.commit_shas(limit: 2)).to eq(
+ %w[b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6])
end
end
@@ -4200,6 +4199,45 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'state machine transitions' do
let(:project) { create(:project, :repository) }
+ shared_examples_for 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
+ specify do
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
+
+ transition!
+ end
+ end
+
+ shared_examples_for 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription' do
+ specify do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(subject).and_call_original
+
+ transition!
+ end
+
+ context 'when trigger_mr_subscription_on_merge_status_change is disabled' do
+ before do
+ stub_feature_flags(trigger_mr_subscription_on_merge_status_change: false)
+ end
+
+ it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+ end
+
+ shared_examples 'for an invalid state transition' do
+ specify 'is not a valid state transition' do
+ expect { transition! }.to raise_error(StateMachines::InvalidTransition)
+ end
+ end
+
+ shared_examples 'for a valid state transition' do
+ it 'is a valid state transition' do
+ expect { transition! }
+ .to change { subject.merge_status }
+ .from(merge_status.to_s)
+ .to(expected_merge_status)
+ end
+ end
+
describe '#unlock_mr' do
subject { create(:merge_request, state: 'locked', source_project: project, merge_jid: 123) }
@@ -4214,22 +4252,58 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe '#mark_as_unchecked' do
+ describe '#mark_as_preparing' do
subject { create(:merge_request, source_project: project, merge_status: merge_status) }
- shared_examples 'for an invalid state transition' do
- it 'is not a valid state transition' do
- expect { subject.mark_as_unchecked! }.to raise_error(StateMachines::InvalidTransition)
- end
+ let(:expected_merge_status) { 'preparing' }
+
+ def transition!
+ subject.mark_as_preparing!
end
- shared_examples 'for an valid state transition' do
- it 'is a valid state transition' do
- expect { subject.mark_as_unchecked! }
- .to change { subject.merge_status }
- .from(merge_status.to_s)
- .to(expected_merge_status)
- end
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged' do
+ let(:merge_status) { :cannot_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for an invalid state transition'
+ end
+ end
+
+ describe '#mark_as_unchecked' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ def transition!
+ subject.mark_as_unchecked!
end
context 'when the status is unchecked' do
@@ -4242,14 +4316,16 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_status) { :checking }
let(:expected_merge_status) { 'unchecked' }
- include_examples 'for an valid state transition'
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is can_be_merged' do
let(:merge_status) { :can_be_merged }
let(:expected_merge_status) { 'unchecked' }
- include_examples 'for an valid state transition'
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged_recheck' do
@@ -4262,14 +4338,164 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_status) { :cannot_be_merged }
let(:expected_merge_status) { 'cannot_be_merged_recheck' }
- include_examples 'for an valid state transition'
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+ let(:expected_merge_status) { 'cannot_be_merged_recheck' }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+ end
+
+ describe '#mark_as_checking' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ def transition!
+ subject.mark_as_checking!
+ end
+
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+ let(:expected_merge_status) { 'checking' }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+ let(:expected_merge_status) { 'cannot_be_merged_rechecking' }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
context 'when the status is cannot_be_merged' do
let(:merge_status) { :cannot_be_merged }
- let(:expected_merge_status) { 'cannot_be_merged_recheck' }
- include_examples 'for an valid state transition'
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for an invalid state transition'
+ end
+ end
+
+ describe '#mark_as_mergeable' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ let(:expected_merge_status) { 'can_be_merged' }
+
+ def transition!
+ subject.mark_as_mergeable!
+ end
+
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is cannot_be_merged' do
+ let(:merge_status) { :cannot_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+ end
+
+ describe '#mark_as_unmergeable' do
+ subject { create(:merge_request, source_project: project, merge_status: merge_status) }
+
+ let(:expected_merge_status) { 'cannot_be_merged' }
+
+ def transition!
+ subject.mark_as_unmergeable!
+ end
+
+ context 'when the status is unchecked' do
+ let(:merge_status) { :unchecked }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is checking' do
+ let(:merge_status) { :checking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is can_be_merged' do
+ let(:merge_status) { :can_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_recheck' do
+ let(:merge_status) { :cannot_be_merged_recheck }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
+ end
+
+ context 'when the status is cannot_be_merged' do
+ let(:merge_status) { :cannot_be_merged }
+
+ include_examples 'for an invalid state transition'
+ end
+
+ context 'when the status is cannot_be_merged_rechecking' do
+ let(:merge_status) { :cannot_be_merged_rechecking }
+
+ include_examples 'for a valid state transition'
+ it_behaves_like 'transition triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
end
@@ -4739,15 +4965,17 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'persisted merge request' do
context 'with a limit' do
it 'returns a limited number of commits' do
- expect(subject.commits(limit: 2).map(&:sha)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0
- 498214de67004b1da3d820901307bed2a68a8ef6
- ])
- expect(subject.commits(limit: 3).map(&:sha)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0
- 498214de67004b1da3d820901307bed2a68a8ef6
- 1b12f15a11fc6e62177bef08f47bc7b5ce50b141
- ])
+ expect(subject.commits(limit: 2).map(&:sha)).to eq(
+ %w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0
+ 498214de67004b1da3d820901307bed2a68a8ef6
+ ])
+ expect(subject.commits(limit: 3).map(&:sha)).to eq(
+ %w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0
+ 498214de67004b1da3d820901307bed2a68a8ef6
+ 1b12f15a11fc6e62177bef08f47bc7b5ce50b141
+ ])
end
end
@@ -4792,9 +5020,10 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
it 'returns the safe number of commits' do
- expect(subject.recent_commits.map(&:sha)).to eq(%w[
- b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
- ])
+ expect(subject.recent_commits.map(&:sha)).to eq(
+ %w[
+ b83d6e391c22777fca1ed3012fce84f633d7fed0 498214de67004b1da3d820901307bed2a68a8ef6
+ ])
end
end
@@ -5171,4 +5400,22 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
end
+
+ describe '#can_suggest_reviewers?' do
+ let_it_be(:merge_request) { build(:merge_request, :opened, project: project) }
+
+ subject(:can_suggest_reviewers) { merge_request.can_suggest_reviewers? }
+
+ it 'returns false' do
+ expect(can_suggest_reviewers).to be(false)
+ end
+ end
+
+ describe '#suggested_reviewer_users' do
+ let_it_be(:merge_request) { build(:merge_request, project: project) }
+
+ subject(:suggested_reviewer_users) { merge_request.suggested_reviewer_users }
+
+ it { is_expected.to be_empty }
+ end
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index af1383b68bf..9f6b1f8016b 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -5,9 +5,35 @@ require 'spec_helper'
RSpec.describe Milestone do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
let_it_be(:issue) { create(:issue, project: project) }
+ describe 'modules' do
+ context 'with a project' do
+ it_behaves_like 'AtomicInternalId' do
+ let(:internal_id_attribute) { :iid }
+ let(:instance) { build(:milestone, project: create(:project), group: nil) }
+ let(:scope) { :project }
+ let(:scope_attrs) { { project: instance.project } }
+ let(:usage) { :milestones }
+ end
+ end
+
+ context 'with a group' do
+ it_behaves_like 'AtomicInternalId' do
+ let(:internal_id_attribute) { :iid }
+ let(:instance) { build(:milestone, project: nil, group: create(:group)) }
+ let(:scope) { :group }
+ let(:scope_attrs) { { namespace: instance.group } }
+ let(:usage) { :milestones }
+ end
+ end
+ end
+
it_behaves_like 'a timebox', :milestone do
+ let(:project) { create(:project, :public) }
+ let(:timebox) { create(:milestone, project: project) }
+
describe "#uniqueness_of_title" do
context "per project" do
it "does not accept the same title in a project twice" do
@@ -25,7 +51,7 @@ RSpec.describe Milestone do
end
context "per group" do
- let(:timebox) { create(:milestone, *timebox_args, group: group) }
+ let(:timebox) { create(:milestone, group: group) }
before do
project.update!(group: group)
@@ -96,9 +122,22 @@ RSpec.describe Milestone do
end
end
end
+
+ describe '#parent_type_check' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'is invalid if it has both project_id and group_id' do
+ milestone.project = project
+
+ expect(milestone).not_to be_valid
+ expect(milestone.errors[:project_id]).to include("milestone should belong either to a project or a group.")
+ end
+ end
end
describe "Associations" do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:group) }
it { is_expected.to have_many(:releases) }
it { is_expected.to have_many(:milestone_releases) }
end
@@ -562,6 +601,57 @@ RSpec.describe Milestone do
it { is_expected.not_to match("gitlab-org/gitlab-ce/milestones/123") }
end
+ describe '#merge_requests_enabled?' do
+ context "per project" do
+ it "is true for projects with MRs enabled" do
+ project = create(:project, :merge_requests_enabled)
+ milestone = build(:milestone, project: project)
+
+ expect(milestone.merge_requests_enabled?).to be_truthy
+ end
+
+ it "is false for projects with MRs disabled" do
+ project = create(:project, :repository_enabled, :merge_requests_disabled)
+ milestone = build(:milestone, project: project)
+
+ expect(milestone.merge_requests_enabled?).to be_falsey
+ end
+
+ it "is false for projects with repository disabled" do
+ project = create(:project, :repository_disabled)
+ milestone = build(:milestone, project: project)
+
+ expect(milestone.merge_requests_enabled?).to be_falsey
+ end
+ end
+
+ context "per group" do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it "is always true for groups, for performance reasons" do
+ expect(milestone.merge_requests_enabled?).to be_truthy
+ end
+ end
+ end
+
+ describe '#resource_parent' do
+ context 'when group is present' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'returns the group' do
+ expect(milestone.resource_parent).to eq(group)
+ end
+ end
+
+ context 'when project is present' do
+ let(:milestone) { build(:milestone, project: project) }
+
+ it 'returns the project' do
+ expect(milestone.resource_parent).to eq(project)
+ end
+ end
+ end
+
describe '#parent' do
context 'with group' do
it 'returns the expected parent' do
@@ -598,4 +688,40 @@ RSpec.describe Milestone do
end
end
end
+
+ describe '#project_milestone?' do
+ context 'when project_id is present' do
+ let(:milestone) { build(:milestone, project: project) }
+
+ it 'returns true' do
+ expect(milestone.project_milestone?).to be_truthy
+ end
+ end
+
+ context 'when project_id is not present' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'returns false' do
+ expect(milestone.project_milestone?).to be_falsey
+ end
+ end
+ end
+
+ describe '#group_milestone?' do
+ context 'when group_id is present' do
+ let(:milestone) { build(:milestone, group: group) }
+
+ it 'returns true' do
+ expect(milestone.group_milestone?).to be_truthy
+ end
+ end
+
+ context 'when group_id is not present' do
+ let(:milestone) { build(:milestone, project: project) }
+
+ it 'returns false' do
+ expect(milestone.group_milestone?).to be_falsey
+ end
+ end
+ end
end
diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb
index f58d30f81a0..3bf1e80a152 100644
--- a/spec/models/ml/candidate_spec.rb
+++ b/spec/models/ml/candidate_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Ml::Candidate do
+RSpec.describe Ml::Candidate, factory_default: :keep do
+ let_it_be(:candidate) { create_default(:ml_candidates, :with_metrics_and_params) }
+
describe 'associations' do
it { is_expected.to belong_to(:experiment) }
it { is_expected.to belong_to(:user) }
@@ -12,13 +14,11 @@ RSpec.describe Ml::Candidate do
describe '#new' do
it 'iid is not null' do
- expect(create(:ml_candidates).iid).not_to be_nil
+ expect(candidate.iid).not_to be_nil
end
end
- describe 'by_project_id_and_iid' do
- let_it_be(:candidate) { create(:ml_candidates) }
-
+ describe '#by_project_id_and_iid' do
let(:project_id) { candidate.experiment.project_id }
let(:iid) { candidate.iid }
diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb
index e300f82d290..789bb3aa88a 100644
--- a/spec/models/ml/experiment_spec.rb
+++ b/spec/models/ml/experiment_spec.rb
@@ -3,16 +3,19 @@
require 'spec_helper'
RSpec.describe Ml::Experiment do
+ let_it_be(:exp) { create(:ml_experiments) }
+ let_it_be(:exp2) { create(:ml_experiments, project: exp.project) }
+
+ let(:iid) { exp.iid }
+ let(:exp_name) { exp.name }
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:user) }
it { is_expected.to have_many(:candidates) }
end
- describe '#by_project_id_and_iid?' do
- let(:exp) { create(:ml_experiments) }
- let(:iid) { exp.iid }
-
+ describe '#by_project_id_and_iid' do
subject { described_class.by_project_id_and_iid(exp.project_id, iid) }
context 'if exists' do
@@ -26,10 +29,7 @@ RSpec.describe Ml::Experiment do
end
end
- describe '#by_project_id_and_name?' do
- let(:exp) { create(:ml_experiments) }
- let(:exp_name) { exp.name }
-
+ describe '#by_project_id_and_name' do
subject { described_class.by_project_id_and_name(exp.project_id, exp_name) }
context 'if exists' do
@@ -43,20 +43,17 @@ RSpec.describe Ml::Experiment do
end
end
- describe '#has_record?' do
- let(:exp) { create(:ml_experiments) }
- let(:exp_name) { exp.name }
+ describe '#by_project_id' do
+ let(:project_id) { exp.project_id }
- subject { described_class.has_record?(exp.project_id, exp_name) }
+ subject { described_class.by_project_id(project_id) }
- context 'if exists' do
- it { is_expected.to be_truthy }
- end
+ it { is_expected.to match_array([exp, exp2]) }
- context 'if does not exist' do
- let(:exp_name) { 'hello' }
+ context 'when project does not have experiment' do
+ let(:project_id) { non_existing_record_iid }
- it { is_expected.to be_falsey }
+ it { is_expected.to be_empty }
end
end
end
diff --git a/spec/models/namespace/aggregation_schedule_spec.rb b/spec/models/namespace/aggregation_schedule_spec.rb
index 38bf8089411..45b66fa12dd 100644
--- a/spec/models/namespace/aggregation_schedule_spec.rb
+++ b/spec/models/namespace/aggregation_schedule_spec.rb
@@ -5,8 +5,24 @@ require 'spec_helper'
RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state, type: :model do
include ExclusiveLeaseHelpers
+ let(:default_timeout) { described_class.default_lease_timeout }
+
it { is_expected.to belong_to :namespace }
+ describe "#default_lease_timeout" do
+ subject(:default_lease_timeout) { default_timeout }
+
+ it { is_expected.to eq 30.minutes.to_i }
+
+ context 'when remove_namespace_aggregator_delay FF is disabled' do
+ before do
+ stub_feature_flags(remove_namespace_aggregator_delay: false)
+ end
+
+ it { is_expected.to eq 1.hour.to_i }
+ end
+ end
+
describe '#schedule_root_storage_statistics' do
let(:namespace) { create(:namespace) }
let(:aggregation_schedule) { namespace.build_aggregation_schedule }
@@ -14,7 +30,7 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
context "when we can't obtain the lease" do
it 'does not schedule the workers' do
- stub_exclusive_lease_taken(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
+ stub_exclusive_lease_taken(lease_key, timeout: default_timeout)
expect(Namespaces::RootStatisticsWorker)
.not_to receive(:perform_async)
@@ -28,20 +44,20 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
context 'when we can obtain the lease' do
it 'schedules a root storage statistics after create' do
- stub_exclusive_lease(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
+ stub_exclusive_lease(lease_key, timeout: default_timeout)
expect(Namespaces::RootStatisticsWorker)
.to receive(:perform_async).once
expect(Namespaces::RootStatisticsWorker)
.to receive(:perform_in).once
- .with(described_class::DEFAULT_LEASE_TIMEOUT, aggregation_schedule.namespace_id)
+ .with(default_timeout, aggregation_schedule.namespace_id)
aggregation_schedule.save!
end
it 'does not release the lease' do
- stub_exclusive_lease(lease_key, timeout: described_class::DEFAULT_LEASE_TIMEOUT)
+ stub_exclusive_lease(lease_key, timeout: default_timeout)
aggregation_schedule.save!
@@ -58,7 +74,7 @@ RSpec.describe Namespace::AggregationSchedule, :clean_gitlab_redis_shared_state,
expect(Namespaces::RootStatisticsWorker)
.to receive(:perform_in).once
- .with(described_class::DEFAULT_LEASE_TIMEOUT, aggregation_schedule.namespace_id)
+ .with(default_timeout, aggregation_schedule.namespace_id)
.and_return(nil)
# Scheduling workers for the first time
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index 4308c8c06bc..2584fa597ad 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -85,4 +85,13 @@ RSpec.describe Namespace::PackageSetting do
end
end
end
+
+ describe 'package forwarding attributes' do
+ %i[maven_package_requests_forwarding
+ pypi_package_requests_forwarding
+ npm_package_requests_forwarding].each do |attribute|
+ it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: attribute,
+ settings_association: :package_settings
+ end
+ end
end
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index 4ac248802b8..a4446bfedd1 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -177,4 +177,8 @@ RSpec.describe NamespaceSetting, type: :model do
end
end
end
+
+ describe '#delayed_project_removal' do
+ it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: :delayed_project_removal
+ end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 2e8d22cb9db..c6d028af22d 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -362,6 +362,9 @@ RSpec.describe Namespace do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.allow_nil }
it { is_expected.to delegate_method(:avatar_url).to(:owner).allow_nil }
it { is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy).to(:namespace_settings).allow_nil }
+ it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:package_settings) }
+ it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:package_settings) }
+ it { is_expected.to delegate_method(:npm_package_requests_forwarding).to(:package_settings) }
it do
is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=).to(:namespace_settings)
@@ -1036,7 +1039,9 @@ RSpec.describe Namespace do
let(:pages_dir) { File.join(TestEnv.pages_path) }
def expect_project_directories_at(namespace_path, with_pages: true)
- expected_repository_path = File.join(TestEnv.repos_path, namespace_path, 'the-project.git')
+ expected_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, namespace_path, 'the-project.git')
+ end
expected_upload_path = File.join(uploads_dir, namespace_path, 'the-project')
expected_pages_path = File.join(pages_dir, namespace_path, 'the-project')
@@ -1046,15 +1051,19 @@ RSpec.describe Namespace do
end
before do
- FileUtils.mkdir_p(File.join(TestEnv.repos_path, "#{project.full_path}.git"))
+ Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ FileUtils.mkdir_p(File.join(TestEnv.repos_path, "#{project.full_path}.git"))
+ end
FileUtils.mkdir_p(File.join(uploads_dir, project.full_path))
FileUtils.mkdir_p(File.join(pages_dir, project.full_path))
end
after do
- FileUtils.remove_entry(File.join(TestEnv.repos_path, parent.full_path), true)
- FileUtils.remove_entry(File.join(TestEnv.repos_path, new_parent.full_path), true)
- FileUtils.remove_entry(File.join(TestEnv.repos_path, child.full_path), true)
+ Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ FileUtils.remove_entry(File.join(TestEnv.repos_path, parent.full_path), true)
+ FileUtils.remove_entry(File.join(TestEnv.repos_path, new_parent.full_path), true)
+ FileUtils.remove_entry(File.join(TestEnv.repos_path, child.full_path), true)
+ end
FileUtils.remove_entry(File.join(uploads_dir, project.full_path), true)
FileUtils.remove_entry(pages_dir, true)
end
@@ -1962,7 +1971,7 @@ RSpec.describe Namespace do
it 'returns the virual domain' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to eq("pages_domain_for_namespace_#{namespace.root_ancestor.id}")
+ expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{namespace.root_ancestor.id}_/)
end
context 'when :cache_pages_domain_api is disabled' do
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 1fce1f97dcb..670a6237788 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -1839,6 +1839,12 @@ RSpec.describe Note do
let(:note) { create(:note, :system, :on_issue, note: original_note_body) }
it { is_expected.to eq(expected_text_replacement) }
+
+ context 'context when note and cache are null (happens in bulk insert)' do
+ let(:note) { build(:note, :system, :on_issue, note: nil, note_html: nil).tap { |note| note.save!(validate: false) } }
+
+ it { is_expected.to be_in([nil, '']) }
+ end
end
end
end
@@ -1868,7 +1874,7 @@ RSpec.describe Note do
let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as completed</p>' }
before do
- note.update_columns(note_html: unchanged_note_body)
+ note.update_columns(note_html: unchanged_note_body) unless note.note.nil?
end
end
@@ -1878,7 +1884,7 @@ RSpec.describe Note do
let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as incomplete</p>' }
before do
- note.update_columns(note_html: unchanged_note_body)
+ note.update_columns(note_html: unchanged_note_body) unless note.note.nil?
end
end
end
diff --git a/spec/models/notification_recipient_spec.rb b/spec/models/notification_recipient_spec.rb
index 8105262aada..068166ebb0d 100644
--- a/spec/models/notification_recipient_spec.rb
+++ b/spec/models/notification_recipient_spec.rb
@@ -57,16 +57,6 @@ RSpec.describe NotificationRecipient do
it 'returns false' do
expect(recipient.notifiable?).to eq(false)
end
-
- context 'when block_emails_with_failures is disabled' do
- before do
- stub_feature_flags(block_emails_with_failures: false)
- end
-
- it 'returns true' do
- expect(recipient.notifiable?).to eq(true)
- end
- end
end
context 'with temporary failures' do
@@ -77,16 +67,6 @@ RSpec.describe NotificationRecipient do
it 'returns false' do
expect(recipient.notifiable?).to eq(false)
end
-
- context 'when block_emails_with_failures is disabled' do
- before do
- stub_feature_flags(block_emails_with_failures: false)
- end
-
- it 'returns true' do
- expect(recipient.notifiable?).to eq(true)
- end
- end
end
end
end
diff --git a/spec/models/operations/feature_flags/strategy_spec.rb b/spec/models/operations/feature_flags/strategy_spec.rb
index de1b9d2c855..949f92b3b2a 100644
--- a/spec/models/operations/feature_flags/strategy_spec.rb
+++ b/spec/models/operations/feature_flags/strategy_spec.rb
@@ -130,14 +130,15 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when the strategy name is flexibleRollout' do
valid_parameters = { rollout: '40', groupId: 'mygroup', stickiness: 'default' }
- where(invalid_parameters: [
- nil,
- {},
- *valid_parameters.to_a.combination(1).to_a.map { |p| p.to_h },
- *valid_parameters.to_a.combination(2).to_a.map { |p| p.to_h },
- { **valid_parameters, userIds: '4' },
- { **valid_parameters, extra: nil }
- ])
+ where(
+ invalid_parameters: [
+ nil,
+ {},
+ *valid_parameters.to_a.combination(1).to_a.map { |p| p.to_h },
+ *valid_parameters.to_a.combination(2).to_a.map { |p| p.to_h },
+ { **valid_parameters, userIds: '4' },
+ { **valid_parameters, extra: nil }
+ ])
with_them do
it 'must have valid parameters for the strategy' do
strategy = build(:operations_strategy,
@@ -180,9 +181,7 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq([
- 'rollout must be a string between 0 and 100 inclusive'
- ])
+ expect(strategy.errors[:parameters]).to eq(['rollout must be a string between 0 and 100 inclusive'])
end
end
@@ -243,9 +242,8 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy).to be_invalid
- expect(strategy.errors[:parameters]).to eq([
- 'stickiness parameter must be default, userId, sessionId, or random'
- ])
+ expect(strategy.errors[:parameters]).to eq(
+ ['stickiness parameter must be default, userId, sessionId, or random'])
end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index fb88dbb4212..0edb04224a3 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -904,6 +904,14 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.to match_array([pypi_package]) }
end
+ describe '.with_case_insensitive_version' do
+ let_it_be(:nuget_package) { create(:nuget_package, version: '1.0.0-ABC') }
+
+ subject { described_class.with_case_insensitive_version('1.0.0-abC') }
+
+ it { is_expected.to match_array([nuget_package]) }
+ end
+
context 'status scopes' do
let_it_be(:default_package) { create(:maven_package, :default) }
let_it_be(:hidden_package) { create(:maven_package, :hidden) }
diff --git a/spec/models/packages/rpm/repository_file_spec.rb b/spec/models/packages/rpm/repository_file_spec.rb
new file mode 100644
index 00000000000..34347793dd8
--- /dev/null
+++ b/spec/models/packages/rpm/repository_file_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryFile, type: :model do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:repository_file) { create(:rpm_repository_file) }
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ end
+
+ context 'when updating project statistics' do
+ context 'when the package file has an explicit size' do
+ it_behaves_like 'UpdateProjectStatistics' do
+ subject { build(:rpm_repository_file, size: 42) }
+ end
+ end
+
+ context 'when the package file does not have a size' do
+ it_behaves_like 'UpdateProjectStatistics' do
+ subject { build(:rpm_repository_file, size: nil) }
+ end
+ end
+ end
+
+ context 'with status scopes' do
+ let_it_be(:pending_destruction_repository_package_file) do
+ create(:rpm_repository_file, :pending_destruction)
+ end
+
+ describe '.with_status' do
+ subject { described_class.with_status(:pending_destruction) }
+
+ it { is_expected.to contain_exactly(pending_destruction_repository_package_file) }
+ end
+ end
+end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index b50bfaed528..463ec904e9a 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -563,7 +563,7 @@ RSpec.describe PagesDomain do
it 'returns the virual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to eq("pages_domain_for_project_#{project.id}")
+ expect(virtual_domain.cache_key).to match(/pages_domain_for_project_#{project.id}_/)
end
context 'when :cache_pages_domain_api is disabled' do
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 5bce6a2cc3f..67e7d444d25 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -105,6 +105,31 @@ RSpec.describe PersonalAccessToken do
end
end
+ describe '.last_used_before' do
+ context 'last_used_*' do
+ let_it_be(:date) { DateTime.new(2022, 01, 01) }
+ let_it_be(:token) { create(:personal_access_token, last_used_at: date ) }
+ # This token should never occur in the following tests and indicates that filtering was done correctly with it
+ let_it_be(:never_used_token) { create(:personal_access_token) }
+
+ describe '.last_used_before' do
+ it 'returns personal access tokens used before the specified date only' do
+ expect(described_class.last_used_before(date + 1)).to contain_exactly(token)
+ end
+ end
+
+ it 'does not return token that is last_used_at after given date' do
+ expect(described_class.last_used_before(date + 1)).not_to contain_exactly(never_used_token)
+ end
+
+ describe '.last_used_after' do
+ it 'returns personal access tokens used after the specified date only' do
+ expect(described_class.last_used_after(date - 1)).to contain_exactly(token)
+ end
+ end
+ end
+ end
+
describe '.last_used_before_or_unused' do
let(:last_used_at) { 1.month.ago.beginning_of_hour }
let!(:unused_token) { create(:personal_access_token) }
diff --git a/spec/models/pool_repository_spec.rb b/spec/models/pool_repository_spec.rb
index bf88e941540..9861e832bef 100644
--- a/spec/models/pool_repository_spec.rb
+++ b/spec/models/pool_repository_spec.rb
@@ -26,8 +26,6 @@ RSpec.describe PoolRepository do
describe '#unlink_repository' do
let(:pool) { create(:pool_repository, :ready) }
- let(:repository_path) { File.join(TestEnv.repos_path, pool.source_project.repository.relative_path) }
- let(:alternates_file) { File.join(repository_path, 'objects', 'info', 'alternates') }
before do
pool.link_repository(pool.source_project.repository)
@@ -36,19 +34,17 @@ RSpec.describe PoolRepository do
context 'when the last member leaves' do
it 'schedules pool removal' do
expect(::ObjectPool::DestroyWorker).to receive(:perform_async).with(pool.id).and_call_original
+ expect(pool.source_project.repository).to receive(:disconnect_alternates).and_call_original
pool.unlink_repository(pool.source_project.repository)
-
- expect(File).not_to exist(alternates_file)
end
end
context 'when skipping disconnect' do
it 'does not change the alternates file' do
- before = File.read(alternates_file)
- pool.unlink_repository(pool.source_project.repository, disconnect: false)
+ expect(pool.source_project.repository).not_to receive(:disconnect_alternates)
- expect(File.read(alternates_file)).to eq(before)
+ pool.unlink_repository(pool.source_project.repository, disconnect: false)
end
end
@@ -58,10 +54,9 @@ RSpec.describe PoolRepository do
pool.link_repository(other_project.repository)
expect(::ObjectPool::DestroyWorker).not_to receive(:perform_async).with(pool.id)
+ expect(pool.source_project.repository).to receive(:disconnect_alternates).and_call_original
pool.unlink_repository(pool.source_project.repository)
-
- expect(File).not_to exist(alternates_file)
end
end
end
diff --git a/spec/models/preloaders/project_root_ancestor_preloader_spec.rb b/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
index 30036a6a033..bb0de24abe5 100644
--- a/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
+++ b/spec/models/preloaders/project_root_ancestor_preloader_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Preloaders::ProjectRootAncestorPreloader do
let_it_be(:root_parent1) { create(:group, :private, name: 'root-1', path: 'root-1') }
- let_it_be(:root_parent2) { create(:group, :private, name: 'root-2', path: 'root-2') }
+ let_it_be(:root_parent2) { create(:group, name: 'root-2', path: 'root-2') }
let_it_be(:guest_project) { create(:project, name: 'public guest', path: 'public-guest') }
let_it_be(:private_maintainer_project) do
create(:project, :private, name: 'b private maintainer', path: 'b-private-maintainer', namespace: root_parent1)
@@ -15,7 +15,7 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
end
let_it_be(:public_maintainer_project) do
- create(:project, :private, name: 'a public maintainer', path: 'a-public-maintainer', namespace: root_parent2)
+ create(:project, name: 'a public maintainer', path: 'a-public-maintainer', namespace: root_parent2)
end
let(:root_query_regex) { /\ASELECT.+FROM "namespaces" WHERE "namespaces"."id" = \d+/ }
@@ -36,20 +36,20 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
it 'strong_memoizes the correct root_ancestor' do
pristine_projects.each do |project|
- expected_parent_id = project.root_ancestor&.id
+ preloaded_parent_id = project.root_ancestor&.id
- expect(project.parent_id).to eq(expected_parent_id)
+ expect(preloaded_parent_id).to eq(project.parent_id)
end
end
end
context 'when use_traversal_ids FF is enabled' do
context 'when the preloader is used' do
- before do
- preload_ancestors
- end
-
context 'when no additional preloads are provided' do
+ before do
+ preload_ancestors(:group)
+ end
+
it_behaves_like 'executes N matching DB queries', 0
end
@@ -57,6 +57,10 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
let(:additional_preloads) { [:route] }
let(:root_query_regex) { /\ASELECT.+FROM "routes" WHERE "routes"."source_id" = \d+/ }
+ before do
+ preload_ancestors
+ end
+
it_behaves_like 'executes N matching DB queries', 0, :full_path
end
end
@@ -64,6 +68,17 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
context 'when the preloader is not used' do
it_behaves_like 'executes N matching DB queries', 4
end
+
+ context 'when using a :group sti name and passing projects in a user namespace' do
+ let(:projects) { [private_developer_project] }
+ let(:additional_preloads) { [:ip_restrictions, :saml_provider] }
+
+ it 'does not load a nil value for root_ancestor' do
+ preload_ancestors(:group)
+
+ expect(pristine_projects.first.root_ancestor).to eq(private_developer_project.root_ancestor)
+ end
+ end
end
context 'when use_traversal_ids FF is disabled' do
@@ -91,9 +106,22 @@ RSpec.describe Preloaders::ProjectRootAncestorPreloader do
context 'when the preloader is not used' do
it_behaves_like 'executes N matching DB queries', 4
end
+
+ context 'when using a :group sti name and passing projects in a user namespace' do
+ let(:projects) { [private_developer_project] }
+ let(:additional_preloads) { [:ip_restrictions, :saml_provider] }
+
+ it 'does not load a nil value for root_ancestor' do
+ preload_ancestors(:group)
+
+ expect(pristine_projects.first.root_ancestor).to eq(private_developer_project.root_ancestor)
+ end
+ end
end
- def preload_ancestors
- described_class.new(pristine_projects, :namespace, additional_preloads).execute
+ private
+
+ def preload_ancestors(namespace_sti_name = :namespace)
+ described_class.new(pristine_projects, namespace_sti_name, additional_preloads).execute
end
end
diff --git a/spec/models/project_authorization_spec.rb b/spec/models/project_authorization_spec.rb
index 14220007966..55fe28ceb6f 100644
--- a/spec/models/project_authorization_spec.rb
+++ b/spec/models/project_authorization_spec.rb
@@ -92,20 +92,192 @@ RSpec.describe ProjectAuthorization do
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
- let(:per_batch_size) { 2 }
-
- it 'inserts the rows in batches, as per the `per_batch` size' do
- attributes = [
+ let(:attributes) do
+ [
{ user_id: user.id, project_id: project_1.id, access_level: Gitlab::Access::MAINTAINER },
{ user_id: user.id, project_id: project_2.id, access_level: Gitlab::Access::MAINTAINER },
{ user_id: user.id, project_id: project_3.id, access_level: Gitlab::Access::MAINTAINER }
]
+ end
- expect(described_class).to receive(:insert_all).twice.and_call_original
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ stub_feature_flags(enable_minor_delay_during_project_authorizations_refresh: true)
+ end
- described_class.insert_all_in_batches(attributes, per_batch_size)
+ shared_examples_for 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch' do
+ specify do
+ expect(described_class).not_to receive(:sleep)
- expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
+ described_class.insert_all_in_batches(attributes, per_batch_size)
+
+ expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
+ end
+ end
+
+ context 'when the total number of records to be inserted is greater than the batch size' do
+ let(:per_batch_size) { 2 }
+
+ it 'inserts the rows in batches, as per the `per_batch` size, with a delay between each batch' do
+ expect(described_class).to receive(:insert_all).twice.and_call_original
+ expect(described_class).to receive(:sleep).twice
+
+ described_class.insert_all_in_batches(attributes, per_batch_size)
+
+ expect(user.project_authorizations.pluck(:user_id, :project_id, :access_level)).to match_array(attributes.map(&:values))
+ end
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch'
+ end
+ end
+
+ context 'when the total number of records to be inserted is less than the batch size' do
+ let(:per_batch_size) { 5 }
+
+ it_behaves_like 'inserts the rows in batches, as per the `per_batch` size, without a delay between each batch'
+ end
+ end
+
+ describe '.delete_all_in_batches_for_project' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:user_2) { create(:user) }
+ let_it_be(:user_3) { create(:user) }
+ let_it_be(:user_4) { create(:user) }
+
+ let(:user_ids) { [user_1.id, user_2.id, user_3.id] }
+
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ stub_feature_flags(enable_minor_delay_during_project_authorizations_refresh: true)
+ end
+
+ before_all do
+ create(:project_authorization, user: user_1, project: project)
+ create(:project_authorization, user: user_2, project: project)
+ create(:project_authorization, user: user_3, project: project)
+ create(:project_authorization, user: user_4, project: project)
+ end
+
+ shared_examples_for 'removes the project authorizations of the specified users in the current project, without a delay between each batch' do
+ specify do
+ expect(described_class).not_to receive(:sleep)
+
+ described_class.delete_all_in_batches_for_project(
+ project: project,
+ user_ids: user_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
+ end
+ end
+
+ context 'when the total number of records to be removed is greater than the batch size' do
+ let(:per_batch_size) { 2 }
+
+ it 'removes the project authorizations of the specified users in the current project, with a delay between each batch' do
+ expect(described_class).to receive(:sleep).twice
+
+ described_class.delete_all_in_batches_for_project(
+ project: project,
+ user_ids: user_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(project.project_authorizations.pluck(:user_id)).not_to include(*user_ids)
+ end
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'removes the project authorizations of the specified users in the current project, without a delay between each batch'
+ end
+ end
+
+ context 'when the total number of records to be removed is less than the batch size' do
+ let(:per_batch_size) { 5 }
+
+ it_behaves_like 'removes the project authorizations of the specified users in the current project, without a delay between each batch'
+ end
+ end
+
+ describe '.delete_all_in_batches_for_user' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_4) { create(:project) }
+
+ let(:project_ids) { [project_1.id, project_2.id, project_3.id] }
+
+ before do
+ # Configure as if a replica database is enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(false)
+ stub_feature_flags(enable_minor_delay_during_project_authorizations_refresh: true)
+ end
+
+ before_all do
+ create(:project_authorization, user: user, project: project_1)
+ create(:project_authorization, user: user, project: project_2)
+ create(:project_authorization, user: user, project: project_3)
+ create(:project_authorization, user: user, project: project_4)
+ end
+
+ shared_examples_for 'removes the project authorizations of the specified projects from the current user, without a delay between each batch' do
+ specify do
+ expect(described_class).not_to receive(:sleep)
+
+ described_class.delete_all_in_batches_for_user(
+ user: user,
+ project_ids: project_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
+ end
+ end
+
+ context 'when the total number of records to be removed is greater than the batch size' do
+ let(:per_batch_size) { 2 }
+
+ it 'removes the project authorizations of the specified projects from the current user, with a delay between each batch' do
+ expect(described_class).to receive(:sleep).twice
+
+ described_class.delete_all_in_batches_for_user(
+ user: user,
+ project_ids: project_ids,
+ per_batch: per_batch_size
+ )
+
+ expect(user.project_authorizations.pluck(:project_id)).not_to include(*project_ids)
+ end
+
+ context 'when the GitLab installation does not have a replica database configured' do
+ before do
+ # Configure as if a replica database is not enabled
+ allow(::Gitlab::Database::LoadBalancing).to receive(:primary_only?).and_return(true)
+ end
+
+ it_behaves_like 'removes the project authorizations of the specified projects from the current user, without a delay between each batch'
+ end
+ end
+
+ context 'when the total number of records to be removed is less than the batch size' do
+ let(:per_batch_size) { 5 }
+
+ it_behaves_like 'removes the project authorizations of the specified projects from the current user, without a delay between each batch'
end
end
end
diff --git a/spec/models/project_group_link_spec.rb b/spec/models/project_group_link_spec.rb
index 8b95b86b14b..f141b8e83d6 100644
--- a/spec/models/project_group_link_spec.rb
+++ b/spec/models/project_group_link_spec.rb
@@ -47,9 +47,9 @@ RSpec.describe ProjectGroupLink do
it 'returns all records which are greater than Guests access' do
expect(described_class.non_guests).to match_array([
- project_group_link_reporter,
- project_group_link_developer,
- project_group_link_maintainer
+ project_group_link_reporter,
+ project_group_link_developer,
+ project_group_link_maintainer
])
end
end
diff --git a/spec/models/project_setting_spec.rb b/spec/models/project_setting_spec.rb
index a09ae7ec7ae..5730ca58e9e 100644
--- a/spec/models/project_setting_spec.rb
+++ b/spec/models/project_setting_spec.rb
@@ -21,6 +21,10 @@ RSpec.describe ProjectSetting, type: :model do
it { is_expected.not_to allow_value(nil).for(:target_platforms) }
it { is_expected.to allow_value([]).for(:target_platforms) }
+ it { is_expected.not_to allow_value(nil).for(:suggested_reviewers_enabled) }
+ it { is_expected.to allow_value(true).for(:suggested_reviewers_enabled) }
+ it { is_expected.to allow_value(false).for(:suggested_reviewers_enabled) }
+
it 'allows any combination of the allowed target platforms' do
valid_target_platform_combinations.each do |target_platforms|
expect(subject).to allow_value(target_platforms).for(:target_platforms)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 99b984ff547..75887e49dc9 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -28,7 +28,6 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:issues) }
it { is_expected.to have_many(:incident_management_issuable_escalation_statuses).through(:issues).inverse_of(:project).class_name('IncidentManagement::IssuableEscalationStatus') }
it { is_expected.to have_many(:milestones) }
- it { is_expected.to have_many(:iterations) }
it { is_expected.to have_many(:project_members).dependent(:delete_all) }
it { is_expected.to have_many(:users).through(:project_members) }
it { is_expected.to have_many(:requesters).dependent(:delete_all) }
@@ -149,6 +148,8 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:secure_files).class_name('Ci::SecureFile').dependent(:restrict_with_error) }
it { is_expected.to have_one(:build_artifacts_size_refresh).class_name('Projects::BuildArtifactsSizeRefresh') }
it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout').with_foreign_key(:project_id) }
+ it { is_expected.to have_many(:pipeline_metadata).class_name('Ci::PipelineMetadata') }
+ it { is_expected.to have_many(:incident_management_timeline_event_tags).class_name('IncidentManagement::TimelineEventTag') }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -845,6 +846,8 @@ RSpec.describe Project, factory_default: :keep do
end
describe 'delegation' do
+ let_it_be(:project) { create(:project) }
+
[:add_guest, :add_reporter, :add_developer, :add_maintainer, :add_member, :add_members].each do |method|
it { is_expected.to delegate_method(method).to(:team) }
end
@@ -859,6 +862,9 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:environments_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:feature_flags_access_level).to(:project_feature) }
it { is_expected.to delegate_method(:releases_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:namespace) }
+ it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:namespace) }
+ it { is_expected.to delegate_method(:npm_package_requests_forwarding).to(:namespace) }
describe 'read project settings' do
%i(
@@ -884,8 +890,24 @@ RSpec.describe Project, factory_default: :keep do
end
include_examples 'ci_cd_settings delegation' do
- # Skip attributes defined in EE code
+ let(:attributes_with_prefix) do
+ {
+ 'group_runners_enabled' => '',
+ 'default_git_depth' => 'ci_',
+ 'forward_deployment_enabled' => 'ci_',
+ 'keep_latest_artifact' => '',
+ 'restrict_user_defined_variables' => '',
+ 'runner_token_expiration_interval' => '',
+ 'separated_caches' => 'ci_',
+ 'opt_in_jwt' => 'ci_',
+ 'allow_fork_pipelines_to_run_in_parent_project' => 'ci_',
+ 'inbound_job_token_scope_enabled' => 'ci_',
+ 'job_token_scope_enabled' => 'ci_outbound_'
+ }
+ end
+
let(:exclude_attributes) do
+ # Skip attributes defined in EE code
%w(
merge_pipelines_enabled
merge_trains_enabled
@@ -906,12 +928,18 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#ci_job_token_scope_enabled?' do
- it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ describe '#ci_outbound_job_token_scope_enabled?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_outbound_' do
let(:delegated_method) { :job_token_scope_enabled? }
end
end
+ describe '#ci_inbound_job_token_scope_enabled?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ let(:delegated_method) { :inbound_job_token_scope_enabled? }
+ end
+ end
+
describe '#restrict_user_defined_variables?' do
it_behaves_like 'a ci_cd_settings predicate method' do
let(:delegated_method) { :restrict_user_defined_variables? }
@@ -937,23 +965,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#remove_project_authorizations' do
- let_it_be(:project) { create(:project) }
- let_it_be(:user_1) { create(:user) }
- let_it_be(:user_2) { create(:user) }
- let_it_be(:user_3) { create(:user) }
-
- it 'removes the project authorizations of the specified users in the current project' do
- create(:project_authorization, user: user_1, project: project)
- create(:project_authorization, user: user_2, project: project)
- create(:project_authorization, user: user_3, project: project)
-
- project.remove_project_authorizations([user_1.id, user_2.id])
-
- expect(project.project_authorizations.pluck(:user_id)).not_to include(user_1.id, user_2.id)
- end
- end
-
describe '#merge_commit_template_or_default' do
let_it_be(:project) { create(:project) }
@@ -1426,7 +1437,7 @@ RSpec.describe Project, factory_default: :keep do
it "is false if used other tracker" do
# NOTE: The current nature of this factory requires persistence
- project = create(:redmine_project)
+ project = create(:project, :with_redmine_integration)
expect(project.default_issues_tracker?).to be_falsey
end
@@ -1471,7 +1482,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#external_issue_tracker' do
it 'sets Project#has_external_issue_tracker when it is nil' do
project_with_no_tracker = create(:project, has_external_issue_tracker: nil)
- project_with_tracker = create(:redmine_project, has_external_issue_tracker: nil)
+ project_with_tracker = create(:project, :with_redmine_integration, has_external_issue_tracker: nil)
expect do
project_with_no_tracker.external_issue_tracker
@@ -1490,7 +1501,7 @@ RSpec.describe Project, factory_default: :keep do
end
it 'retrieves external_issue_tracker querying services and cache it when there is external issue tracker' do
- project = create(:redmine_project)
+ project = create(:project, :with_redmine_integration)
expect(project).to receive(:integrations).once.and_call_original
2.times { expect(project.external_issue_tracker).to be_a_kind_of(Integrations::Redmine) }
@@ -4620,6 +4631,7 @@ RSpec.describe Project, factory_default: :keep do
describe '.filter_by_feature_visibility' do
include_context 'ProjectPolicyTable context'
include ProjectHelpers
+ include UserHelpers
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:project) { create(:project, namespace: group) }
@@ -5761,40 +5773,40 @@ RSpec.describe Project, factory_default: :keep do
describe '#has_active_hooks?' do
let_it_be_with_refind(:project) { create(:project) }
- it { expect(project.has_active_hooks?).to be_falsey }
+ it { expect(project.has_active_hooks?).to eq(false) }
it 'returns true when a matching push hook exists' do
create(:project_hook, push_events: true, project: project)
- expect(project.has_active_hooks?(:merge_request_events)).to be_falsey
- expect(project.has_active_hooks?).to be_truthy
+ expect(project.has_active_hooks?(:merge_request_hooks)).to eq(false)
+ expect(project.has_active_hooks?).to eq(true)
end
it 'returns true when a matching system hook exists' do
create(:system_hook, push_events: true)
- expect(project.has_active_hooks?(:merge_request_events)).to be_falsey
- expect(project.has_active_hooks?).to be_truthy
+ expect(project.has_active_hooks?(:merge_request_hooks)).to eq(false)
+ expect(project.has_active_hooks?).to eq(true)
end
it 'returns true when a plugin exists' do
expect(Gitlab::FileHook).to receive(:any?).twice.and_return(true)
- expect(project.has_active_hooks?(:merge_request_events)).to be_truthy
- expect(project.has_active_hooks?).to be_truthy
+ expect(project.has_active_hooks?(:merge_request_hooks)).to eq(true)
+ expect(project.has_active_hooks?).to eq(true)
end
end
describe '#has_active_integrations?' do
let_it_be(:project) { create(:project) }
- it { expect(project.has_active_integrations?).to be_falsey }
+ it { expect(project.has_active_integrations?).to eq(false) }
it 'returns true when a matching service exists' do
create(:custom_issue_tracker_integration, push_events: true, merge_requests_events: false, project: project)
- expect(project.has_active_integrations?(:merge_request_hooks)).to be_falsey
- expect(project.has_active_integrations?).to be_truthy
+ expect(project.has_active_integrations?(:merge_request_hooks)).to eq(false)
+ expect(project.has_active_integrations?).to eq(true)
end
end
@@ -8308,16 +8320,6 @@ RSpec.describe Project, factory_default: :keep do
expect(project.packages_policy_subject).to be_a(Packages::Policies::Project)
expect(project.packages_policy_subject.project).to eq(project)
end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(read_package_policy_rule: false)
- end
-
- it 'returns project' do
- expect(project.packages_policy_subject).to eq(project)
- end
- end
end
describe '#destroy_deployment_by_id' do
@@ -8356,6 +8358,22 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#can_suggest_reviewers?' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:can_suggest_reviewers) { project.can_suggest_reviewers? }
+
+ it { is_expected.to be(false) }
+ end
+
+ describe '#suggested_reviewers_available?' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:suggested_reviewers_available) { project.suggested_reviewers_available? }
+
+ it { is_expected.to be(false) }
+ end
+
private
def finish_job(export_job)
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index b2158baa670..9de31ea66e4 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -98,6 +98,8 @@ RSpec.describe ProjectStatistics do
end
describe '#refresh!' do
+ subject(:refresh_statistics) { statistics.refresh! }
+
before do
allow(statistics).to receive(:update_commit_count)
allow(statistics).to receive(:update_repository_size)
@@ -111,7 +113,7 @@ RSpec.describe ProjectStatistics do
context "without arguments" do
before do
- statistics.refresh!
+ refresh_statistics
end
it "sums all counters" do
@@ -146,7 +148,7 @@ RSpec.describe ProjectStatistics do
expect(project.repository.exists?).to be_falsey
expect(project.wiki.repository.exists?).to be_falsey
- statistics.refresh!
+ refresh_statistics
expect(statistics).to have_received(:update_commit_count)
expect(statistics).to have_received(:update_repository_size)
@@ -167,14 +169,12 @@ RSpec.describe ProjectStatistics do
let(:project) { create(:project, :repository, :wiki_repo) }
before do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.rm_rf(project.repository.path)
- FileUtils.rm_rf(project.wiki.repository.path)
- end
+ project.repository.remove
+ project.wiki.repository.remove
end
it 'does not crash' do
- statistics.refresh!
+ refresh_statistics
expect(statistics).to have_received(:update_commit_count)
expect(statistics).to have_received(:update_repository_size)
@@ -209,7 +209,7 @@ RSpec.describe ProjectStatistics do
expect(Namespaces::ScheduleAggregationWorker)
.to receive(:perform_async)
- statistics.refresh!
+ refresh_statistics
end
end
end
@@ -238,9 +238,13 @@ RSpec.describe ProjectStatistics do
expect(Namespaces::ScheduleAggregationWorker)
.not_to receive(:perform_async)
- statistics.refresh!
+ refresh_statistics
end
end
+
+ it_behaves_like 'obtaining lease to update database' do
+ let(:model) { statistics }
+ end
end
describe '#update_commit_count' do
@@ -408,6 +412,8 @@ RSpec.describe ProjectStatistics do
end
describe '#refresh_storage_size!' do
+ subject(:refresh_storage_size) { statistics.refresh_storage_size! }
+
it 'recalculates storage size from its components and save it' do
statistics.update_columns(
repository_size: 2,
@@ -422,7 +428,29 @@ RSpec.describe ProjectStatistics do
storage_size: 0
)
- expect { statistics.refresh_storage_size! }.to change { statistics.storage_size }.from(0).to(28)
+ expect { refresh_storage_size }.to change { statistics.reload.storage_size }.from(0).to(28)
+ end
+
+ context 'when nullable columns are nil' do
+ before do
+ statistics.update_columns(
+ repository_size: 2,
+ wiki_size: nil,
+ storage_size: 0
+ )
+ end
+
+ it 'does not raise any error' do
+ expect { refresh_storage_size }.not_to raise_error
+ end
+
+ it 'recalculates storage size from its components' do
+ expect { refresh_storage_size }.to change { statistics.reload.storage_size }.from(0).to(2)
+ end
+ end
+
+ it_behaves_like 'obtaining lease to update database' do
+ let(:model) { statistics }
end
end
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index 54a90ca6049..b88367b9ca2 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -435,4 +435,28 @@ RSpec.describe ProtectedBranch do
expect(described_class.downcase_humanized_name).to eq 'protected branch'
end
end
+
+ describe '.default_branch?' do
+ before do
+ allow(subject.project).to receive(:default_branch).and_return(branch)
+ end
+
+ context 'when the name matches the default branch' do
+ let(:branch) { subject.name }
+
+ it { is_expected.to be_default_branch }
+ end
+
+ context 'when the name does not match the default branch' do
+ let(:branch) { "#{subject.name}qwerty" }
+
+ it { is_expected.not_to be_default_branch }
+ end
+
+ context 'when a wildcard name matches the default branch' do
+ let(:branch) { "#{subject.name}*" }
+
+ it { is_expected.not_to be_default_branch }
+ end
+ end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 4e386bf584f..6fbf69ec23a 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -8,12 +8,12 @@ RSpec.describe Repository do
TestBlob = Struct.new(:path)
- let(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
let(:broken_repository) { create(:project, :broken_storage).repository }
- let(:user) { create(:user) }
let(:git_user) { Gitlab::Git::User.from_gitlab(user) }
-
let(:message) { 'Test message' }
let(:merge_commit) do
@@ -40,16 +40,20 @@ RSpec.describe Repository do
end
describe '#branch_names_contains' do
- let_it_be(:project) { create(:project, :repository) }
+ subject { repository.branch_names_contains(sample_commit.id, **opts) }
- let(:repository) { project.repository }
-
- subject { repository.branch_names_contains(sample_commit.id) }
+ let(:opts) { {} }
it { is_expected.to include('master') }
it { is_expected.not_to include('feature') }
it { is_expected.not_to include('fix') }
+ context 'when limit is provided' do
+ let(:opts) { { limit: 1 } }
+
+ it { is_expected.to match_array(["'test'"]) }
+ end
+
describe 'when storage is broken', :broken_storage do
it 'raises a storage error' do
expect_to_raise_storage_error do
@@ -60,10 +64,18 @@ RSpec.describe Repository do
end
describe '#tag_names_contains' do
- subject { repository.tag_names_contains(sample_commit.id) }
+ subject { repository.tag_names_contains(sample_commit.id, **opts) }
+
+ let(:opts) { {} }
it { is_expected.to include('v1.1.0') }
it { is_expected.not_to include('v1.0.0') }
+
+ context 'when limit is provided' do
+ let(:opts) { { limit: 1 } }
+
+ it { is_expected.to match_array(['v1.1.0']) }
+ end
end
describe '#tags_sorted_by' do
@@ -359,6 +371,8 @@ RSpec.describe Repository do
end
describe '#commits' do
+ let_it_be(:project) { create(:project, :repository) }
+
context 'when neither the all flag nor a ref are specified' do
it 'returns every commit from default branch' do
expect(repository.commits(nil, limit: 60).size).to eq(37)
@@ -431,10 +445,6 @@ RSpec.describe Repository do
end
describe '#new_commits' do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:repository) { project.repository }
-
subject { repository.new_commits(rev) }
context 'when there are no new commits' do
@@ -498,6 +508,8 @@ RSpec.describe Repository do
end
describe '#commits_between' do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:commit) { project.commit }
it 'delegates to Gitlab::Git::Commit#between, returning decorated commits' do
@@ -614,6 +626,8 @@ RSpec.describe Repository do
end
describe '#merged_to_root_ref?' do
+ let_it_be(:project) { create(:project, :repository) }
+
context 'merged branch without ff' do
subject { repository.merged_to_root_ref?('branch-merged') }
@@ -843,14 +857,16 @@ RSpec.describe Repository do
end
describe "#create_dir" do
+ let_it_be(:project) { create(:project, :repository) }
+
it "commits a change that creates a new directory" do
expect do
- repository.create_dir(user, 'newdir',
+ repository.create_dir(user, 'newdir1',
message: 'Create newdir', branch_name: 'master')
end.to change { repository.count_commits(ref: 'master') }.by(1)
- newdir = repository.tree('master', 'newdir')
- expect(newdir.path).to eq('newdir')
+ newdir = repository.tree('master', 'newdir1')
+ expect(newdir.path).to eq('newdir1')
end
context "when committing to another project" do
@@ -858,7 +874,7 @@ RSpec.describe Repository do
it "creates a fork and commit to the forked project" do
expect do
- repository.create_dir(user, 'newdir',
+ repository.create_dir(user, 'newdir2',
message: 'Create newdir', branch_name: 'patch',
start_branch_name: 'master', start_project: forked_project)
end.to change { repository.count_commits(ref: 'master') }.by(0)
@@ -866,15 +882,15 @@ RSpec.describe Repository do
expect(repository.branch_exists?('patch')).to be_truthy
expect(forked_project.repository.branch_exists?('patch')).to be_falsy
- newdir = repository.tree('patch', 'newdir')
- expect(newdir.path).to eq('newdir')
+ newdir = repository.tree('patch', 'newdir2')
+ expect(newdir.path).to eq('newdir2')
end
end
context "when an author is specified" do
it "uses the given email/name to set the commit's author" do
expect do
- repository.create_dir(user, 'newdir',
+ repository.create_dir(user, 'newdir3',
message: 'Add newdir',
branch_name: 'master',
author_email: author_email, author_name: author_name)
@@ -987,6 +1003,8 @@ RSpec.describe Repository do
end
describe "#delete_file" do
+ let(:project) { create(:project, :repository) }
+
it 'removes file successfully' do
expect do
repository.delete_file(user, 'README',
@@ -1013,6 +1031,8 @@ RSpec.describe Repository do
end
describe "search_files_by_content" do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:results) { repository.search_files_by_content('feature', 'master') }
subject { results }
@@ -1248,6 +1268,8 @@ RSpec.describe Repository do
end
describe "#changelog", :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project, :repository) }
+
it 'accepts changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')])
@@ -1280,6 +1302,8 @@ RSpec.describe Repository do
end
describe "#license_blob", :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project, :repository) }
+
before do
repository.delete_file(
user, 'LICENSE', message: 'Remove LICENSE', branch_name: 'master')
@@ -1323,7 +1347,9 @@ RSpec.describe Repository do
end
end
- describe '#license_key', :use_clean_rails_memory_store_caching do
+ describe '#license_key', :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :repository) }
+
before do
repository.delete_file(user, 'LICENSE',
message: 'Remove LICENSE', branch_name: 'master')
@@ -1367,50 +1393,52 @@ RSpec.describe Repository do
end
end
- describe '#license' do
- before do
- repository.delete_file(user, 'LICENSE',
- message: 'Remove LICENSE', branch_name: 'master')
- end
+ [true, false].each do |ff|
+ context "with feature flag license_from_gitaly=#{ff}" do
+ before do
+ stub_feature_flags(license_from_gitaly: ff)
+ end
- it 'returns nil when no license is detected' do
- expect(repository.license).to be_nil
- end
+ describe '#license', :use_clean_rails_memory_store_caching, :clean_gitlab_redis_cache do
+ let(:project) { create(:project, :repository) }
- it 'returns nil when the repository does not exist' do
- expect(repository).to receive(:exists?).and_return(false)
+ before do
+ repository.delete_file(user, 'LICENSE',
+ message: 'Remove LICENSE', branch_name: 'master')
+ end
- expect(repository.license).to be_nil
- end
+ it 'returns nil when no license is detected' do
+ expect(repository.license).to be_nil
+ end
- it 'returns nil when license_key is not recognized' do
- expect(repository).to receive(:license_key).twice.and_return('not-recognized')
- expect(Gitlab::ErrorTracking).to receive(:track_exception) do |ex|
- expect(ex).to be_a(Licensee::InvalidLicense)
- end
+ it 'returns nil when the repository does not exist' do
+ expect(repository).to receive(:exists?).and_return(false)
- expect(repository.license).to be_nil
- end
+ expect(repository.license).to be_nil
+ end
- it 'returns other when the content is not recognizable' do
- license = Licensee::License.new('other')
- repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
- message: 'Add LICENSE', branch_name: 'master')
+ it 'returns other when the content is not recognizable' do
+ repository.create_file(user, 'LICENSE', 'Gitlab B.V.',
+ message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license).to eq(license)
- end
+ expect(repository.license_key).to eq('other')
+ end
- it 'returns the license' do
- license = Licensee::License.new('mit')
- repository.create_file(user, 'LICENSE',
- license.content,
- message: 'Add LICENSE', branch_name: 'master')
+ it 'returns the license' do
+ license = Licensee::License.new('mit')
+ repository.create_file(user, 'LICENSE',
+ license.content,
+ message: 'Add LICENSE', branch_name: 'master')
- expect(repository.license).to eq(license)
+ expect(repository.license_key).to eq(license.key)
+ end
+ end
end
end
describe "#gitlab_ci_yml", :use_clean_rails_memory_store_caching do
+ let(:project) { create(:project, :repository) }
+
it 'returns valid file' do
files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
@@ -1430,11 +1458,11 @@ RSpec.describe Repository do
end
describe '#ambiguous_ref?' do
- let(:ref) { 'ref' }
-
subject { repository.ambiguous_ref?(ref) }
context 'when ref is ambiguous' do
+ let(:ref) { 'ref' }
+
before do
repository.add_tag(project.creator, ref, 'master')
repository.add_branch(project.creator, ref, 'master')
@@ -1446,6 +1474,8 @@ RSpec.describe Repository do
end
context 'when ref is not ambiguous' do
+ let(:ref) { 'another_ref' }
+
before do
repository.add_tag(project.creator, ref, 'master')
end
@@ -1457,6 +1487,8 @@ RSpec.describe Repository do
end
describe '#has_ambiguous_refs?' do
+ let(:project) { create(:project, :repository) }
+
using RSpec::Parameterized::TableSyntax
where(:branch_names, :tag_names, :result) do
@@ -1484,6 +1516,7 @@ RSpec.describe Repository do
end
describe '#expand_ref' do
+ let(:project) { create(:project, :repository) }
let(:ref) { 'ref' }
subject { repository.expand_ref(ref) }
@@ -1520,6 +1553,7 @@ RSpec.describe Repository do
describe '#add_branch' do
let(:branch_name) { 'new_feature' }
let(:target) { 'master' }
+ let(:project) { create(:project, :repository) }
subject { repository.add_branch(user, branch_name, target) }
@@ -1604,6 +1638,8 @@ RSpec.describe Repository do
end
describe '#exists?' do
+ let(:project) { create(:project, :repository) }
+
it 'returns true when a repository exists' do
expect(repository.exists?).to be(true)
end
@@ -1624,6 +1660,8 @@ RSpec.describe Repository do
end
describe '#has_visible_content?' do
+ let(:project) { create(:project, :repository) }
+
it 'delegates to raw_repository when true' do
expect(repository.raw_repository).to receive(:has_visible_content?)
.and_return(true)
@@ -1690,6 +1728,8 @@ RSpec.describe Repository do
end
describe '#branch_names', :clean_gitlab_redis_cache do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:fake_branch_names) { ['foobar'] }
it 'gets cached across Repository instances' do
@@ -1706,6 +1746,7 @@ RSpec.describe Repository do
end
describe '#empty?' do
+ let(:project) { create(:project, :repository) }
let(:empty_repository) { create(:project_empty_repo).repository }
it 'returns true for an empty repository' do
@@ -1752,6 +1793,8 @@ RSpec.describe Repository do
end
describe '#root_ref' do
+ let(:project) { create(:project, :repository) }
+
it 'returns a branch name' do
expect(repository.root_ref).to be_an_instance_of(String)
end
@@ -1792,6 +1835,8 @@ RSpec.describe Repository do
describe '#expire_branch_cache' do
# This method is private but we need it for testing purposes. Sadly there's
# no other proper way of testing caching operations.
+ let_it_be(:project) { create(:project, :repository) }
+
let(:cache) { repository.send(:cache) }
it 'expires the cache for all branches' do
@@ -2003,6 +2048,7 @@ RSpec.describe Repository do
end
describe '#revert' do
+ let(:project) { create(:project, :repository) }
let(:new_image_commit) { repository.commit('33f3729a45c02fc67d00adb1b8bca394b0e761d9') }
let(:update_image_commit) { repository.commit('2f63565e7aac07bcdadb654e253078b727143ec4') }
let(:message) { 'revert message' }
@@ -2039,6 +2085,7 @@ RSpec.describe Repository do
end
describe '#cherry_pick' do
+ let(:project) { create(:project, :repository) }
let(:conflict_commit) { repository.commit('c642fe9b8b9f28f9225d7ea953fe14e74748d53b') }
let(:pickable_commit) { repository.commit('7d3b0f7cff5f37573aea97cebfd5692ea1689924') }
let(:pickable_merge) { repository.commit('e56497bb5f03a90a51293fc6d516788730953899') }
@@ -2174,7 +2221,8 @@ RSpec.describe Repository do
:contribution_guide,
:changelog,
:license_blob,
- :license_key,
+ :license_licensee,
+ :license_gitaly,
:gitignore,
:gitlab_ci_yml,
:branch_names,
@@ -2404,7 +2452,7 @@ RSpec.describe Repository do
end
it 'returns a Gitlab::Git::Tag object' do
- tag = repository.add_tag(user, '8.5', 'master', 'foo')
+ tag = repository.add_tag(user, '8.6', 'master', 'foo')
expect(tag).to be_a(Gitlab::Git::Tag)
end
@@ -2412,12 +2460,14 @@ RSpec.describe Repository do
context 'with an invalid target' do
it 'returns false' do
- expect(repository.add_tag(user, '8.5', 'bar', 'foo')).to be false
+ expect(repository.add_tag(user, '8.7', 'bar', 'foo')).to be false
end
end
end
describe '#rm_branch' do
+ let(:project) { create(:project, :repository) }
+
it 'removes a branch' do
expect(repository).to receive(:before_remove_branch)
expect(repository).to receive(:after_remove_branch)
@@ -2452,6 +2502,8 @@ RSpec.describe Repository do
end
describe '#find_tag' do
+ let_it_be(:project) { create(:project, :repository) }
+
before do
allow(Gitlab::GitalyClient).to receive(:call).and_call_original
end
@@ -2477,6 +2529,8 @@ RSpec.describe Repository do
end
describe '#avatar' do
+ let(:project) { create(:project, :repository) }
+
it 'returns nil if repo does not exist' do
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
@@ -2519,6 +2573,8 @@ RSpec.describe Repository do
end
describe '#xcode_project?' do
+ let(:project) { create(:project, :repository) }
+
before do
allow(repository).to receive(:tree).with(:head).and_return(double(:tree, trees: [tree]))
end
@@ -2654,7 +2710,7 @@ RSpec.describe Repository do
match[1].to_sym if match
end.compact
- expect(Repository::CACHED_METHODS + Repository::MEMOIZED_CACHED_METHODS).to include(*methods)
+ expect(Repository::CACHED_METHODS).to include(*methods)
end
end
@@ -2819,18 +2875,20 @@ RSpec.describe Repository do
describe '#refresh_method_caches' do
it 'refreshes the caches of the given types' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(readme_path license_blob license_key license))
+ .with(%i(readme_path license_blob license_licensee license_gitaly))
expect(repository).to receive(:readme_path)
expect(repository).to receive(:license_blob)
- expect(repository).to receive(:license_key)
- expect(repository).to receive(:license)
+ expect(repository).to receive(:license_licensee)
+ expect(repository).to receive(:license_gitaly)
repository.refresh_method_caches(%i(readme license))
end
end
describe '#gitlab_ci_yml_for' do
+ let(:project) { create(:project, :repository) }
+
before do
repository.create_file(User.last, '.gitlab-ci.yml', 'CONTENT', message: 'Add .gitlab-ci.yml', branch_name: 'master')
end
@@ -2849,7 +2907,7 @@ RSpec.describe Repository do
end
describe '#changelog_config' do
- let(:user) { create(:user) }
+ let(:project) { create(:project, :repository) }
let(:changelog_config_path) { Gitlab::Changelog::Config::DEFAULT_FILE_PATH }
before do
@@ -2865,6 +2923,7 @@ RSpec.describe Repository do
context 'when there is a changelog_config_path at the commit' do
it 'returns the content' do
expect(repository.changelog_config(repository.commit.sha, changelog_config_path)).to eq('CONTENT')
+ expect(repository.changelog_config(repository.commit.parent.sha, changelog_config_path)).to be_nil
end
end
@@ -2876,6 +2935,8 @@ RSpec.describe Repository do
end
describe '#route_map_for' do
+ let(:project) { create(:project, :repository) }
+
before do
repository.create_file(User.last, '.gitlab/route-map.yml', 'CONTENT', message: 'Add .gitlab/route-map.yml', branch_name: 'master')
end
@@ -3148,7 +3209,6 @@ RSpec.describe Repository do
describe '#create_if_not_exists' do
let(:project) { create(:project) }
- let(:repository) { project.repository }
it 'creates the repository if it did not exist' do
expect { repository.create_if_not_exists }.to change { repository.exists? }.from(false).to(true)
@@ -3204,7 +3264,6 @@ RSpec.describe Repository do
describe '#create_from_bundle' do
let(:project) { create(:project) }
- let(:repository) { project.repository }
let(:valid_bundle_path) { File.join(Dir.tmpdir, "repo-#{SecureRandom.hex}.bundle") }
let(:raw_repository) { repository.raw }
@@ -3244,8 +3303,6 @@ RSpec.describe Repository do
describe "#blobs_metadata" do
let_it_be(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
-
def expect_metadata_blob(thing)
expect(thing).to be_a(Blob)
expect(thing.data).to be_empty
@@ -3313,8 +3370,6 @@ RSpec.describe Repository do
subject { repository.lfs_enabled? }
context 'for a project repository' do
- let(:repository) { project.repository }
-
it 'returns true when LFS is enabled' do
stub_lfs_setting(enabled: true)
@@ -3425,6 +3480,8 @@ RSpec.describe Repository do
end
describe '#change_head' do
+ let_it_be(:project) { create(:project, :repository) }
+
let(:branch) { repository.container.default_branch }
context 'when the branch exists' do
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index 44de83d9240..5087a8e8524 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -144,4 +144,14 @@ RSpec.describe ResourceLabelEvent, type: :model do
create(:resource_label_event, issue: issue, label: label)
end
end
+
+ describe '#discussion_id' do
+ it 'generates different discussion ID for events created milliseconds apart' do
+ now = Time.current
+ event_1 = create(:resource_label_event, issue: issue, label: label, user: issue.author, created_at: now)
+ event_2 = create(:resource_label_event, issue: issue, label: label, user: issue.author, created_at: now.advance(seconds: 0.001))
+
+ expect(event_1.discussion_id).not_to eq(event_2.discussion_id)
+ end
+ end
end
diff --git a/spec/models/user_detail_spec.rb b/spec/models/user_detail_spec.rb
index 9189b9a1469..04964d36dcd 100644
--- a/spec/models/user_detail_spec.rb
+++ b/spec/models/user_detail_spec.rb
@@ -25,5 +25,140 @@ RSpec.describe UserDetail do
describe '#bio' do
it { is_expected.to validate_length_of(:bio).is_at_most(255) }
end
+
+ describe '#linkedin' do
+ it { is_expected.to validate_length_of(:linkedin).is_at_most(500) }
+ end
+
+ describe '#twitter' do
+ it { is_expected.to validate_length_of(:twitter).is_at_most(500) }
+ end
+
+ describe '#skype' do
+ it { is_expected.to validate_length_of(:skype).is_at_most(500) }
+ end
+
+ describe '#location' do
+ it { is_expected.to validate_length_of(:location).is_at_most(500) }
+ end
+
+ describe '#organization' do
+ it { is_expected.to validate_length_of(:organization).is_at_most(500) }
+ end
+
+ describe '#website_url' do
+ it { is_expected.to validate_length_of(:website_url).is_at_most(500) }
+ end
+ end
+
+ describe '.user_fields_changed?' do
+ let(:user) { create(:user) }
+
+ context 'when user detail fields unchanged' do
+ it 'returns false' do
+ expect(described_class.user_fields_changed?(user)).to be false
+ end
+
+ %i[linkedin location organization skype twitter website_url].each do |attr|
+ context "when #{attr} is changed" do
+ before do
+ user[attr] = 'new value'
+ end
+
+ it 'returns true' do
+ expect(described_class.user_fields_changed?(user)).to be true
+ end
+ end
+ end
+ end
+ end
+
+ describe '#sanitize_attrs' do
+ shared_examples 'sanitizes html' do |attr|
+ it 'sanitizes html tags' do
+ details = build_stubbed(:user_detail, attr => '<a href="//evil.com">https://example.com<a>')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('https://example.com')
+ end
+
+ it 'sanitizes iframe scripts' do
+ details = build_stubbed(:user_detail, attr => '<iframe src=javascript:alert()><iframe>')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('')
+ end
+
+ it 'sanitizes js scripts' do
+ details = build_stubbed(:user_detail, attr => '<script>alert("Test")</script>')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('')
+ end
+ end
+
+ %i[linkedin skype twitter website_url].each do |attr|
+ it_behaves_like 'sanitizes html', attr
+
+ it 'encodes HTML entities' do
+ details = build_stubbed(:user_detail, attr => 'test&attr')
+ expect { details.sanitize_attrs }.to change { details[attr] }.to('test&amp;attr')
+ end
+ end
+
+ %i[location organization].each do |attr|
+ it_behaves_like 'sanitizes html', attr
+
+ it 'does not encode HTML entities' do
+ details = build_stubbed(:user_detail, attr => 'test&attr')
+ expect { details.sanitize_attrs }.not_to change { details[attr] }
+ end
+ end
+
+ it 'sanitizes on validation' do
+ details = build(:user_detail)
+
+ expect(details)
+ .to receive(:sanitize_attrs)
+ .at_least(:once)
+ .and_call_original
+
+ details.save!
+ end
+ end
+
+ describe '#assign_changed_fields_from_user' do
+ let(:user_detail) { build(:user_detail) }
+
+ shared_examples 'syncs field with `user_details`' do |field|
+ it 'does not sync the field to `user_details` if unchanged' do
+ expect { user_detail.assign_changed_fields_from_user }
+ .to not_change { user_detail.public_send(field) }
+ end
+
+ it 'syncs the field to `user_details` if changed' do
+ user_detail.user[field] = "new_value"
+ expect { user_detail.assign_changed_fields_from_user }
+ .to change { user_detail.public_send(field) }
+ .to("new_value")
+ end
+
+ it 'truncates the field if too long' do
+ user_detail.user[field] = 'a' * (UserDetail::DEFAULT_FIELD_LENGTH + 1)
+ expect { user_detail.assign_changed_fields_from_user }
+ .to change { user_detail.public_send(field) }
+ .to('a' * UserDetail::DEFAULT_FIELD_LENGTH)
+ end
+
+ it 'properly syncs nil field to `user_details' do
+ user_detail.user[field] = 'Test'
+ user_detail.user.save!(validate: false)
+ user_detail.user[field] = nil
+ expect { user_detail.assign_changed_fields_from_user }
+ .to change { user_detail.public_send(field) }
+ .to('')
+ end
+ end
+
+ it_behaves_like 'syncs field with `user_details`', :linkedin
+ it_behaves_like 'syncs field with `user_details`', :location
+ it_behaves_like 'syncs field with `user_details`', :organization
+ it_behaves_like 'syncs field with `user_details`', :skype
+ it_behaves_like 'syncs field with `user_details`', :twitter
+ it_behaves_like 'syncs field with `user_details`', :website_url
end
end
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 2492521c634..d76334d7c9e 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -10,20 +10,20 @@ RSpec.describe UserPreference do
using RSpec::Parameterized::TableSyntax
where(color: [
- '#000000',
- '#123456',
- '#abcdef',
- '#AbCdEf',
- '#ffffff',
- '#fFfFfF',
- '#000',
- '#123',
- '#abc',
- '#AbC',
- '#fff',
- '#fFf',
- ''
- ])
+ '#000000',
+ '#123456',
+ '#abcdef',
+ '#AbCdEf',
+ '#ffffff',
+ '#fFfFfF',
+ '#000',
+ '#123',
+ '#abc',
+ '#AbC',
+ '#fff',
+ '#fFf',
+ ''
+ ])
with_them do
it { is_expected.to allow_value(color).for(:diffs_deletion_color) }
@@ -31,20 +31,27 @@ RSpec.describe UserPreference do
end
where(color: [
- '#1',
- '#12',
- '#1234',
- '#12345',
- '#1234567',
- '123456',
- '#12345x'
- ])
+ '#1',
+ '#12',
+ '#1234',
+ '#12345',
+ '#1234567',
+ '123456',
+ '#12345x'
+ ])
with_them do
it { is_expected.not_to allow_value(color).for(:diffs_deletion_color) }
it { is_expected.not_to allow_value(color).for(:diffs_addition_color) }
end
end
+
+ describe 'use_legacy_web_ide' do
+ it { is_expected.to allow_value(true).for(:use_legacy_web_ide) }
+ it { is_expected.to allow_value(false).for(:use_legacy_web_ide) }
+ it { is_expected.not_to allow_value(nil).for(:use_legacy_web_ide) }
+ it { is_expected.not_to allow_value("").for(:use_legacy_web_ide) }
+ end
end
describe 'notes filters global keys' do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 3cc34681ad6..8ebf3d70165 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -69,12 +69,18 @@ RSpec.describe User do
it { is_expected.to delegate_method(:markdown_surround_selection).to(:user_preference) }
it { is_expected.to delegate_method(:markdown_surround_selection=).to(:user_preference).with_arguments(:args) }
+ it { is_expected.to delegate_method(:markdown_automatic_lists).to(:user_preference) }
+ it { is_expected.to delegate_method(:markdown_automatic_lists=).to(:user_preference).with_arguments(:args) }
+
it { is_expected.to delegate_method(:diffs_deletion_color).to(:user_preference) }
it { is_expected.to delegate_method(:diffs_deletion_color=).to(:user_preference).with_arguments(:args) }
it { is_expected.to delegate_method(:diffs_addition_color).to(:user_preference) }
it { is_expected.to delegate_method(:diffs_addition_color=).to(:user_preference).with_arguments(:args) }
+ it { is_expected.to delegate_method(:use_legacy_web_ide).to(:user_preference) }
+ it { is_expected.to delegate_method(:use_legacy_web_ide=).to(:user_preference).with_arguments(:args) }
+
it { is_expected.to delegate_method(:job_title).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:job_title=).to(:user_detail).with_arguments(:args).allow_nil }
@@ -101,6 +107,7 @@ RSpec.describe User do
it { is_expected.to have_one(:atlassian_identity) }
it { is_expected.to have_one(:user_highest_role) }
it { is_expected.to have_one(:credit_card_validation) }
+ it { is_expected.to have_one(:phone_number_validation) }
it { is_expected.to have_one(:banned_user) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
it { is_expected.to have_many(:members) }
@@ -136,7 +143,6 @@ RSpec.describe User do
it { is_expected.to have_many(:timelogs) }
it { is_expected.to have_many(:callouts).class_name('Users::Callout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
- it { is_expected.to have_many(:namespace_callouts).class_name('Users::NamespaceCallout') }
it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout') }
describe '#user_detail' do
@@ -1163,6 +1169,20 @@ RSpec.describe User do
'ORDER BY "users"."last_activity_on" ASC NULLS FIRST, "users"."id" DESC')
end
end
+
+ describe '.order_recent_sign_in' do
+ it 'sorts users by current_sign_in_at in descending order' do
+ expect(described_class.order_recent_sign_in.to_sql).to include(
+ 'ORDER BY "users"."current_sign_in_at" DESC NULLS LAST')
+ end
+ end
+
+ describe '.order_oldest_sign_in' do
+ it 'sorts users by current_sign_in_at in ascending order' do
+ expect(described_class.order_oldest_sign_in.to_sql).to include(
+ 'ORDER BY "users"."current_sign_in_at" ASC NULLS LAST')
+ end
+ end
end
context 'strip attributes' do
@@ -1882,9 +1902,9 @@ RSpec.describe User do
end
it 'ensures correct rights and limits for user' do
- stub_config_setting(default_can_create_group: true)
+ stub_application_setting(can_create_group: true)
- expect { user.update!(external: false) }.to change { user.can_create_group }.to(true)
+ expect { user.update!(external: false) }.to change { user.can_create_group }.from(false).to(true)
.and change { user.projects_limit }.to(Gitlab::CurrentSettings.default_projects_limit)
end
end
@@ -2604,7 +2624,7 @@ RSpec.describe User do
it 'applies defaults to user' do
expect(user.projects_limit).to eq(Gitlab.config.gitlab.default_projects_limit)
- expect(user.can_create_group).to eq(Gitlab.config.gitlab.default_can_create_group)
+ expect(user.can_create_group).to eq(Gitlab::CurrentSettings.can_create_group)
expect(user.theme_id).to eq(Gitlab.config.gitlab.default_theme)
expect(user.external).to be_falsey
expect(user.private_profile).to eq(false)
@@ -3719,6 +3739,22 @@ RSpec.describe User do
expect(user.followees).to be_empty
end
+
+ it 'does not follow if max followee limit is reached' do
+ stub_const('Users::UserFollowUser::MAX_FOLLOWEE_LIMIT', 2)
+
+ user = create(:user)
+ Users::UserFollowUser::MAX_FOLLOWEE_LIMIT.times { user.follow(create(:user)) }
+
+ followee = create(:user)
+ user_follow_user = user.follow(followee)
+
+ expect(user_follow_user).not_to be_persisted
+ expected_message = format(_("You can't follow more than %{limit} users. To follow more users, unfollow some others."), limit: Users::UserFollowUser::MAX_FOLLOWEE_LIMIT)
+ expect(user_follow_user.errors.messages[:base].first).to eq(expected_message)
+
+ expect(user.following?(followee)).to be_falsey
+ end
end
describe '#unfollow' do
@@ -3747,6 +3783,18 @@ RSpec.describe User do
expect(user.followees).to be_empty
end
+
+ it 'unfollows when over followee limit' do
+ user = create(:user)
+
+ followees = create_list(:user, 4)
+ followees.each { |f| expect(user.follow(f)).to be_truthy }
+
+ stub_const('Users::UserFollowUser::MAX_FOLLOWEE_LIMIT', followees.length - 2)
+
+ expect(user.unfollow(followees.first)).to be_truthy
+ expect(user.following?(followees.first)).to be_falsey
+ end
end
describe '#notification_email_or_default' do
@@ -4838,23 +4886,6 @@ RSpec.describe User do
end
end
- describe '#remove_project_authorizations' do
- let_it_be(:project1) { create(:project) }
- let_it_be(:project2) { create(:project) }
- let_it_be(:project3) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- it 'removes the project authorizations of the user, in specified projects' do
- create(:project_authorization, user: user, project: project1)
- create(:project_authorization, user: user, project: project2)
- create(:project_authorization, user: user, project: project3)
-
- user.remove_project_authorizations([project1.id, project2.id])
-
- expect(user.project_authorizations.pluck(:project_id)).to match_array([project3.id])
- end
- end
-
describe '#access_level=' do
let(:user) { build(:user) }
@@ -5393,6 +5424,41 @@ RSpec.describe User do
end
end
+ describe '#ensure_user_detail_assigned' do
+ let(:user) { build(:user) }
+
+ context 'when no user detail field has been changed' do
+ before do
+ allow(UserDetail)
+ .to receive(:user_fields_changed?)
+ .and_return(false)
+ end
+
+ it 'does not assign user details before save' do
+ expect(user.user_detail)
+ .not_to receive(:assign_changed_fields_from_user)
+
+ user.save!
+ end
+ end
+
+ context 'when a user detail field has been changed' do
+ before do
+ allow(UserDetail)
+ .to receive(:user_fields_changed?)
+ .and_return(true)
+ end
+
+ it 'assigns user details before save' do
+ expect(user.user_detail)
+ .to receive(:assign_changed_fields_from_user)
+ .and_call_original
+
+ user.save!
+ end
+ end
+ end
+
describe '#username_changed_hook' do
context 'for a new user' do
let(:user) { build(:user) }
@@ -6251,6 +6317,64 @@ RSpec.describe User do
it { is_expected.to be_falsey }
end
end
+
+ context 'user with autogenerated_password' do
+ let(:user) { build_stubbed(:user, password_automatically_set: true) }
+ let(:password) { user.password }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#generate_otp_backup_codes!' do
+ let(:user) { create(:user) }
+
+ context 'with FIPS mode', :fips_mode do
+ it 'attempts to use #generate_otp_backup_codes_pbkdf2!' do
+ expect(user).to receive(:generate_otp_backup_codes_pbkdf2!).and_call_original
+
+ user.generate_otp_backup_codes!
+ end
+ end
+
+ context 'outside FIPS mode' do
+ it 'does not attempt to use #generate_otp_backup_codes_pbkdf2!' do
+ expect(user).not_to receive(:generate_otp_backup_codes_pbkdf2!)
+
+ user.generate_otp_backup_codes!
+ end
+ end
+ end
+
+ describe '#invalidate_otp_backup_code!' do
+ let(:user) { create(:user) }
+
+ context 'with FIPS mode', :fips_mode do
+ context 'with a PBKDF2-encrypted password' do
+ let(:encrypted_password) { '$pbkdf2-sha512$20000$boHGAw0hEyI$DBA67J7zNZebyzLtLk2X9wRDbmj1LNKVGnZLYyz6PGrIDGIl45fl/BPH0y1TPZnV90A20i.fD9C3G9Bp8jzzOA' }
+
+ it 'attempts to use #invalidate_otp_backup_code_pdkdf2!' do
+ expect(user).to receive(:otp_backup_codes).at_least(:once).and_return([encrypted_password])
+ expect(user).to receive(:invalidate_otp_backup_code_pdkdf2!).and_return(true)
+
+ user.invalidate_otp_backup_code!(user.password)
+ end
+ end
+
+ it 'does not attempt to use #invalidate_otp_backup_code_pdkdf2!' do
+ expect(user).not_to receive(:invalidate_otp_backup_code_pdkdf2!)
+
+ user.invalidate_otp_backup_code!(user.password)
+ end
+ end
+
+ context 'outside FIPS mode' do
+ it 'does not attempt to use #invalidate_otp_backup_code_pdkdf2!' do
+ expect(user).not_to receive(:invalidate_otp_backup_code_pdkdf2!)
+
+ user.invalidate_otp_backup_code!(user.password)
+ end
+ end
end
# These entire test section can be removed once the :pbkdf2_password_encryption feature flag is removed.
@@ -6593,96 +6717,6 @@ RSpec.describe User do
end
end
- describe 'Users::NamespaceCallout' do
- describe '#dismissed_callout_for_namespace?' do
- let_it_be(:user, refind: true) { create(:user) }
- let_it_be(:namespace) { create(:namespace) }
- let_it_be(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
-
- let(:query) do
- { feature_name: feature_name, namespace: namespace }
- end
-
- def have_dismissed_callout
- be_dismissed_callout_for_namespace(**query)
- end
-
- context 'when no callout dismissal record exists' do
- it 'returns false when no ignore_dismissal_earlier_than provided' do
- expect(user).not_to have_dismissed_callout
- end
- end
-
- context 'when dismissed callout exists' do
- before_all do
- create(:namespace_callout,
- user: user,
- namespace_id: namespace.id,
- feature_name: feature_name,
- dismissed_at: 4.months.ago)
- end
-
- it 'returns true when no ignore_dismissal_earlier_than provided' do
- expect(user).to have_dismissed_callout
- end
-
- it 'returns true when ignore_dismissal_earlier_than is earlier than dismissed_at' do
- query[:ignore_dismissal_earlier_than] = 6.months.ago
-
- expect(user).to have_dismissed_callout
- end
-
- it 'returns false when ignore_dismissal_earlier_than is later than dismissed_at' do
- query[:ignore_dismissal_earlier_than] = 2.months.ago
-
- expect(user).not_to have_dismissed_callout
- end
- end
- end
-
- describe '#find_or_initialize_namespace_callout' do
- let_it_be(:user, refind: true) { create(:user) }
- let_it_be(:namespace) { create(:namespace) }
- let_it_be(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
-
- subject(:callout_with_source) do
- user.find_or_initialize_namespace_callout(feature_name, namespace.id)
- end
-
- context 'when callout exists' do
- let!(:callout) do
- create(:namespace_callout, user: user, feature_name: feature_name, namespace_id: namespace.id)
- end
-
- it 'returns existing callout' do
- expect(callout_with_source).to eq(callout)
- end
- end
-
- context 'when callout does not exist' do
- context 'when feature name is valid' do
- it 'initializes a new callout' do
- expect(callout_with_source)
- .to be_a_new(Users::NamespaceCallout)
- .and be_valid
- end
- end
-
- context 'when feature name is not valid' do
- let(:feature_name) { 'notvalid' }
-
- it 'initializes a new callout' do
- expect(callout_with_source).to be_a_new(Users::NamespaceCallout)
- end
-
- it 'is not valid' do
- expect(callout_with_source).not_to be_valid
- end
- end
- end
- end
- end
-
describe '#dismissed_callout_for_group?' do
let_it_be(:user, refind: true) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -7432,9 +7466,10 @@ RSpec.describe User do
let_it_be(:internal_user) { User.alert_bot.tap { |u| u.confirm } }
it 'does not return blocked or banned users' do
- expect(described_class.without_forbidden_states).to match_array([
- normal_user, admin_user, external_user, unconfirmed_user, omniauth_user, internal_user
- ])
+ expect(described_class.without_forbidden_states).to match_array(
+ [
+ normal_user, admin_user, external_user, unconfirmed_user, omniauth_user, internal_user
+ ])
end
end
diff --git a/spec/models/users/namespace_callout_spec.rb b/spec/models/users/namespace_callout_spec.rb
deleted file mode 100644
index f8207f2abc8..00000000000
--- a/spec/models/users/namespace_callout_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::NamespaceCallout do
- let_it_be(:user) { create_default(:user) }
- let_it_be(:namespace) { create_default(:namespace) }
- let_it_be(:callout) { create(:namespace_callout) }
-
- it_behaves_like 'having unique enum values'
-
- describe 'relationships' do
- it { is_expected.to belong_to(:namespace) }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:namespace) }
- it { is_expected.to validate_presence_of(:user) }
- it { is_expected.to validate_presence_of(:feature_name) }
-
- specify do
- is_expected.to validate_uniqueness_of(:feature_name)
- .scoped_to(:user_id, :namespace_id)
- .ignoring_case_sensitivity
- end
-
- it { is_expected.to allow_value(:web_hook_disabled).for(:feature_name) }
-
- it 'rejects invalid feature names' do
- expect { callout.feature_name = :non_existent_feature }.to raise_error(ArgumentError)
- end
- end
-
- describe '#source_feature_name' do
- it 'provides string based off source and feature' do
- expect(callout.source_feature_name).to eq "#{callout.feature_name}_#{callout.namespace_id}"
- end
- end
-end
diff --git a/spec/models/users/phone_number_validation_spec.rb b/spec/models/users/phone_number_validation_spec.rb
new file mode 100644
index 00000000000..2f0fd1d3ac9
--- /dev/null
+++ b/spec/models/users/phone_number_validation_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::PhoneNumberValidation do
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to belong_to(:banned_user) }
+
+ it { is_expected.to validate_presence_of(:country) }
+ it { is_expected.to validate_length_of(:country).is_at_most(3) }
+
+ it { is_expected.to validate_presence_of(:international_dial_code) }
+
+ it {
+ is_expected.to validate_numericality_of(:international_dial_code)
+ .only_integer
+ .is_greater_than_or_equal_to(1)
+ .is_less_than_or_equal_to(999)
+ }
+
+ it { is_expected.to validate_presence_of(:phone_number) }
+ it { is_expected.to validate_length_of(:phone_number).is_at_most(12) }
+ it { is_expected.to allow_value('555555').for(:phone_number) }
+ it { is_expected.not_to allow_value('555-555').for(:phone_number) }
+ it { is_expected.not_to allow_value('+555555').for(:phone_number) }
+ it { is_expected.not_to allow_value('555 555').for(:phone_number) }
+
+ it { is_expected.to validate_length_of(:telesign_reference_xid).is_at_most(255) }
+
+ describe '.related_to_banned_user?' do
+ let_it_be(:international_dial_code) { 1 }
+ let_it_be(:phone_number) { '555' }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:banned_user) { create(:user, :banned) }
+
+ subject(:related_to_banned_user?) do
+ described_class.related_to_banned_user?(international_dial_code, phone_number)
+ end
+
+ context 'when banned user has the same international dial code and phone number' do
+ before do
+ create(:phone_number_validation, user: banned_user)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when banned user has the same international dial code and phone number, but different country code' do
+ before do
+ create(:phone_number_validation, user: banned_user, country: 'CA')
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when banned user does not have the same international dial code' do
+ before do
+ create(:phone_number_validation, user: banned_user, international_dial_code: 61)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when banned user does not have the same phone number' do
+ before do
+ create(:phone_number_validation, user: banned_user, phone_number: '666')
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when not-banned user has the same international dial code and phone number' do
+ before do
+ create(:phone_number_validation, user: user)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+end
diff --git a/spec/models/wiki_directory_spec.rb b/spec/models/wiki_directory_spec.rb
index 9b6cec99ddb..44c6f6c9c1a 100644
--- a/spec/models/wiki_directory_spec.rb
+++ b/spec/models/wiki_directory_spec.rb
@@ -24,31 +24,32 @@ RSpec.describe WikiDirectory do
[toplevel1, toplevel2, toplevel3, child1, child2, child3, grandchild1, grandchild2].sort_by(&:title)
)
- expect(entries).to match([
- toplevel1,
- a_kind_of(WikiDirectory).and(
- having_attributes(
- slug: 'parent1', entries: [
- child1,
- child2,
- a_kind_of(WikiDirectory).and(
- having_attributes(
- slug: 'parent1/subparent',
- entries: [grandchild1, grandchild2]
+ expect(entries).to match(
+ [
+ toplevel1,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1', entries: [
+ child1,
+ child2,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1/subparent',
+ entries: [grandchild1, grandchild2]
+ )
)
- )
- ]
- )
- ),
- a_kind_of(WikiDirectory).and(
- having_attributes(
- slug: 'parent2',
- entries: [child3]
- )
- ),
- toplevel2,
- toplevel3
- ])
+ ]
+ )
+ ),
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent2',
+ entries: [child3]
+ )
+ ),
+ toplevel2,
+ toplevel3
+ ])
end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index 96c396f085c..fcb041aebe5 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -3,27 +3,24 @@
require "spec_helper"
RSpec.describe WikiPage do
- let_it_be(:user) { create(:user) }
- let_it_be(:container) { create(:project) }
+ let(:user) { create(:user) }
+ let(:container) { create(:project) }
+ let(:wiki) { container.wiki }
- def create_wiki_page(attrs = {})
- page = build_wiki_page(attrs)
+ def create_wiki_page(container, attrs = {})
+ page = build_wiki_page(container, attrs)
page.create(message: (attrs[:message] || 'test commit'))
container.wiki.find_page(page.slug)
end
- def build_wiki_page(attrs = {})
+ def build_wiki_page(container, attrs = {})
wiki_page_attrs = { container: container, content: 'test content' }.merge(attrs)
build(:wiki_page, wiki_page_attrs)
end
- def wiki
- container.wiki
- end
-
def disable_front_matter
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
end
@@ -32,11 +29,20 @@ RSpec.describe WikiPage do
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => thing)
end
+ def force_wiki_change_branch
+ old_default_branch = wiki.default_branch
+ wiki.repository.add_branch(user, 'another_branch', old_default_branch)
+ wiki.repository.rm_branch(user, old_default_branch)
+ wiki.repository.expire_status_cache
+
+ wiki.container.clear_memoization(:wiki)
+ end
+
# Use for groups of tests that do not modify their `subject`.
#
# include_context 'subject is persisted page', title: 'my title'
shared_context 'subject is persisted page' do |attrs = {}|
- let_it_be(:persisted_page) { create_wiki_page(attrs) }
+ let(:persisted_page) { create_wiki_page(container, attrs) }
subject { persisted_page }
end
@@ -192,7 +198,7 @@ RSpec.describe WikiPage do
end
describe "validations" do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it "validates presence of title" do
subject.attributes.delete(:title)
@@ -357,7 +363,7 @@ RSpec.describe WikiPage do
let(:title) { attributes[:title] }
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
context "with valid attributes" do
it "saves the wiki page" do
@@ -394,7 +400,7 @@ RSpec.describe WikiPage do
let(:title) { 'Index v1.2.3' }
describe "#create" do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it "saves the wiki page and returns true", :aggregate_failures do
attributes = { title: title, content: "Home Page", format: "markdown" }
@@ -405,7 +411,7 @@ RSpec.describe WikiPage do
end
describe '#update' do
- subject { create_wiki_page(title: title) }
+ subject { create_wiki_page(container, title: title) }
it 'updates the content of the page and returns true', :aggregate_failures do
expect(subject.update(content: 'new content')).to be_truthy
@@ -420,7 +426,7 @@ RSpec.describe WikiPage do
describe "#update" do
let!(:original_title) { subject.title }
- subject { create_wiki_page }
+ subject { create_wiki_page(container) }
context "with valid attributes" do
it "updates the content of the page" do
@@ -527,7 +533,7 @@ RSpec.describe WikiPage do
describe 'in subdir' do
it 'keeps the page in the same dir when the content is updated' do
title = 'foo/Existing Page'
- page = create_wiki_page(title: title)
+ page = create_wiki_page(container, title: title)
expect(page.slug).to eq 'foo/Existing-Page'
expect(page.update(title: title, content: 'new_content')).to be_truthy
@@ -541,7 +547,7 @@ RSpec.describe WikiPage do
context 'when renaming a page' do
it 'raises an error if the page already exists' do
- existing_page = create_wiki_page
+ existing_page = create_wiki_page(container)
expect { subject.update(title: existing_page.title, content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
expect(subject.title).to eq original_title
@@ -584,7 +590,7 @@ RSpec.describe WikiPage do
describe 'in subdir' do
it 'moves the page to the root folder if the title is preceded by /' do
- page = create_wiki_page(title: 'foo/Existing Page')
+ page = create_wiki_page(container, title: 'foo/Existing Page')
expect(page.slug).to eq 'foo/Existing-Page'
expect(page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
@@ -592,7 +598,7 @@ RSpec.describe WikiPage do
end
it 'does nothing if it has the same title' do
- page = create_wiki_page(title: 'foo/Another Existing Page')
+ page = create_wiki_page(container, title: 'foo/Another Existing Page')
original_path = page.slug
@@ -625,7 +631,7 @@ RSpec.describe WikiPage do
describe "#delete" do
it "deletes the page and returns true", :aggregate_failures do
- page = create_wiki_page
+ page = create_wiki_page(container)
expect do
expect(page.delete).to eq(true)
@@ -634,22 +640,88 @@ RSpec.describe WikiPage do
end
describe "#versions" do
- let(:subject) { create_wiki_page }
+ let(:subject) { create_wiki_page(container) }
+
+ before do
+ 3.times { |i| subject.update(content: "content #{i}") }
+ end
+
+ context 'number of versions is less than the default paginated per page' do
+ it "returns an array of all commits for the page" do
+ expect(subject.versions).to be_a(::CommitCollection)
+ expect(subject.versions.length).to eq(4)
+ expect(subject.versions.first.id).to eql(subject.last_version.id)
+ end
+ end
+
+ context 'number of versions is more than the default paginated per page' do
+ before do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(3)
+ end
+
+ it "returns an array containing the first page of commits for the page" do
+ expect(subject.versions).to be_a(::CommitCollection)
+ expect(subject.versions.length).to eq(3)
+ expect(subject.versions.first.id).to eql(subject.last_version.id)
+ end
+
+ it "returns an array containing the second page of commits for the page with options[:page] = 2" do
+ versions = subject.versions(page: 2)
+ expect(versions).to be_a(::CommitCollection)
+ expect(versions.length).to eq(1)
+ end
+ end
+
+ context "wiki repository's default branch is updated" do
+ before do
+ force_wiki_change_branch
+ end
+
+ it "returns the correct versions in the default branch" do
+ page = container.wiki.find_page(subject.title)
- it "returns an array of all commits for the page" do
+ expect(page.versions).to be_a(::CommitCollection)
+ expect(page.versions.length).to eq(4)
+ expect(page.versions.first.id).to eql(page.last_version.id)
+
+ page.update(content: "final content")
+ expect(page.versions.length).to eq(5)
+ end
+ end
+ end
+
+ describe "#count_versions" do
+ let(:subject) { create_wiki_page(container) }
+
+ it "returns the total numbers of commits" do
expect do
3.times { |i| subject.update(content: "content #{i}") }
- end.to change { subject.versions.count }.by(3)
+ end.to change(subject, :count_versions).from(1).to(4)
+ end
+
+ context "wiki repository's default branch is updated" do
+ before do
+ subject
+ force_wiki_change_branch
+ end
+
+ it "returns the correct number of versions in the default branch" do
+ page = container.wiki.find_page(subject.title)
+ expect(page.count_versions).to eq(1)
+
+ page.update(content: "final content")
+ expect(page.count_versions).to eq(2)
+ end
end
end
describe '#title_changed?' do
using RSpec::Parameterized::TableSyntax
- let_it_be(:unsaved_page) { build_wiki_page(title: 'test page') }
- let_it_be(:existing_page) { create_wiki_page(title: 'test page') }
- let_it_be(:directory_page) { create_wiki_page(title: 'parent directory/child page') }
- let_it_be(:page_with_special_characters) { create_wiki_page(title: 'test+page') }
+ let(:unsaved_page) { build_wiki_page(container, title: 'test page') }
+ let(:existing_page) { create_wiki_page(container, title: 'test page') }
+ let(:directory_page) { create_wiki_page(container, title: 'parent directory/child page') }
+ let(:page_with_special_characters) { create_wiki_page(container, title: 'test+page') }
let(:untitled_page) { described_class.new(wiki) }
@@ -704,7 +776,7 @@ RSpec.describe WikiPage do
describe '#content_changed?' do
context 'with a new page' do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it 'returns true if content is set' do
subject.attributes[:content] = 'new'
@@ -756,13 +828,13 @@ RSpec.describe WikiPage do
describe '#path' do
it 'returns the path when persisted' do
- existing_page = create_wiki_page(title: 'path test')
+ existing_page = create_wiki_page(container, title: 'path test')
expect(existing_page.path).to eq('path-test.md')
end
it 'returns nil when not persisted' do
- unsaved_page = build_wiki_page(title: 'path test')
+ unsaved_page = build_wiki_page(container, title: 'path test')
expect(unsaved_page.path).to be_nil
end
@@ -789,7 +861,7 @@ RSpec.describe WikiPage do
describe '#historical?' do
let!(:container) { create(:project) }
- subject { create_wiki_page }
+ subject { create_wiki_page(container) }
let(:wiki) { subject.wiki }
let(:old_version) { subject.versions.last.id }
@@ -830,17 +902,17 @@ RSpec.describe WikiPage do
describe '#persisted?' do
it 'returns true for a persisted page' do
- expect(create_wiki_page).to be_persisted
+ expect(create_wiki_page(container)).to be_persisted
end
it 'returns false for an unpersisted page' do
- expect(build_wiki_page).not_to be_persisted
+ expect(build_wiki_page(container)).not_to be_persisted
end
end
describe '#to_partial_path' do
it 'returns the relative path to the partial to be used' do
- expect(build_wiki_page.to_partial_path).to eq('../shared/wikis/wiki_page')
+ expect(build_wiki_page(container).to_partial_path).to eq('../shared/wikis/wiki_page')
end
end
@@ -868,7 +940,7 @@ RSpec.describe WikiPage do
end
it 'returns false for page with different slug on same container' do
- other_page = create_wiki_page
+ other_page = create_wiki_page(container)
expect(subject.slug).not_to eq(other_page.slug)
expect(subject.container).to eq(other_page.container)
@@ -902,7 +974,7 @@ RSpec.describe WikiPage do
end
describe '#hook_attrs' do
- subject { build_wiki_page }
+ subject { build_wiki_page(container) }
it 'adds absolute urls for images in the content' do
subject.attributes[:content] = 'test![WikiPage_Image](/uploads/abc/WikiPage_Image.png)'
@@ -914,13 +986,13 @@ RSpec.describe WikiPage do
describe '#version_commit_timestamp' do
context 'for a new page' do
it 'returns nil' do
- expect(build_wiki_page.version_commit_timestamp).to be_nil
+ expect(build_wiki_page(container).version_commit_timestamp).to be_nil
end
end
context 'for page that exists' do
it 'returns the timestamp of the commit' do
- existing_page = create_wiki_page
+ existing_page = create_wiki_page(container)
expect(existing_page.version_commit_timestamp).to eq(existing_page.version.commit.committed_date)
end
diff --git a/spec/policies/blob_policy_spec.rb b/spec/policies/blob_policy_spec.rb
index 1be2318a0fe..c1df4e66677 100644
--- a/spec/policies/blob_policy_spec.rb
+++ b/spec/policies/blob_policy_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe BlobPolicy do
include_context 'ProjectPolicyTable context'
include ProjectHelpers
+ include UserHelpers
let_it_be_with_reload(:project) { create(:project, :repository) }
diff --git a/spec/policies/ci/runner_policy_spec.rb b/spec/policies/ci/runner_policy_spec.rb
index 880ff0722fa..773d3d9a01d 100644
--- a/spec/policies/ci/runner_policy_spec.rb
+++ b/spec/policies/ci/runner_policy_spec.rb
@@ -6,42 +6,64 @@ RSpec.describe Ci::RunnerPolicy do
describe 'ability :read_runner' do
let_it_be(:guest) { create(:user) }
let_it_be(:developer) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
let_it_be(:owner) { create(:user) }
- let_it_be(:group1) { create(:group, name: 'top-level', path: 'top-level') }
- let_it_be(:subgroup1) { create(:group, name: 'subgroup1', path: 'subgroup1', parent: group1) }
- let_it_be(:project1) { create(:project, group: subgroup1) }
+ let_it_be_with_reload(:group) { create(:group, name: 'top-level', path: 'top-level') }
+ let_it_be_with_reload(:subgroup) { create(:group, name: 'subgroup', path: 'subgroup', parent: group) }
+ let_it_be_with_reload(:project) { create(:project, group: subgroup) }
+
let_it_be(:instance_runner) { create(:ci_runner, :instance) }
- let_it_be(:group1_runner) { create(:ci_runner, :group, groups: [group1]) }
- let_it_be(:project1_runner) { create(:ci_runner, :project, projects: [project1]) }
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
+ let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
subject(:policy) { described_class.new(user, runner) }
- before do
- group1.add_guest(guest)
- group1.add_developer(developer)
- group1.add_owner(owner)
+ before_all do
+ group.add_guest(guest)
+ group.add_developer(developer)
+ group.add_maintainer(maintainer)
+ group.add_owner(owner)
end
- shared_context 'on hierarchy with shared runners disabled' do
- around do |example|
- group1.update!(shared_runners_enabled: false)
- project1.update!(shared_runners_enabled: false)
+ shared_examples 'a policy allowing reading instance runner depending on runner sharing' do
+ context 'with instance runner' do
+ let(:runner) { instance_runner }
+
+ it { expect_allowed :read_runner }
+
+ context 'with shared runners disabled on projects' do
+ before do
+ project.update!(shared_runners_enabled: false)
+ end
+
+ it { expect_allowed :read_runner }
+ end
- example.run
- ensure
- project1.update!(shared_runners_enabled: true)
- group1.update!(shared_runners_enabled: true)
+ context 'with shared runners disabled for groups and projects' do
+ before do
+ group.update!(shared_runners_enabled: false)
+ project.update!(shared_runners_enabled: false)
+ end
+
+ it { expect_disallowed :read_runner }
+ end
end
end
- shared_context 'on hierarchy with group runners disabled' do
- around do |example|
- project1.update!(group_runners_enabled: false)
+ shared_examples 'a policy allowing reading group runner depending on runner sharing' do
+ context 'with group runner' do
+ let(:runner) { group_runner }
+
+ it { expect_allowed :read_runner }
- example.run
- ensure
- project1.update!(group_runners_enabled: true)
+ context 'with sharing of group runners disabled' do
+ before do
+ project.update!(group_runners_enabled: false)
+ end
+
+ it { expect_disallowed :read_runner }
+ end
end
end
@@ -51,27 +73,32 @@ RSpec.describe Ci::RunnerPolicy do
it { expect_disallowed :read_runner }
- context 'with shared runners disabled' do
- include_context 'on hierarchy with shared runners disabled' do
- it { expect_disallowed :read_runner }
+ context 'with shared runners disabled for groups and projects' do
+ before do
+ group.update!(shared_runners_enabled: false)
+ project.update!(shared_runners_enabled: false)
end
+
+ it { expect_disallowed :read_runner }
end
end
context 'with group runner' do
- let(:runner) { group1_runner }
+ let(:runner) { group_runner }
it { expect_disallowed :read_runner }
- context 'with group runner disabled' do
- include_context 'on hierarchy with group runners disabled' do
- it { expect_disallowed :read_runner }
+ context 'with sharing of group runners disabled' do
+ before do
+ project.update!(group_runners_enabled: false)
end
+
+ it { expect_disallowed :read_runner }
end
end
context 'with project runner' do
- let(:runner) { project1_runner }
+ let(:runner) { project_runner }
it { expect_disallowed :read_runner }
end
@@ -92,66 +119,52 @@ RSpec.describe Ci::RunnerPolicy do
context 'with developer access' do
let(:user) { developer }
- context 'with instance runner' do
- let(:runner) { instance_runner }
+ it_behaves_like 'a policy allowing reading instance runner depending on runner sharing'
- it { expect_allowed :read_runner }
+ it_behaves_like 'a policy allowing reading group runner depending on runner sharing'
- context 'with shared runners disabled' do
- include_context 'on hierarchy with shared runners disabled' do
- it { expect_disallowed :read_runner }
- end
- end
+ context 'with project runner' do
+ let(:runner) { project_runner }
+
+ it { expect_disallowed :read_runner }
end
+ end
- context 'with group runner' do
- let(:runner) { group1_runner }
+ context 'with maintainer access' do
+ let(:user) { maintainer }
- it { expect_allowed :read_runner }
+ it_behaves_like 'a policy allowing reading instance runner depending on runner sharing'
- context 'with group runner disabled' do
- include_context 'on hierarchy with group runners disabled' do
- it { expect_disallowed :read_runner }
- end
- end
- end
+ it_behaves_like 'a policy allowing reading group runner depending on runner sharing'
context 'with project runner' do
- let(:runner) { project1_runner }
+ let(:runner) { project_runner }
- it { expect_disallowed :read_runner }
+ it { expect_allowed :read_runner }
end
end
context 'with owner access' do
let(:user) { owner }
- context 'with instance runner' do
- let(:runner) { instance_runner }
+ it_behaves_like 'a policy allowing reading instance runner depending on runner sharing'
- context 'with shared runners disabled' do
- include_context 'on hierarchy with shared runners disabled' do
- it { expect_disallowed :read_runner }
- end
- end
+ context 'with group runner' do
+ let(:runner) { group_runner }
it { expect_allowed :read_runner }
- end
- context 'with group runner' do
- let(:runner) { group1_runner }
-
- context 'with group runners disabled' do
- include_context 'on hierarchy with group runners disabled' do
- it { expect_allowed :read_runner }
+ context 'with sharing of group runners disabled' do
+ before do
+ project.update!(group_runners_enabled: false)
end
- end
- it { expect_allowed :read_runner }
+ it { expect_allowed :read_runner }
+ end
end
context 'with project runner' do
- let(:runner) { project1_runner }
+ let(:runner) { project_runner }
it { expect_allowed :read_runner }
end
diff --git a/spec/policies/concerns/crud_policy_helpers_spec.rb b/spec/policies/concerns/crud_policy_helpers_spec.rb
index 69bf9ad12d6..1e7b99178c3 100644
--- a/spec/policies/concerns/crud_policy_helpers_spec.rb
+++ b/spec/policies/concerns/crud_policy_helpers_spec.rb
@@ -17,34 +17,37 @@ RSpec.describe CrudPolicyHelpers do
describe '.create_read_update_admin_destroy' do
it 'returns an array of the appropriate abilities given a feature name' do
- expect(PolicyTestClass.create_read_update_admin_destroy(feature_name)).to eq([
- :read_foo,
- :create_foo,
- :update_foo,
- :admin_foo,
- :destroy_foo
- ])
+ expect(PolicyTestClass.create_read_update_admin_destroy(feature_name)).to eq(
+ [
+ :read_foo,
+ :create_foo,
+ :update_foo,
+ :admin_foo,
+ :destroy_foo
+ ])
end
end
describe '.create_update_admin_destroy' do
it 'returns an array of the appropriate abilities given a feature name' do
- expect(PolicyTestClass.create_update_admin_destroy(feature_name)).to eq([
- :create_foo,
- :update_foo,
- :admin_foo,
- :destroy_foo
- ])
+ expect(PolicyTestClass.create_update_admin_destroy(feature_name)).to eq(
+ [
+ :create_foo,
+ :update_foo,
+ :admin_foo,
+ :destroy_foo
+ ])
end
end
describe '.create_update_admin' do
it 'returns an array of the appropriate abilities given a feature name' do
- expect(PolicyTestClass.create_update_admin(feature_name)).to eq([
- :create_foo,
- :update_foo,
- :admin_foo
- ])
+ expect(PolicyTestClass.create_update_admin(feature_name)).to eq(
+ [
+ :create_foo,
+ :update_foo,
+ :admin_foo
+ ])
end
end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index da0270c15b9..c65933c5208 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -1175,28 +1175,14 @@ RSpec.describe GroupPolicy do
let(:current_user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
- context 'with runner_registration_control FF disabled' do
- before do
- stub_feature_flags(runner_registration_control: false)
- end
-
- it { is_expected.to be_allowed(:register_group_runners) }
- end
+ it { is_expected.to be_allowed(:register_group_runners) }
- context 'with runner_registration_control FF enabled' do
+ context 'with group runner registration disabled' do
before do
- stub_feature_flags(runner_registration_control: true)
+ stub_application_setting(valid_runner_registrars: ['project'])
end
it { is_expected.to be_allowed(:register_group_runners) }
-
- context 'with group runner registration disabled' do
- before do
- stub_application_setting(valid_runner_registrars: ['project'])
- end
-
- it { is_expected.to be_allowed(:register_group_runners) }
- end
end
end
@@ -1210,28 +1196,12 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_allowed(:register_group_runners) }
- context 'with runner_registration_control FF disabled' do
- before do
- stub_feature_flags(runner_registration_control: false)
- end
-
- it { is_expected.to be_allowed(:register_group_runners) }
- end
-
- context 'with runner_registration_control FF enabled' do
+ context 'with group runner registration disabled' do
before do
- stub_feature_flags(runner_registration_control: true)
+ stub_application_setting(valid_runner_registrars: ['project'])
end
- it { is_expected.to be_allowed(:register_group_runners) }
-
- context 'with group runner registration disabled' do
- before do
- stub_application_setting(valid_runner_registrars: ['project'])
- end
-
- it { is_expected.to be_disallowed(:register_group_runners) }
- end
+ it { is_expected.to be_disallowed(:register_group_runners) }
end
end
@@ -1266,6 +1236,62 @@ RSpec.describe GroupPolicy do
end
end
+ describe 'read_group_all_available_runners' do
+ context 'admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ specify { is_expected.to be_allowed(:read_group_all_available_runners) }
+ end
+
+ context 'when admin mode is disabled' do
+ specify { is_expected.to be_disallowed(:read_group_all_available_runners) }
+ end
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ specify { is_expected.to be_allowed(:read_group_all_available_runners) }
+ end
+
+ context 'with maintainer' do
+ let(:current_user) { maintainer }
+
+ specify { is_expected.to be_allowed(:read_group_all_available_runners) }
+ end
+
+ context 'with developer' do
+ let(:current_user) { developer }
+
+ specify { is_expected.to be_allowed(:read_group_all_available_runners) }
+ end
+
+ context 'with reporter' do
+ let(:current_user) { reporter }
+
+ specify { is_expected.to be_disallowed(:read_group_all_available_runners) }
+ end
+
+ context 'with guest' do
+ let(:current_user) { guest }
+
+ specify { is_expected.to be_disallowed(:read_group_all_available_runners) }
+ end
+
+ context 'with non member' do
+ let(:current_user) { create(:user) }
+
+ specify { is_expected.to be_disallowed(:read_group_all_available_runners) }
+ end
+
+ context 'with anonymous' do
+ let(:current_user) { nil }
+
+ specify { is_expected.to be_disallowed(:read_group_all_available_runners) }
+ end
+ end
+
describe 'change_prevent_sharing_groups_outside_hierarchy' do
context 'with owner' do
let(:current_user) { owner }
diff --git a/spec/policies/issuable_policy_spec.rb b/spec/policies/issuable_policy_spec.rb
index c02294571ff..2bedcf60539 100644
--- a/spec/policies/issuable_policy_spec.rb
+++ b/spec/policies/issuable_policy_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe IssuablePolicy, models: true do
expect(policies).to be_allowed(:resolve_note)
end
- it 'allows reading confidential notes' do
- expect(policies).to be_allowed(:read_confidential_notes)
+ it 'allows reading internal notes' do
+ expect(policies).to be_allowed(:read_internal_note)
end
context 'when user is able to read project' do
@@ -94,8 +94,8 @@ RSpec.describe IssuablePolicy, models: true do
let(:issue) { create(:issue, project: project, assignees: [user]) }
let(:policies) { described_class.new(user, issue) }
- it 'allows reading confidential notes' do
- expect(policies).to be_allowed(:read_confidential_notes)
+ it 'allows reading internal notes' do
+ expect(policies).to be_allowed(:read_internal_note)
end
end
@@ -145,6 +145,10 @@ RSpec.describe IssuablePolicy, models: true do
it 'does not allow timelogs creation' do
expect(policies).to be_disallowed(:create_timelog)
end
+
+ it 'does not allow reading internal notes' do
+ expect(permissions(guest, issue)).to be_disallowed(:read_internal_note)
+ end
end
context 'when user is a guest member of the project' do
@@ -152,8 +156,8 @@ RSpec.describe IssuablePolicy, models: true do
expect(permissions(guest, issue)).to be_disallowed(:create_timelog)
end
- it 'does not allow reading confidential notes' do
- expect(permissions(guest, issue)).to be_disallowed(:read_confidential_notes)
+ it 'does not allow reading internal notes' do
+ expect(permissions(guest, issue)).to be_disallowed(:read_internal_note)
end
end
@@ -170,8 +174,8 @@ RSpec.describe IssuablePolicy, models: true do
expect(permissions(reporter, issue)).to be_allowed(:create_timelog)
end
- it 'allows reading confidential notes' do
- expect(permissions(reporter, issue)).to be_allowed(:read_confidential_notes)
+ it 'allows reading internal notes' do
+ expect(permissions(reporter, issue)).to be_allowed(:read_internal_note)
end
end
@@ -188,6 +192,7 @@ RSpec.describe IssuablePolicy, models: true do
it 'does not allow :read_issuable' do
expect(policy).not_to be_allowed(:read_issuable)
+ expect(policy).not_to be_allowed(:read_issuable_participables)
end
end
@@ -196,6 +201,7 @@ RSpec.describe IssuablePolicy, models: true do
it 'allows :read_issuable' do
expect(policy).to be_allowed(:read_issuable)
+ expect(policy).to be_allowed(:read_issuable_participables)
end
end
end
@@ -213,6 +219,7 @@ RSpec.describe IssuablePolicy, models: true do
it 'does not allow :read_issuable' do
expect(policy).not_to be_allowed(:read_issuable)
+ expect(policy).not_to be_allowed(:read_issuable_participables)
end
end
@@ -221,6 +228,7 @@ RSpec.describe IssuablePolicy, models: true do
it 'allows :read_issuable' do
expect(policy).to be_allowed(:read_issuable)
+ expect(policy).to be_allowed(:read_issuable_participables)
end
end
end
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 4d492deb54c..c110ca705bd 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe IssuePolicy do
include_context 'ProjectPolicyTable context'
include ExternalAuthorizationServiceHelpers
include ProjectHelpers
+ include UserHelpers
let(:guest) { create(:user) }
let(:author) { create(:user) }
@@ -84,7 +85,7 @@ RSpec.describe IssuePolicy do
it 'allows guests to read issues' do
expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid)
- expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :mark_note_as_confidential)
expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
@@ -92,10 +93,10 @@ RSpec.describe IssuePolicy do
expect(permissions(guest, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
- it 'allows reporters to read, update, and admin issues' do
+ it 'allows reporters to read, update, admin and create confidential notes' do
expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :mark_note_as_confidential)
end
it 'allows reporters from group links to read, update, and admin issues' do
diff --git a/spec/policies/namespaces/user_namespace_policy_spec.rb b/spec/policies/namespaces/user_namespace_policy_spec.rb
index 22c3f6a6d67..42d27d0f3d6 100644
--- a/spec/policies/namespaces/user_namespace_policy_spec.rb
+++ b/spec/policies/namespaces/user_namespace_policy_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Namespaces::UserNamespacePolicy do
let_it_be(:admin) { create(:admin) }
let_it_be(:namespace) { create(:user_namespace, owner: owner) }
- let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package] }
+ let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package, :read_billing, :edit_billing] }
subject { described_class.new(current_user, namespace) }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index fefd9f71408..40ee2e662b2 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -103,6 +103,20 @@ RSpec.describe ProjectPolicy do
end
end
+ context 'when both issues and merge requests are disabled' do
+ let(:current_user) { owner }
+
+ before do
+ project.issues_enabled = false
+ project.merge_requests_enabled = false
+ project.save!
+ end
+
+ it 'does not include the issues permissions' do
+ expect_disallowed :read_cycle_analytics
+ end
+ end
+
context 'creating_merge_request_in' do
context 'when the current_user can download_code' do
before do
@@ -465,15 +479,14 @@ RSpec.describe ProjectPolicy do
end
context 'owner access' do
- let!(:owner_user) { create(:user) }
- let!(:owner_of_different_thing) { create(:user) }
- let(:stranger) { create(:user) }
+ let_it_be(:owner_user) { owner }
+ let_it_be(:owner_of_different_thing) { create(:user) }
context 'personal project' do
- let!(:project) { create(:project) }
- let!(:project2) { create(:project) }
+ let_it_be(:project) { private_project }
+ let_it_be(:project2) { create(:project) }
- before do
+ before_all do
project.add_guest(guest)
project.add_reporter(reporter)
project.add_developer(developer)
@@ -483,7 +496,7 @@ RSpec.describe ProjectPolicy do
it 'allows owner access', :aggregate_failures do
expect(described_class.new(owner_of_different_thing, project)).to be_disallowed(:owner_access)
- expect(described_class.new(stranger, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(non_member, project)).to be_disallowed(:owner_access)
expect(described_class.new(guest, project)).to be_disallowed(:owner_access)
expect(described_class.new(reporter, project)).to be_disallowed(:owner_access)
expect(described_class.new(developer, project)).to be_disallowed(:owner_access)
@@ -493,12 +506,12 @@ RSpec.describe ProjectPolicy do
end
context 'group project' do
- let(:group) { create(:group) }
- let!(:group2) { create(:group) }
- let!(:project) { create(:project, group: group) }
+ let_it_be(:project) { private_project_in_group }
+ let_it_be(:group2) { create(:group) }
+ let_it_be(:group) { project.group }
context 'group members' do
- before do
+ before_all do
group.add_guest(guest)
group.add_reporter(reporter)
group.add_developer(developer)
@@ -509,7 +522,7 @@ RSpec.describe ProjectPolicy do
it 'allows owner access', :aggregate_failures do
expect(described_class.new(owner_of_different_thing, project)).to be_disallowed(:owner_access)
- expect(described_class.new(stranger, project)).to be_disallowed(:owner_access)
+ expect(described_class.new(non_member, project)).to be_disallowed(:owner_access)
expect(described_class.new(guest, project)).to be_disallowed(:owner_access)
expect(described_class.new(reporter, project)).to be_disallowed(:owner_access)
expect(described_class.new(developer, project)).to be_disallowed(:owner_access)
@@ -1692,7 +1705,7 @@ RSpec.describe ProjectPolicy do
let_it_be(:project_with_analytics_private) { create(:project, :analytics_private) }
let_it_be(:project_with_analytics_enabled) { create(:project, :analytics_enabled) }
- before do
+ before_all do
project_with_analytics_disabled.add_guest(guest)
project_with_analytics_private.add_guest(guest)
project_with_analytics_enabled.add_guest(guest)
@@ -2424,7 +2437,7 @@ RSpec.describe ProjectPolicy do
before do
current_user.set_ci_job_token_scope!(job)
current_user.external = external_user
- scope_project.update!(ci_job_token_scope_enabled: token_scope_enabled)
+ scope_project.update!(ci_outbound_job_token_scope_enabled: token_scope_enabled)
end
it "enforces the expected permissions" do
@@ -2617,28 +2630,14 @@ RSpec.describe ProjectPolicy do
let(:current_user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
- context 'with runner_registration_control FF disabled' do
- before do
- stub_feature_flags(runner_registration_control: false)
- end
-
- it { is_expected.to be_allowed(:register_project_runners) }
- end
+ it { is_expected.to be_allowed(:register_project_runners) }
- context 'with runner_registration_control FF enabled' do
+ context 'with project runner registration disabled' do
before do
- stub_feature_flags(runner_registration_control: true)
+ stub_application_setting(valid_runner_registrars: ['group'])
end
it { is_expected.to be_allowed(:register_project_runners) }
-
- context 'with project runner registration disabled' do
- before do
- stub_application_setting(valid_runner_registrars: ['group'])
- end
-
- it { is_expected.to be_allowed(:register_project_runners) }
- end
end
end
@@ -2652,28 +2651,12 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_allowed(:register_project_runners) }
- context 'with runner_registration_control FF disabled' do
- before do
- stub_feature_flags(runner_registration_control: false)
- end
-
- it { is_expected.to be_allowed(:register_project_runners) }
- end
-
- context 'with runner_registration_control FF enabled' do
+ context 'with project runner registration disabled' do
before do
- stub_feature_flags(runner_registration_control: true)
+ stub_application_setting(valid_runner_registrars: ['group'])
end
- it { is_expected.to be_allowed(:register_project_runners) }
-
- context 'with project runner registration disabled' do
- before do
- stub_application_setting(valid_runner_registrars: ['group'])
- end
-
- it { is_expected.to be_disallowed(:register_project_runners) }
- end
+ it { is_expected.to be_disallowed(:register_project_runners) }
end
end
@@ -2764,6 +2747,50 @@ RSpec.describe ProjectPolicy do
end
end
+ describe 'role_enables_download_code' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'default roles' do
+ let(:current_user) { public_send(role) }
+
+ context 'public project' do
+ let(:project) { public_project }
+
+      where(:role, :allowed) do
+        :owner | true
+        :maintainer | true
+        :developer | true
+        :reporter | true
+        :guest | true
+      end
+
+      with_them do
+        it do
+          expect(subject.can?(:download_code)).to be(allowed)
+        end
+      end
+    end
+
+ context 'private project' do
+ let(:project) { private_project }
+
+ where(:role, :allowed) do
+ :owner | true
+ :maintainer | true
+ :developer | true
+ :reporter | true
+ :guest | false
+ end
+
+ with_them do
+ it do
+ expect(subject.can?(:download_code)).to be(allowed)
+ end
+ end
+ end
+ end
+ end
+
private
def project_subject(project_type)
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index 8b96aa99f69..c6d8ef05cfd 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -2,29 +2,28 @@
require 'spec_helper'
-# Snippet visibility scenarios are included in more details in spec/support/snippet_visibility.rb
+# Snippet visibility scenarios are included in more details in spec/finders/snippets_finder_spec.rb
RSpec.describe ProjectSnippetPolicy do
+ let_it_be(:group) { create(:group, :public) }
let_it_be(:regular_user) { create(:user) }
- let_it_be(:other_user) { create(:user) }
let_it_be(:external_user) { create(:user, :external) }
- let_it_be(:project) { create(:project, :public) }
-
- let(:snippet) { create(:project_snippet, snippet_visibility, project: project, author: author) }
- let(:author) { other_user }
- let(:author_permissions) do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:author_permissions) do
[
:update_snippet,
:admin_snippet
]
end
+ let(:snippet) { build(:project_snippet, snippet_visibility, project: project, author: author) }
+
subject { described_class.new(current_user, snippet) }
- shared_examples 'regular user access rights' do
+ shared_examples 'regular user member permissions' do
context 'not snippet author' do
- context 'project team member (non guest)' do
+ context 'member (guest)' do
before do
- project.add_developer(current_user)
+ membership_target.add_guest(current_user)
end
it do
@@ -33,25 +32,35 @@ RSpec.describe ProjectSnippetPolicy do
end
end
- context 'project team member (guest)' do
+ context 'member (reporter)' do
before do
- project.add_guest(current_user)
+ membership_target.add_reporter(current_user)
end
it do
expect_allowed(:read_snippet, :create_note)
- expect_disallowed(:admin_snippet)
+ expect_disallowed(*author_permissions)
end
end
- context 'project team member (maintainer)' do
+ context 'member (developer)' do
before do
- project.add_maintainer(current_user)
+ membership_target.add_developer(current_user)
end
it do
expect_allowed(:read_snippet, :create_note)
- expect_allowed(*author_permissions)
+ expect_disallowed(*author_permissions)
+ end
+ end
+
+ context 'member (maintainer)' do
+ before do
+ membership_target.add_maintainer(current_user)
+ end
+
+ it do
+ expect_allowed(:read_snippet, :create_note, *author_permissions)
end
end
end
@@ -59,196 +68,263 @@ RSpec.describe ProjectSnippetPolicy do
context 'snippet author' do
let(:author) { current_user }
- context 'project member (non guest)' do
+ context 'member (guest)' do
before do
- project.add_developer(current_user)
+ membership_target.add_guest(current_user)
end
it do
- expect_allowed(:read_snippet, :create_note)
- expect_allowed(*author_permissions)
+ expect_allowed(:read_snippet, :create_note, :update_snippet)
+ expect_disallowed(:admin_snippet)
end
end
- context 'project member (guest)' do
+ context 'member (reporter)' do
before do
- project.add_guest(current_user)
+ membership_target.add_reporter(current_user)
end
it do
- expect_allowed(:read_snippet, :create_note)
- expect_disallowed(:admin_snippet)
+ expect_allowed(:read_snippet, :create_note, *author_permissions)
end
end
- context 'project team member (maintainer)' do
+ context 'member (developer)' do
before do
- project.add_maintainer(current_user)
+ membership_target.add_developer(current_user)
end
it do
- expect_allowed(:read_snippet, :create_note)
- expect_allowed(*author_permissions)
+ expect_allowed(:read_snippet, :create_note, *author_permissions)
end
end
- context 'not a project member' do
+ context 'member (maintainer)' do
+ before do
+ membership_target.add_maintainer(current_user)
+ end
+
it do
- expect_allowed(:read_snippet, :create_note)
- expect_disallowed(:admin_snippet)
+ expect_allowed(:read_snippet, :create_note, *author_permissions)
end
end
end
end
- context 'public snippet' do
- let(:snippet_visibility) { :public }
-
- context 'no user' do
- let(:current_user) { nil }
+ shared_examples 'regular user non-member author permissions' do
+ let(:author) { current_user }
- it do
- expect_allowed(:read_snippet)
- expect_disallowed(*author_permissions)
- end
+ it do
+ expect_allowed(:read_snippet, :create_note, :update_snippet)
+ expect_disallowed(:admin_snippet)
end
+ end
- context 'regular user' do
- let(:current_user) { regular_user }
-
- it do
- expect_allowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
- end
+ context 'when project is public' do
+ let_it_be(:project) { create(:project, :public, group: group) }
- it_behaves_like 'regular user access rights'
- end
+ context 'with public snippet' do
+ let(:snippet_visibility) { :public }
- context 'external user' do
- let(:current_user) { external_user }
+ context 'no user' do
+ let(:current_user) { nil }
- it do
- expect_allowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
+ it do
+ expect_allowed(:read_snippet)
+ expect_disallowed(*author_permissions)
+ end
end
- context 'project team member' do
- before do
- project.add_developer(external_user)
+ context 'regular user' do
+ let(:current_user) { regular_user }
+ let(:membership_target) { project }
+
+ context 'when user is not a member' do
+ context 'and is not the snippet author' do
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
+
+ context 'and is the snippet author' do
+ it_behaves_like 'regular user non-member author permissions'
+ end
end
+ context 'when user is a member' do
+ it_behaves_like 'regular user member permissions'
+ end
+ end
+
+ context 'external user' do
+ let(:current_user) { external_user }
+
it do
expect_allowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
- end
- end
- end
-
- context 'internal snippet' do
- let(:snippet_visibility) { :internal }
- context 'no user' do
- let(:current_user) { nil }
+ context 'when user is a member' do
+ before do
+ project.add_developer(external_user)
+ end
- it do
- expect_disallowed(:read_snippet)
- expect_disallowed(*author_permissions)
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
end
end
- context 'regular user' do
- let(:current_user) { regular_user }
+ context 'with internal snippet' do
+ let(:snippet_visibility) { :internal }
- it do
- expect_allowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
- end
+ context 'no user' do
+ let(:current_user) { nil }
- it_behaves_like 'regular user access rights'
- end
+ it do
+ expect_disallowed(:read_snippet)
+ expect_disallowed(*author_permissions)
+ end
+ end
- context 'external user' do
- let(:current_user) { external_user }
+ context 'regular user' do
+ let(:current_user) { regular_user }
+ let(:membership_target) { project }
+
+ context 'when user is not a member' do
+ context 'and is not the snippet author' do
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
+
+ context 'and is the snippet author' do
+ it_behaves_like 'regular user non-member author permissions'
+ end
+ end
- it do
- expect_disallowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
+ context 'when user is a member' do
+ it_behaves_like 'regular user member permissions'
+ end
end
- context 'project team member' do
- before do
- project.add_developer(external_user)
- end
+ context 'external user' do
+ let(:current_user) { external_user }
it do
- expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
+
+ context 'when user is a member' do
+ before do
+ project.add_developer(external_user)
+ end
+
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
end
end
- end
- context 'private snippet' do
- let(:snippet_visibility) { :private }
+ context 'with private snippet' do
+ let(:snippet_visibility) { :private }
- context 'no user' do
- let(:current_user) { nil }
+ context 'no user' do
+ let(:current_user) { nil }
- it do
- expect_disallowed(:read_snippet)
- expect_disallowed(*author_permissions)
+ it do
+ expect_disallowed(:read_snippet)
+ expect_disallowed(*author_permissions)
+ end
end
- end
- context 'regular user' do
- let(:current_user) { regular_user }
+ context 'regular user' do
+ let(:current_user) { regular_user }
+ let(:membership_target) { project }
+
+ context 'when user is not a member' do
+ context 'and is not the snippet author' do
+ it do
+ expect_disallowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
+ end
+
+ context 'and is the snippet author' do
+ it_behaves_like 'regular user non-member author permissions'
+ end
+ end
- it do
- expect_disallowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
+ context 'when user is a member' do
+ it_behaves_like 'regular user member permissions'
+ end
end
- it_behaves_like 'regular user access rights'
- end
-
- context 'external user' do
- let(:current_user) { external_user }
+ context 'inherited user' do
+ let(:current_user) { regular_user }
+ let(:membership_target) { group }
- it do
- expect_disallowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
+ it_behaves_like 'regular user member permissions'
end
- context 'project team member' do
- before do
- project.add_developer(current_user)
- end
+ context 'external user' do
+ let(:current_user) { external_user }
it do
- expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(:read_snippet, :create_note)
expect_disallowed(*author_permissions)
end
- end
- end
- context 'admin user' do
- let(:snippet_visibility) { :private }
- let(:current_user) { create(:admin) }
+ context 'when user is a member' do
+ before do
+ project.add_developer(current_user)
+ end
- context 'when admin mode is enabled', :enable_admin_mode do
- it do
- expect_allowed(:read_snippet, :create_note)
- expect_allowed(*author_permissions)
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
end
end
- context 'when admin mode is disabled' do
- it do
- expect_disallowed(:read_snippet, :create_note)
- expect_disallowed(*author_permissions)
+ context 'admin user' do
+ let(:snippet_visibility) { :private }
+ let(:current_user) { create(:admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it do
+ expect_allowed(:read_snippet, :create_note)
+ expect_allowed(*author_permissions)
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it do
+ expect_disallowed(:read_snippet, :create_note)
+ expect_disallowed(*author_permissions)
+ end
end
end
end
end
+
+ context 'when project is private' do
+ let_it_be(:project) { create(:project, :private, group: group) }
+
+ let(:snippet_visibility) { :private }
+
+ context 'inherited user' do
+ let(:current_user) { regular_user }
+ let(:membership_target) { group }
+
+ it_behaves_like 'regular user member permissions'
+ end
+ end
end
diff --git a/spec/policies/wiki_page_policy_spec.rb b/spec/policies/wiki_page_policy_spec.rb
index a2fa7f29135..2712026035c 100644
--- a/spec/policies/wiki_page_policy_spec.rb
+++ b/spec/policies/wiki_page_policy_spec.rb
@@ -5,28 +5,43 @@ require 'spec_helper'
RSpec.describe WikiPagePolicy do
include_context 'ProjectPolicyTable context'
include ProjectHelpers
+ include UserHelpers
using RSpec::Parameterized::TableSyntax
- let(:project) { create(:project, :wiki_repo, project_level) }
- let(:user) { create_user_from_membership(project, membership) }
- let(:wiki_page) { create(:wiki_page, wiki: project.wiki) }
+ let(:group) { build(:group, :public) }
+ let(:project) { build(:project, :wiki_repo, project_level, group: group) }
+ let(:wiki_page) { build(:wiki_page, container: project) }
- subject(:policy) { described_class.new(user, wiki_page) }
+ shared_context 'with :read_wiki_page policy' do
+ subject(:policy) { described_class.new(user, wiki_page) }
- where(:project_level, :feature_access_level, :membership, :admin_mode, :expected_count) do
- permission_table_for_guest_feature_access
- end
+ where(:project_level, :feature_access_level, :membership, :admin_mode, :expected_count) do
+ permission_table_for_guest_feature_access
+ end
- with_them do
- it "grants permission" do
- enable_admin_mode!(user) if admin_mode
- update_feature_access_level(project, feature_access_level)
+ with_them do
+ it 'grants the expected permissions' do
+ enable_admin_mode!(user) if admin_mode
+ update_feature_access_level(project, feature_access_level)
- if expected_count == 1
- expect(policy).to be_allowed(:read_wiki_page)
- else
- expect(policy).to be_disallowed(:read_wiki_page)
+ if expected_count == 1
+ expect(policy).to be_allowed(:read_wiki_page)
+ else
+ expect(policy).to be_disallowed(:read_wiki_page)
+ end
end
end
end
+
+ context 'when user is a direct project member' do
+ let(:user) { build_user_from_membership(project, membership) }
+
+ include_context 'with :read_wiki_page policy'
+ end
+
+ context 'when user is an inherited member from the group' do
+ let(:user) { build_user_from_membership(group, membership) }
+
+ include_context 'with :read_wiki_page policy'
+ end
end
diff --git a/spec/presenters/blobs/unfold_presenter_spec.rb b/spec/presenters/blobs/unfold_presenter_spec.rb
index 14c36461e90..9b3b7f5a1c8 100644
--- a/spec/presenters/blobs/unfold_presenter_spec.rb
+++ b/spec/presenters/blobs/unfold_presenter_spec.rb
@@ -5,13 +5,13 @@ require 'spec_helper'
RSpec.describe Blobs::UnfoldPresenter do
include FakeBlobHelpers
- let(:project) { create(:project, :repository) }
- let(:blob) { fake_blob(path: 'foo', data: "1\n2\n3") }
- let(:subject) { described_class.new(blob, params) }
+ let(:project) { nil } # Project object is not needed but `fake_blob` helper requires it to be defined.
+ let(:blob) { fake_blob(path: 'foo', data: data) }
+ let(:data) { "1\n2\n3" }
- describe '#initialize' do
- let(:result) { subject }
+ subject(:result) { described_class.new(blob, params) }
+ describe '#initialize' do
context 'with empty params' do
let(:params) { {} }
@@ -71,7 +71,7 @@ RSpec.describe Blobs::UnfoldPresenter do
describe '#diff_lines' do
let(:total_lines) { 50 }
- let(:blob) { fake_blob(path: 'foo', data: (1..total_lines).to_a.join("\n")) }
+ let(:data) { (1..total_lines).to_a.join("\n") }
context 'when "full" is true' do
let(:params) { { full: true } }
@@ -91,7 +91,7 @@ RSpec.describe Blobs::UnfoldPresenter do
end
context 'when last line is empty' do
- let(:blob) { fake_blob(path: 'foo', data: "1\n2\n") }
+ let(:data) { "1\n2\n" }
it 'disregards last line' do
lines = subject.diff_lines
@@ -123,7 +123,7 @@ RSpec.describe Blobs::UnfoldPresenter do
expect(line.new_pos).to eq(5)
end
- context '"to" is higher than blob size' do
+ context 'when "to" is higher than blob size' do
let(:params) { default_params.merge(to: total_lines + 10, bottom: true) }
it 'does not add bottom match line' do
@@ -133,7 +133,7 @@ RSpec.describe Blobs::UnfoldPresenter do
end
end
- context '"to" is equal to blob size' do
+ context 'when "to" is equal to blob size' do
let(:params) { default_params.merge(to: total_lines, bottom: true) }
it 'does not add bottom match line' do
@@ -143,7 +143,7 @@ RSpec.describe Blobs::UnfoldPresenter do
end
end
- context '"to" is less than blob size' do
+ context 'when "to" is less than blob size' do
let(:params) { default_params.merge(to: total_lines - 3, bottom: true) }
it 'adds bottom match line' do
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index fe2d8f0f670..396fe7843ba 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -350,6 +350,15 @@ RSpec.describe Ci::BuildRunnerPresenter do
)
end
+ it 'logs file_variable_is_referenced_in_another_variable' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ event: 'file_variable_is_referenced_in_another_variable',
+ project_id: project.id
+ ).once
+
+ runner_variables
+ end
+
context 'when the FF ci_stop_expanding_file_vars_for_runners is disabled' do
before do
stub_feature_flags(ci_stop_expanding_file_vars_for_runners: false)
@@ -383,10 +392,10 @@ RSpec.describe Ci::BuildRunnerPresenter do
it 'returns expanded and sorted variables' do
is_expected.to eq [
- { key: 'C', value: 'value', public: false, masked: false },
- { key: 'B', value: 'refB-value-$D', public: false, masked: false },
- { key: 'A', value: 'refA-refB-value-$D', public: false, masked: false }
- ]
+ { key: 'C', value: 'value', public: false, masked: false },
+ { key: 'B', value: 'refB-value-$D', public: false, masked: false },
+ { key: 'A', value: 'refA-refB-value-$D', public: false, masked: false }
+ ]
end
end
end
diff --git a/spec/presenters/commit_presenter_spec.rb b/spec/presenters/commit_presenter_spec.rb
index df3ee69621b..eba393da2b7 100644
--- a/spec/presenters/commit_presenter_spec.rb
+++ b/spec/presenters/commit_presenter_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe CommitPresenter do
- let(:project) { create(:project, :repository) }
let(:commit) { project.commit }
- let(:user) { create(:user) }
let(:presenter) { described_class.new(commit, current_user: user) }
+ let_it_be(:user) { build_stubbed(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
describe '#web_path' do
it { expect(presenter.web_path).to eq("/#{project.full_path}/-/commit/#{commit.sha}") }
end
diff --git a/spec/presenters/deploy_key_presenter_spec.rb b/spec/presenters/deploy_key_presenter_spec.rb
new file mode 100644
index 00000000000..9e50da12395
--- /dev/null
+++ b/spec/presenters/deploy_key_presenter_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DeployKeyPresenter do
+ let(:presenter) { described_class.new(deploy_key) }
+
+ describe '#humanized_error_message' do
+ subject { presenter.humanized_error_message }
+
+ before do
+ deploy_key.valid?
+ end
+
+ context 'when public key is unsupported' do
+ let(:deploy_key) { build(:deploy_key, key: 'a') }
+
+ it 'returns the custom error message' do
+ expect(subject).to eq('Deploy Key must be a <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="/help/user/ssh#supported-ssh-key-types">supported SSH public key.</a>')
+ end
+ end
+ end
+end
diff --git a/spec/presenters/event_presenter_spec.rb b/spec/presenters/event_presenter_spec.rb
index 5a67fd92c9d..9093791421d 100644
--- a/spec/presenters/event_presenter_spec.rb
+++ b/spec/presenters/event_presenter_spec.rb
@@ -51,6 +51,14 @@ RSpec.describe EventPresenter do
it 'returns milestone for a milestone event' do
expect(group_event.present).to have_attributes(target_type_name: 'milestone')
end
+
+ it 'returns the issue_type for issue events' do
+ expect(build(:event, :for_issue, :created).present).to have_attributes(target_type_name: 'issue')
+ end
+
+ it 'returns the issue_type for work item events' do
+ expect(build(:event, :for_work_item, :created).present).to have_attributes(target_type_name: 'task')
+ end
end
describe '#note_target_type_name' do
diff --git a/spec/presenters/key_presenter_spec.rb b/spec/presenters/key_presenter_spec.rb
new file mode 100644
index 00000000000..d5aa39d5933
--- /dev/null
+++ b/spec/presenters/key_presenter_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe KeyPresenter do
+ let(:presenter) { described_class.new(key) }
+
+ describe '#humanized_error_message' do
+ subject { presenter.humanized_error_message }
+
+ before do
+ key.valid?
+ end
+
+ context 'when public key is unsupported' do
+ let(:key) { build(:key, key: 'a') }
+
+ it 'returns the custom error message' do
+ expect(subject).to eq('Key must be a <a target="_blank" rel="noopener noreferrer" ' \
+ 'href="/help/user/ssh#supported-ssh-key-types">supported SSH public key.</a>')
+ end
+ end
+
+ context 'when key is expired' do
+ let(:key) { build(:key, :expired) }
+
+ it 'returns Active Record error message' do
+ expect(subject).to eq('Key has expired')
+ end
+ end
+ end
+end
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 7ff19b1b770..832deee6186 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -10,13 +10,15 @@ RSpec.describe ProjectPresenter do
describe '#license_short_name' do
context 'when project.repository has a license_key' do
it 'returns the nickname of the license if present' do
- allow(project.repository).to receive(:license_key).and_return('agpl-3.0')
+ allow(project.repository).to receive(:license).and_return(
+ ::Gitlab::Git::DeclaredLicense.new(name: 'foo', nickname: 'GNU AGPLv3'))
expect(presenter.license_short_name).to eq('GNU AGPLv3')
end
it 'returns the name of the license if nickname is not present' do
- allow(project.repository).to receive(:license_key).and_return('mit')
+ allow(project.repository).to receive(:license).and_return(
+ ::Gitlab::Git::DeclaredLicense.new(name: 'MIT License'))
expect(presenter.license_short_name).to eq('MIT License')
end
@@ -24,7 +26,7 @@ RSpec.describe ProjectPresenter do
context 'when project.repository has no license_key but a license_blob' do
it 'returns LICENSE' do
- allow(project.repository).to receive(:license_key).and_return(nil)
+ allow(project.repository).to receive(:license).and_return(nil)
expect(presenter.license_short_name).to eq('LICENSE')
end
diff --git a/spec/presenters/projects/security/configuration_presenter_spec.rb b/spec/presenters/projects/security/configuration_presenter_spec.rb
index 05e5a9d4f1d..ca7f96b567d 100644
--- a/spec/presenters/projects/security/configuration_presenter_spec.rb
+++ b/spec/presenters/projects/security/configuration_presenter_spec.rb
@@ -6,9 +6,8 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
include Gitlab::Routing.url_helpers
using RSpec::Parameterized::TableSyntax
- let(:project_with_repo) { create(:project, :repository) }
- let(:project_with_no_repo) { create(:project) }
- let(:current_user) { create(:user) }
+ let_it_be(:current_user) { build_stubbed(:user) }
+
let(:presenter) { described_class.new(project, current_user: current_user) }
before do
@@ -19,9 +18,9 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
subject(:html_data) { presenter.to_html_data_attribute }
context 'when latest default branch pipeline`s source is not auto devops' do
- let(:project) { project_with_repo }
+ let_it_be(:project) { create(:project, :repository) }
- let(:pipeline) do
+ let_it_be(:pipeline) do
create(
:ci_pipeline,
project: project,
@@ -119,6 +118,16 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
context 'when the job has more than one report' do
let(:features) { Gitlab::Json.parse(html_data[:features]) }
+ let(:project) { create(:project, :repository) }
+
+ let(:pipeline) do
+ create(
+ :ci_pipeline,
+ project: project,
+ ref: project.default_branch,
+ sha: project.commit.sha
+ )
+ end
let!(:artifacts) do
{ artifacts: { reports: { other_job: ['gl-other-report.json'], sast: ['gl-sast-report.json'] } } }
@@ -161,6 +170,8 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
end
context "while retrieving information about gitlab ci file" do
+ let(:project) { create(:project, :repository) }
+
context 'when a .gitlab-ci.yml file exists' do
let!(:ci_config) do
project.repository.create_file(
@@ -189,7 +200,7 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
end
context 'when the project is empty' do
- let(:project) { project_with_no_repo }
+ let(:project) { create(:project) }
it 'includes a blank gitlab_ci history path' do
expect(html_data[:gitlab_ci_history_path]).to eq('')
@@ -197,7 +208,7 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
end
context 'when the project has no default branch set' do
- let(:project) { project_with_repo }
+ let(:project) { create(:project, :repository) }
it 'includes the path to gitlab_ci history' do
allow(project).to receive(:default_branch).and_return(nil)
@@ -207,9 +218,9 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
end
context "when the latest default branch pipeline's source is auto devops" do
- let(:project) { project_with_repo }
+ let_it_be(:project) { create(:project, :repository) }
- let(:pipeline) do
+ let_it_be(:pipeline) do
create(
:ci_pipeline,
:auto_devops_source,
@@ -256,7 +267,7 @@ RSpec.describe Projects::Security::ConfigurationPresenter do
end
context 'when the project has no default branch pipeline' do
- let(:project) { project_with_repo }
+ let_it_be(:project) { create(:project, :repository) }
it 'reports that auto devops is disabled' do
expect(html_data[:auto_devops_enabled]).to be_falsy
diff --git a/spec/requests/admin/impersonation_tokens_controller_spec.rb b/spec/requests/admin/impersonation_tokens_controller_spec.rb
index 018f497e7e5..ee0e12ad0c0 100644
--- a/spec/requests/admin/impersonation_tokens_controller_spec.rb
+++ b/spec/requests/admin/impersonation_tokens_controller_spec.rb
@@ -10,6 +10,18 @@ RSpec.describe Admin::ImpersonationTokensController, :enable_admin_mode do
sign_in(admin)
end
+ context 'when impersonation is enabled' do
+ before do
+ stub_config_setting(impersonation_enabled: true)
+ end
+
+ it 'responds ok' do
+ get admin_user_impersonation_tokens_path(user_id: user.username)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
context "when impersonation is disabled" do
before do
stub_config_setting(impersonation_enabled: false)
@@ -35,4 +47,10 @@ RSpec.describe Admin::ImpersonationTokensController, :enable_admin_mode do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ describe "#create" do
+ it_behaves_like "#create access token" do
+ let(:url) { admin_user_impersonation_tokens_path(user_id: user.username) }
+ end
+ end
end
diff --git a/spec/requests/api/admin/batched_background_migrations_spec.rb b/spec/requests/api/admin/batched_background_migrations_spec.rb
index c99b21c0c27..3b396a91d3e 100644
--- a/spec/requests/api/admin/batched_background_migrations_spec.rb
+++ b/spec/requests/api/admin/batched_background_migrations_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
describe 'GET /admin/batched_background_migrations/:id' do
let!(:migration) { create(:batched_background_migration, :paused) }
let(:database) { :main }
+ let(:params) { { database: database } }
subject(:show_migration) do
get api("/admin/batched_background_migrations/#{migration.id}", admin), params: { database: database }
@@ -27,10 +28,8 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
end
context 'when the batched background migration does not exist' do
- let(:params) { { database: database } }
-
it 'returns 404' do
- put api("/admin/batched_background_migrations/#{non_existing_record_id}", admin), params: params
+ get api("/admin/batched_background_migrations/#{non_existing_record_id}", admin), params: params
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -58,6 +57,17 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
expect(response).to have_gitlab_http_status(:forbidden)
end
end
+
+ context 'when the database name does not exist' do
+ let(:database) { :wrong_database }
+
+ it 'returns bad request' do
+ get api("/admin/batched_background_migrations/#{migration.id}", admin), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include('database does not have a valid value')
+ end
+ end
end
describe 'GET /admin/batched_background_migrations' do
@@ -82,6 +92,7 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
let(:database) { :ci }
let(:schema) { :gitlab_ci }
let(:ci_model) { Ci::ApplicationRecord }
+ let(:params) { { database: database } }
context 'when CI database is provided' do
let(:db_config) { instance_double(ActiveRecord::DatabaseConfigurations::HashConfig, name: 'fake_db') }
@@ -94,7 +105,18 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(ci_model.connection).and_yield
- get api('/admin/batched_background_migrations', admin), params: { database: :ci }
+ get api('/admin/batched_background_migrations', admin), params: params
+ end
+
+ context 'when the database name does not exist' do
+ let(:database) { :wrong_database }
+
+ it 'returns bad request' do
+ get api("/admin/batched_background_migrations", admin), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include('database does not have a valid value')
+ end
end
it 'returns CI database records' do
@@ -105,7 +127,7 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
create(:batched_background_migration, :active, gitlab_schema: schema)
end
- get api('/admin/batched_background_migrations', admin), params: { database: :ci }
+ get api('/admin/batched_background_migrations', admin), params: params
aggregate_failures "testing response" do
expect(response).to have_gitlab_http_status(:ok)
@@ -133,9 +155,10 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
describe 'PUT /admin/batched_background_migrations/:id/resume' do
let!(:migration) { create(:batched_background_migration, :paused) }
let(:database) { :main }
+ let(:params) { { database: database } }
subject(:resume) do
- put api("/admin/batched_background_migrations/#{migration.id}/resume", admin), params: { database: database }
+ put api("/admin/batched_background_migrations/#{migration.id}/resume", admin), params: params
end
it 'pauses the batched background migration' do
@@ -149,8 +172,6 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
end
context 'when the batched background migration does not exist' do
- let(:params) { { database: database } }
-
it 'returns 404' do
put api("/admin/batched_background_migrations/#{non_existing_record_id}/resume", admin), params: params
@@ -158,6 +179,16 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
end
end
+ context 'when the migration is not paused' do
+ let!(:migration) { create(:batched_background_migration, :failed) }
+
+ it 'returns 422' do
+ put api("/admin/batched_background_migrations/#{migration.id}/resume", admin), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
context 'when multiple database is enabled' do
let(:ci_model) { Ci::ApplicationRecord }
let(:database) { :ci }
@@ -171,6 +202,17 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
resume
end
+
+ context 'when the database name does not exist' do
+ let(:database) { :wrong_database }
+
+ it 'returns bad request' do
+ put api("/admin/batched_background_migrations/#{migration.id}/resume", admin), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include('database does not have a valid value')
+ end
+ end
end
context 'when authenticated as a non-admin user' do
@@ -184,9 +226,11 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
describe 'PUT /admin/batched_background_migrations/:id/pause' do
let!(:migration) { create(:batched_background_migration, :active) }
+ let(:database) { :main }
+ let(:params) { { database: database } }
it 'pauses the batched background migration' do
- put api("/admin/batched_background_migrations/#{migration.id}/pause", admin), params: { database: :main }
+ put api("/admin/batched_background_migrations/#{migration.id}/pause", admin), params: params
aggregate_failures "testing response" do
expect(response).to have_gitlab_http_status(:ok)
@@ -196,8 +240,6 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
end
context 'when the batched background migration does not exist' do
- let(:params) { { database: :main } }
-
it 'returns 404' do
put api("/admin/batched_background_migrations/#{non_existing_record_id}/pause", admin), params: params
@@ -205,8 +247,19 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
end
end
+ context 'when the migration is not active' do
+ let!(:migration) { create(:batched_background_migration, :failed) }
+
+ it 'returns 422' do
+ put api("/admin/batched_background_migrations/#{migration.id}/pause", admin), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
context 'when multiple database is enabled' do
let(:ci_model) { Ci::ApplicationRecord }
+ let(:database) { :ci }
before do
skip_if_multiple_databases_not_setup
@@ -215,7 +268,18 @@ RSpec.describe API::Admin::BatchedBackgroundMigrations do
it 'uses the correct connection' do
expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(ci_model.connection).and_yield
- put api("/admin/batched_background_migrations/#{migration.id}/pause", admin), params: { database: :ci }
+ put api("/admin/batched_background_migrations/#{migration.id}/pause", admin), params: params
+ end
+
+ context 'when the database name does not exist' do
+ let(:database) { :wrong_database }
+
+ it 'returns bad request' do
+ put api("/admin/batched_background_migrations/#{migration.id}/pause", admin), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include('database does not have a valid value')
+ end
end
end
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index f7539e13b80..750b9a39e15 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -221,55 +221,25 @@ RSpec.describe API::Branches do
get api(route), params: { per_page: 1 }
end
- context 'when increase_branch_cache_expiry is enabled' do
- it 'uses the cache up to 60 minutes' do
- time_of_request = Time.current
+ it 'uses the cache up to 60 minutes' do
+ time_of_request = Time.current
- get api(route), params: { per_page: 1 }
-
- travel_to time_of_request + 59.minutes do
- expect(API::Entities::Branch).not_to receive(:represent)
+ get api(route), params: { per_page: 1 }
- get api(route), params: { per_page: 1 }
- end
- end
+ travel_to time_of_request + 59.minutes do
+ expect(API::Entities::Branch).not_to receive(:represent)
- it 'requests for new value after 60 minutes' do
get api(route), params: { per_page: 1 }
-
- travel_to 61.minutes.from_now do
- expect(API::Entities::Branch).to receive(:represent)
-
- get api(route), params: { per_page: 1 }
- end
end
end
- context 'when increase_branch_cache_expiry is disabled' do
- before do
- stub_feature_flags(increase_branch_cache_expiry: false)
- end
-
- it 'uses the cache up to 10 minutes' do
- time_of_request = Time.current
-
- get api(route), params: { per_page: 1 }
+ it 'requests for new value after 60 minutes' do
+ get api(route), params: { per_page: 1 }
- travel_to time_of_request + 9.minutes do
- expect(API::Entities::Branch).not_to receive(:represent)
+ travel_to 61.minutes.from_now do
+ expect(API::Entities::Branch).to receive(:represent)
- get api(route), params: { per_page: 1 }
- end
- end
-
- it 'requests for new value after 10 minutes' do
get api(route), params: { per_page: 1 }
-
- travel_to 11.minutes.from_now do
- expect(API::Entities::Branch).to receive(:represent)
-
- get api(route), params: { per_page: 1 }
- end
end
end
end
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index 6a3d13567bd..ad57a370fc5 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -53,6 +53,18 @@ RSpec.describe API::BulkImports do
end
end
+ context 'when bulk_import feature flag is disabled' do
+ before do
+ stub_feature_flags(bulk_import: false)
+ end
+
+ it 'returns 404' do
+ post api('/bulk_imports', user), params: {}
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
shared_examples 'starting a new migration' do
it 'starts a new migration' do
post api('/bulk_imports', user), params: {
diff --git a/spec/requests/api/ci/job_artifacts_spec.rb b/spec/requests/api/ci/job_artifacts_spec.rb
index 0fb11bf98d2..2bf242f06ed 100644
--- a/spec/requests/api/ci/job_artifacts_spec.rb
+++ b/spec/requests/api/ci/job_artifacts_spec.rb
@@ -389,6 +389,49 @@ RSpec.describe API::Ci::JobArtifacts do
end
end
+ context 'when Google CDN is enabled' do
+ let(:cdn_enabled) { true }
+ let(:cdn_config) do
+ {
+ 'provider' => 'Google',
+ 'url' => 'https://cdn.example.org',
+ 'key_name' => 'stanhu-key',
+ 'key' => Base64.urlsafe_encode64(SecureRandom.hex)
+ }
+ end
+
+ before do
+ stub_feature_flags(ci_job_artifacts_cdn: cdn_enabled)
+ stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
+ uploader: JobArtifactUploader,
+ proxy_download: proxy_download,
+ cdn: cdn_config)
+ allow(Gitlab::ApplicationContext).to receive(:push).and_call_original
+ end
+
+ subject { get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user), env: { 'REMOTE_ADDR': '18.245.0.1' } }
+
+ it 'returns CDN-signed URL' do
+ expect(Gitlab::ApplicationContext).to receive(:push).with(artifact_used_cdn: true).and_call_original
+
+ subject
+
+ expect(response.redirect_url).to start_with("https://cdn.example.org/#{artifact.file.path}")
+ end
+
+ context 'when ci_job_artifacts_cdn feature flag is disabled' do
+ let(:cdn_enabled) { false }
+
+ it 'returns the file remote URL' do
+ expect(Gitlab::ApplicationContext).to receive(:push).with(artifact_used_cdn: false).and_call_original
+
+ subject
+
+ expect(response).to redirect_to(artifact.file.url)
+ end
+ end
+ end
+
context 'authorized user' do
it 'returns the file remote URL' do
expect(response).to redirect_to(artifact.file.url)
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index b8983e9632e..0e17db516f4 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -226,18 +226,19 @@ RSpec.describe API::Ci::Jobs do
expect(json_response.dig('user', 'username')).to eq(api_user.username)
expect(json_response.dig('user', 'roles_in_project')).to match_array %w(guest reporter developer)
expect(json_response).not_to include('environment')
- expect(json_response['allowed_agents']).to match_array([
- {
- 'id' => implicit_authorization.agent_id,
- 'config_project' => hash_including('id' => implicit_authorization.agent.project_id),
- 'configuration' => implicit_authorization.config
- },
- {
- 'id' => group_authorization.agent_id,
- 'config_project' => hash_including('id' => group_authorization.agent.project_id),
- 'configuration' => group_authorization.config
- }
- ])
+ expect(json_response['allowed_agents']).to match_array(
+ [
+ {
+ 'id' => implicit_authorization.agent_id,
+ 'config_project' => hash_including('id' => implicit_authorization.agent.project_id),
+ 'configuration' => implicit_authorization.config
+ },
+ {
+ 'id' => group_authorization.agent_id,
+ 'config_project' => hash_including('id' => group_authorization.agent.project_id),
+ 'configuration' => group_authorization.config
+ }
+ ])
end
end
diff --git a/spec/requests/api/ci/resource_groups_spec.rb b/spec/requests/api/ci/resource_groups_spec.rb
index 864c363e6d3..87df71f6096 100644
--- a/spec/requests/api/ci/resource_groups_spec.rb
+++ b/spec/requests/api/ci/resource_groups_spec.rb
@@ -77,6 +77,48 @@ RSpec.describe API::Ci::ResourceGroups do
end
end
+ describe 'GET /projects/:id/resource_groups/:key/upcoming_jobs' do
+ subject { get api("/projects/#{project.id}/resource_groups/#{key}/upcoming_jobs", user) }
+
+ let_it_be(:resource_group) { create(:ci_resource_group, project: project) }
+ let_it_be(:processable) { create(:ci_processable, resource_group: resource_group) }
+ let_it_be(:upcoming_processable) { create(:ci_processable, :waiting_for_resource, resource_group: resource_group) }
+
+ let(:key) { resource_group.key }
+
+ it 'returns upcoming jobs of resource group', :aggregate_failures do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.length).to eq(1)
+ expect(json_response[0]['id']).to eq(upcoming_processable.id)
+ expect(json_response[0]['name']).to eq(upcoming_processable.name)
+ expect(json_response[0]['ref']).to eq(upcoming_processable.ref)
+ expect(json_response[0]['stage']).to eq(upcoming_processable.stage)
+ expect(json_response[0]['status']).to eq(upcoming_processable.status)
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns forbidden' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when there is no corresponding resource group' do
+ let(:key) { 'unknown' }
+
+ it 'returns not found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
describe 'PUT /projects/:id/resource_groups/:key' do
subject { put api("/projects/#{project.id}/resource_groups/#{key}", user), params: params }
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index b33b97f90d7..d4f734e7bdd 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -220,14 +220,15 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['image']).to eq(
{ 'name' => 'image:1.0', 'entrypoint' => '/bin/sh', 'ports' => [], 'pull_policy' => nil }
)
- expect(json_response['services']).to eq([
- { 'name' => 'postgres', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
- 'variables' => nil, 'pull_policy' => nil },
- { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh', 'alias' => 'docker', 'command' => 'sleep 30',
- 'ports' => [], 'variables' => [], 'pull_policy' => nil },
- { 'name' => 'mysql:latest', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
- 'variables' => [{ 'key' => 'MYSQL_ROOT_PASSWORD', 'value' => 'root123.' }], 'pull_policy' => nil }
- ])
+ expect(json_response['services']).to eq(
+ [
+ { 'name' => 'postgres', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
+ 'variables' => nil, 'pull_policy' => nil },
+ { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh', 'alias' => 'docker', 'command' => 'sleep 30',
+ 'ports' => [], 'variables' => [], 'pull_policy' => nil },
+ { 'name' => 'mysql:latest', 'entrypoint' => nil, 'alias' => nil, 'command' => nil, 'ports' => [],
+ 'variables' => [{ 'key' => 'MYSQL_ROOT_PASSWORD', 'value' => 'root123.' }], 'pull_policy' => nil }
+ ])
expect(json_response['steps']).to eq(expected_steps)
expect(json_response['artifacts']).to eq(expected_artifacts)
expect(json_response['cache']).to match(expected_cache)
@@ -383,23 +384,24 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:created)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
- expect(json_response['steps']).to eq([
- {
- "name" => "script",
- "script" => ["make changelog | tee release_changelog.txt"],
- "timeout" => 3600,
- "when" => "on_success",
- "allow_failure" => false
- },
- {
- "name" => "release",
- "script" =>
- ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\" --assets-link \"{\\\"url\\\":\\\"https://example.com/assets/1\\\",\\\"name\\\":\\\"asset1\\\"}\""],
- "timeout" => 3600,
- "when" => "on_success",
- "allow_failure" => false
- }
- ])
+ expect(json_response['steps']).to eq(
+ [
+ {
+ "name" => "script",
+ "script" => ["make changelog | tee release_changelog.txt"],
+ "timeout" => 3600,
+ "when" => "on_success",
+ "allow_failure" => false
+ },
+ {
+ "name" => "release",
+ "script" =>
+ ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\" --assets-link \"{\\\"url\\\":\\\"https://example.com/assets/1\\\",\\\"name\\\":\\\"asset1\\\"}\""],
+ "timeout" => 3600,
+ "when" => "on_success",
+ "allow_failure" => false
+ }
+ ])
end
end
diff --git a/spec/requests/api/ci/runner/runners_reset_spec.rb b/spec/requests/api/ci/runner/runners_reset_spec.rb
index 8a61012ead1..02b66a89a0a 100644
--- a/spec/requests/api/ci/runner/runners_reset_spec.rb
+++ b/spec/requests/api/ci/runner/runners_reset_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
before do
stub_feature_flags(ci_enable_live_trace: true)
- stub_feature_flags(runner_registration_control: false)
stub_gitlab_calls
stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
end
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index fa1f713e757..69f26d3f257 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -918,10 +918,11 @@ RSpec.describe API::Ci::Runners do
create(:ci_build, :failed, runner: shared_runner, project: project_with_repo, pipeline: pipeline)
expect_next_instance_of(Repository) do |repo|
- expect(repo).to receive(:commits_by).with(oids: %w[
- 1a0b36b3cdad1d2ee32457c102a8c0b7056fa863
- c1c67abbaf91f624347bb3ae96eabe3a1b742478
- ]).once.and_call_original
+ expect(repo).to receive(:commits_by).with(oids:
+ %w[
+ 1a0b36b3cdad1d2ee32457c102a8c0b7056fa863
+ c1c67abbaf91f624347bb3ae96eabe3a1b742478
+ ]).once.and_call_original
end
get api("/runners/#{shared_runner.id}/jobs", admin), params: { per_page: 2, order_by: 'id', sort: 'desc' }
@@ -1124,30 +1125,27 @@ RSpec.describe API::Ci::Runners do
it 'returns all runners' do
get api("/groups/#{group.id}/runners", user)
- expect(json_response).to match_array([
- a_hash_including('description' => 'Group runner A', 'active' => true, 'paused' => false),
- a_hash_including('description' => 'Shared runner', 'active' => true, 'paused' => false)
- ])
+ expect(json_response).to match_array(
+ [
+ a_hash_including('description' => 'Group runner A', 'active' => true, 'paused' => false),
+ a_hash_including('description' => 'Shared runner', 'active' => true, 'paused' => false)
+ ])
end
context 'filter by type' do
it 'returns record when valid and present' do
get api("/groups/#{group.id}/runners?type=group_type", user)
- expect(json_response).to match_array([
- a_hash_including('description' => 'Group runner A')
- ])
+ expect(json_response).to match_array([a_hash_including('description' => 'Group runner A')])
end
it 'returns instance runners when instance_type is specified' do
get api("/groups/#{group.id}/runners?type=instance_type", user)
- expect(json_response).to match_array([
- a_hash_including('description' => 'Shared runner')
- ])
+ expect(json_response).to match_array([a_hash_including('description' => 'Shared runner')])
end
- # TODO: Remove in %15.0 (https://gitlab.com/gitlab-org/gitlab/-/issues/351466)
+ # TODO: Remove when REST API v5 is implemented (https://gitlab.com/gitlab-org/gitlab/-/issues/351466)
it 'returns empty result when type does not match' do
get api("/groups/#{group.id}/runners?type=project_type", user)
@@ -1167,18 +1165,14 @@ RSpec.describe API::Ci::Runners do
it 'returns runners by paused state' do
get api("/groups/#{group.id}/runners?paused=true", user)
- expect(json_response).to match_array([
- a_hash_including('description' => 'Inactive group runner')
- ])
+ expect(json_response).to match_array([a_hash_including('description' => 'Inactive group runner')])
end
context 'filter runners by status' do
it 'returns runners by valid status' do
get api("/groups/#{group.id}/runners?status=paused", user)
- expect(json_response).to match_array([
- a_hash_including('description' => 'Inactive group runner')
- ])
+ expect(json_response).to match_array([a_hash_including('description' => 'Inactive group runner')])
end
it 'does not filter by invalid status' do
@@ -1195,9 +1189,7 @@ RSpec.describe API::Ci::Runners do
get api("/groups/#{group.id}/runners?tag_list=tag1,tag2", user)
- expect(json_response).to match_array([
- a_hash_including('description' => 'Runner tagged with tag1 and tag2')
- ])
+ expect(json_response).to match_array([a_hash_including('description' => 'Runner tagged with tag1 and tag2')])
end
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index b5f8da1f327..e0296248a03 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -44,14 +44,15 @@ RSpec.describe API::DeployTokens do
token_ids = json_response.map { |token| token['id'] }
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/deploy_tokens')
- expect(token_ids).to match_array([
- deploy_token.id,
- revoked_deploy_token.id,
- expired_deploy_token.id,
- group_deploy_token.id,
- revoked_group_deploy_token.id,
- expired_group_deploy_token.id
- ])
+ expect(token_ids).to match_array(
+ [
+ deploy_token.id,
+ revoked_deploy_token.id,
+ expired_deploy_token.id,
+ group_deploy_token.id,
+ revoked_group_deploy_token.id,
+ expired_group_deploy_token.id
+ ])
end
context 'and active=true' do
@@ -61,10 +62,11 @@ RSpec.describe API::DeployTokens do
token_ids = json_response.map { |token| token['id'] }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(token_ids).to match_array([
- deploy_token.id,
- group_deploy_token.id
- ])
+ expect(token_ids).to match_array(
+ [
+ deploy_token.id,
+ group_deploy_token.id
+ ])
end
end
end
@@ -110,11 +112,12 @@ RSpec.describe API::DeployTokens do
subject
token_ids = json_response.map { |token| token['id'] }
- expect(token_ids).to match_array([
- deploy_token.id,
- expired_deploy_token.id,
- revoked_deploy_token.id
- ])
+ expect(token_ids).to match_array(
+ [
+ deploy_token.id,
+ expired_deploy_token.id,
+ revoked_deploy_token.id
+ ])
end
context 'and active=true' do
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index b54be4f5258..d0334cf6dd2 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -92,402 +92,292 @@ RSpec.describe API::Features, stub_feature_flags: false do
describe 'POST /feature' do
let(:feature_name) { known_feature_flag.name }
- context 'when the feature does not exist' do
- it 'returns a 401 for anonymous users' do
- post api("/features/#{feature_name}")
+ # TODO: remove this shared examples block when set_feature_flag_service feature flag
+ # is removed. Then remove also any duplicate specs covered by the service class.
+ shared_examples 'sets the feature flag status' do
+ context 'when the feature does not exist' do
+ it 'returns a 401 for anonymous users' do
+ post api("/features/#{feature_name}")
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
- it 'returns a 403 for users' do
- post api("/features/#{feature_name}", user)
+ it 'returns a 403 for users' do
+ post api("/features/#{feature_name}", user)
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- context 'when passed value=true' do
- it 'creates an enabled feature' do
- post api("/features/#{feature_name}", admin), params: { value: 'true' }
+ context 'when passed value=true' do
+ it 'creates an enabled feature' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true' }
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'on',
- 'gates' => [{ 'key' => 'boolean', 'value' => true }],
- 'definition' => known_feature_flag_definition_hash
- )
- end
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'on',
+ 'gates' => [{ 'key' => 'boolean', 'value' => true }],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
- it 'logs the event' do
- expect(Feature.logger).to receive(:info).once
+ it 'logs the event' do
+ expect(Feature.logger).to receive(:info).once
- post api("/features/#{feature_name}", admin), params: { value: 'true' }
- end
+ post api("/features/#{feature_name}", admin), params: { value: 'true' }
+ end
- it 'creates an enabled feature for the given Flipper group when passed feature_group=perf_team' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
+ it 'creates an enabled feature for the given Flipper group when passed feature_group=perf_team' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'groups', 'value' => ['perf_team'] }
+ ],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'groups', 'value' => ['perf_team'] }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
- end
+ it 'creates an enabled feature for the given user when passed user=username' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
+ ],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
- it 'creates an enabled feature for the given user when passed user=username' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
+ it 'creates an enabled feature for the given user and feature group when passed user=username and feature_group=perf_team' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username, feature_group: 'perf_team' }
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['name']).to eq(feature_name)
+ expect(json_response['state']).to eq('conditional')
+ expect(json_response['gates']).to contain_exactly(
{ 'key' => 'boolean', 'value' => false },
+ { 'key' => 'groups', 'value' => ['perf_team'] },
{ 'key' => 'actors', 'value' => ["User:#{user.id}"] }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
- end
-
- it 'creates an enabled feature for the given user and feature group when passed user=username and feature_group=perf_team' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username, feature_group: 'perf_team' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq(feature_name)
- expect(json_response['state']).to eq('conditional')
- expect(json_response['gates']).to contain_exactly(
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'groups', 'value' => ['perf_team'] },
- { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
- )
+ )
+ end
end
- end
- shared_examples 'does not enable the flag' do |actor_type|
- let(:actor_path) { raise NotImplementedError }
- let(:expected_inexistent_path) { actor_path }
+ shared_examples 'does not enable the flag' do |actor_type|
+ let(:actor_path) { raise NotImplementedError }
+ let(:expected_inexistent_path) { actor_path }
- it 'returns the current state of the flag without changes' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', actor_type => actor_path }
+ it 'returns the current state of the flag without changes' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', actor_type => actor_path }
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq("400 Bad request - #{expected_inexistent_path} is not found!")
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq("400 Bad request - #{expected_inexistent_path} is not found!")
+ end
end
- end
- shared_examples 'enables the flag for the actor' do |actor_type|
- it 'sets the feature gate' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', actor_type => actor.full_path }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'actors', 'value' => ["#{actor.class}:#{actor.id}"] }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
+ shared_examples 'enables the flag for the actor' do |actor_type|
+ it 'sets the feature gate' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', actor_type => actor.full_path }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'actors', 'value' => ["#{actor.class}:#{actor.id}"] }
+ ],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
end
- end
- shared_examples 'creates an enabled feature for the specified entries' do
- it do
- post api("/features/#{feature_name}", admin), params: { value: 'true', **gate_params }
+ shared_examples 'creates an enabled feature for the specified entries' do
+ it do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', **gate_params }
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['name']).to eq(feature_name)
- expect(json_response['gates']).to contain_exactly(
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'actors', 'value' => array_including(expected_gate_params) }
- )
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['name']).to eq(feature_name)
+ expect(json_response['gates']).to contain_exactly(
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'actors', 'value' => array_including(expected_gate_params) }
+ )
+ end
end
- end
- context 'when enabling for a project by path' do
- context 'when the project exists' do
- it_behaves_like 'enables the flag for the actor', :project do
- let(:actor) { create(:project) }
+ context 'when enabling for a project by path' do
+ context 'when the project exists' do
+ it_behaves_like 'enables the flag for the actor', :project do
+ let(:actor) { create(:project) }
+ end
end
- end
- context 'when the project does not exist' do
- it_behaves_like 'does not enable the flag', :project do
- let(:actor_path) { 'mep/to/the/mep/mep' }
+ context 'when the project does not exist' do
+ it_behaves_like 'does not enable the flag', :project do
+ let(:actor_path) { 'mep/to/the/mep/mep' }
+ end
end
end
- end
- context 'when enabling for a group by path' do
- context 'when the group exists' do
- it_behaves_like 'enables the flag for the actor', :group do
- let(:actor) { create(:group) }
+ context 'when enabling for a group by path' do
+ context 'when the group exists' do
+ it_behaves_like 'enables the flag for the actor', :group do
+ let(:actor) { create(:group) }
+ end
end
- end
- context 'when the group does not exist' do
- it_behaves_like 'does not enable the flag', :group do
- let(:actor_path) { 'not/a/group' }
+ context 'when the group does not exist' do
+ it_behaves_like 'does not enable the flag', :group do
+ let(:actor_path) { 'not/a/group' }
+ end
end
end
- end
- context 'when enabling for a namespace by path' do
- context 'when the user namespace exists' do
- it_behaves_like 'enables the flag for the actor', :namespace do
- let(:actor) { create(:namespace) }
+ context 'when enabling for a namespace by path' do
+ context 'when the user namespace exists' do
+ it_behaves_like 'enables the flag for the actor', :namespace do
+ let(:actor) { create(:namespace) }
+ end
end
- end
- context 'when the group namespace exists' do
- it_behaves_like 'enables the flag for the actor', :namespace do
- let(:actor) { create(:group) }
+ context 'when the group namespace exists' do
+ it_behaves_like 'enables the flag for the actor', :namespace do
+ let(:actor) { create(:group) }
+ end
end
- end
- context 'when the user namespace does not exist' do
- it_behaves_like 'does not enable the flag', :namespace do
- let(:actor_path) { 'not/a/group' }
+ context 'when the user namespace does not exist' do
+ it_behaves_like 'does not enable the flag', :namespace do
+ let(:actor_path) { 'not/a/group' }
+ end
end
- end
- context 'when a project namespace exists' do
- let(:project_namespace) { create(:project_namespace) }
+ context 'when a project namespace exists' do
+ let(:project_namespace) { create(:project_namespace) }
- it_behaves_like 'does not enable the flag', :namespace do
- let(:actor_path) { project_namespace.full_path }
+ it_behaves_like 'does not enable the flag', :namespace do
+ let(:actor_path) { project_namespace.full_path }
+ end
end
end
- end
- context 'with multiple users' do
- let_it_be(:users) { create_list(:user, 3) }
+ context 'with multiple users' do
+ let_it_be(:users) { create_list(:user, 3) }
- it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { user: users.map(&:username).join(',') } }
- let(:expected_gate_params) { users.map(&:flipper_id) }
- end
-
- context 'when empty value exists between comma' do
it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { user: "#{users.first.username},,,," } }
- let(:expected_gate_params) { users.first.flipper_id }
+ let(:gate_params) { { user: users.map(&:username).join(',') } }
+ let(:expected_gate_params) { users.map(&:flipper_id) }
end
- end
- context 'when one of the users does not exist' do
- it_behaves_like 'does not enable the flag', :user do
- let(:actor_path) { "#{users.first.username},inexistent-entry" }
- let(:expected_inexistent_path) { "inexistent-entry" }
+ context 'when empty value exists between comma' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { user: "#{users.first.username},,,," } }
+ let(:expected_gate_params) { users.first.flipper_id }
+ end
end
- end
- end
- context 'with multiple projects' do
- let_it_be(:projects) { create_list(:project, 3) }
-
- it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { project: projects.map(&:full_path).join(',') } }
- let(:expected_gate_params) { projects.map(&:flipper_id) }
- end
-
- context 'when empty value exists between comma' do
- it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { project: "#{projects.first.full_path},,,," } }
- let(:expected_gate_params) { projects.first.flipper_id }
+ context 'when one of the users does not exist' do
+ it_behaves_like 'does not enable the flag', :user do
+ let(:actor_path) { "#{users.first.username},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
end
end
- context 'when one of the projects does not exist' do
- it_behaves_like 'does not enable the flag', :project do
- let(:actor_path) { "#{projects.first.full_path},inexistent-entry" }
- let(:expected_inexistent_path) { "inexistent-entry" }
- end
- end
- end
-
- context 'with multiple groups' do
- let_it_be(:groups) { create_list(:group, 3) }
+ context 'with multiple projects' do
+ let_it_be(:projects) { create_list(:project, 3) }
- it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { group: groups.map(&:full_path).join(',') } }
- let(:expected_gate_params) { groups.map(&:flipper_id) }
- end
-
- context 'when empty value exists between comma' do
it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { group: "#{groups.first.full_path},,,," } }
- let(:expected_gate_params) { groups.first.flipper_id }
+ let(:gate_params) { { project: projects.map(&:full_path).join(',') } }
+ let(:expected_gate_params) { projects.map(&:flipper_id) }
end
- end
- context 'when one of the groups does not exist' do
- it_behaves_like 'does not enable the flag', :group do
- let(:actor_path) { "#{groups.first.full_path},inexistent-entry" }
- let(:expected_inexistent_path) { "inexistent-entry" }
+ context 'when empty value exists between comma' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { project: "#{projects.first.full_path},,,," } }
+ let(:expected_gate_params) { projects.first.flipper_id }
+ end
end
- end
- end
-
- context 'with multiple namespaces' do
- let_it_be(:namespaces) { create_list(:namespace, 3) }
-
- it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { namespace: namespaces.map(&:full_path).join(',') } }
- let(:expected_gate_params) { namespaces.map(&:flipper_id) }
- end
- context 'when empty value exists between comma' do
- it_behaves_like 'creates an enabled feature for the specified entries' do
- let(:gate_params) { { namespace: "#{namespaces.first.full_path},,,," } }
- let(:expected_gate_params) { namespaces.first.flipper_id }
+ context 'when one of the projects does not exist' do
+ it_behaves_like 'does not enable the flag', :project do
+ let(:actor_path) { "#{projects.first.full_path},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
end
end
- context 'when one of the namespaces does not exist' do
- it_behaves_like 'does not enable the flag', :namespace do
- let(:actor_path) { "#{namespaces.first.full_path},inexistent-entry" }
- let(:expected_inexistent_path) { "inexistent-entry" }
- end
- end
- end
-
- it 'creates a feature with the given percentage of time if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '50' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'percentage_of_time', 'value' => 50 }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
- end
-
- it 'creates a feature with the given percentage of time if passed a float' do
- post api("/features/#{feature_name}", admin), params: { value: '0.01' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'percentage_of_time', 'value' => 0.01 }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
- end
-
- it 'creates a feature with the given percentage of actors if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '50', key: 'percentage_of_actors' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'percentage_of_actors', 'value' => 50 }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
- end
-
- it 'creates a feature with the given percentage of actors if passed a float' do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'percentage_of_actors', 'value' => 0.01 }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
- end
+ context 'with multiple groups' do
+ let_it_be(:groups) { create_list(:group, 3) }
- describe 'mutually exclusive parameters' do
- shared_examples 'fails to set the feature flag' do
- it 'returns an error' do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to match(/key, \w+ are mutually exclusive/)
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { group: groups.map(&:full_path).join(',') } }
+ let(:expected_gate_params) { groups.map(&:flipper_id) }
end
- end
- context 'when key and feature_group are provided' do
- before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', feature_group: 'some-value' }
+ context 'when empty value exists between comma' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { group: "#{groups.first.full_path},,,," } }
+ let(:expected_gate_params) { groups.first.flipper_id }
+ end
end
- it_behaves_like 'fails to set the feature flag'
- end
-
- context 'when key and user are provided' do
- before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', user: 'some-user' }
+ context 'when one of the groups does not exist' do
+ it_behaves_like 'does not enable the flag', :group do
+ let(:actor_path) { "#{groups.first.full_path},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
end
-
- it_behaves_like 'fails to set the feature flag'
end
- context 'when key and group are provided' do
- before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', group: 'somepath' }
- end
-
- it_behaves_like 'fails to set the feature flag'
- end
+ context 'with multiple namespaces' do
+ let_it_be(:namespaces) { create_list(:namespace, 3) }
- context 'when key and namespace are provided' do
- before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', namespace: 'somepath' }
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { namespace: namespaces.map(&:full_path).join(',') } }
+ let(:expected_gate_params) { namespaces.map(&:flipper_id) }
end
- it_behaves_like 'fails to set the feature flag'
- end
-
- context 'when key and project are provided' do
- before do
- post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', project: 'somepath' }
+ context 'when empty value exists between comma' do
+ it_behaves_like 'creates an enabled feature for the specified entries' do
+ let(:gate_params) { { namespace: "#{namespaces.first.full_path},,,," } }
+ let(:expected_gate_params) { namespaces.first.flipper_id }
+ end
end
- it_behaves_like 'fails to set the feature flag'
+ context 'when one of the namespaces does not exist' do
+ it_behaves_like 'does not enable the flag', :namespace do
+ let(:actor_path) { "#{namespaces.first.full_path},inexistent-entry" }
+ let(:expected_inexistent_path) { "inexistent-entry" }
+ end
+ end
end
- end
- end
- context 'when the feature exists' do
- before do
- Feature.disable(feature_name) # This also persists the feature on the DB
- end
-
- context 'when passed value=true' do
- it 'enables the feature' do
- post api("/features/#{feature_name}", admin), params: { value: 'true' }
+ it 'creates a feature with the given percentage of time if passed an integer' do
+ post api("/features/#{feature_name}", admin), params: { value: '50' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
'name' => feature_name,
- 'state' => 'on',
- 'gates' => [{ 'key' => 'boolean', 'value' => true }],
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'percentage_of_time', 'value' => 50 }
+ ],
'definition' => known_feature_flag_definition_hash
)
end
- it 'enables the feature for the given Flipper group when passed feature_group=perf_team' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
+ it 'creates a feature with the given percentage of time if passed a float' do
+ post api("/features/#{feature_name}", admin), params: { value: '0.01' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -495,14 +385,14 @@ RSpec.describe API::Features, stub_feature_flags: false do
'state' => 'conditional',
'gates' => [
{ 'key' => 'boolean', 'value' => false },
- { 'key' => 'groups', 'value' => ['perf_team'] }
+ { 'key' => 'percentage_of_time', 'value' => 0.01 }
],
'definition' => known_feature_flag_definition_hash
)
end
- it 'enables the feature for the given user when passed user=username' do
- post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
+ it 'creates a feature with the given percentage of actors if passed an integer' do
+ post api("/features/#{feature_name}", admin), params: { value: '50', key: 'percentage_of_actors' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
@@ -510,102 +400,230 @@ RSpec.describe API::Features, stub_feature_flags: false do
'state' => 'conditional',
'gates' => [
{ 'key' => 'boolean', 'value' => false },
- { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
+ { 'key' => 'percentage_of_actors', 'value' => 50 }
],
'definition' => known_feature_flag_definition_hash
)
end
- end
-
- context 'when feature is enabled and value=false is passed' do
- it 'disables the feature' do
- Feature.enable(feature_name)
- expect(Feature.enabled?(feature_name)).to eq(true)
- post api("/features/#{feature_name}", admin), params: { value: 'false' }
+ it 'creates a feature with the given percentage of actors if passed a float' do
+ post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors' }
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to match(
'name' => feature_name,
- 'state' => 'off',
- 'gates' => [{ 'key' => 'boolean', 'value' => false }],
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'percentage_of_actors', 'value' => 0.01 }
+ ],
'definition' => known_feature_flag_definition_hash
)
end
- it 'disables the feature for the given Flipper group when passed feature_group=perf_team' do
- Feature.enable(feature_name, Feature.group(:perf_team))
- expect(Feature.enabled?(feature_name, admin)).to be_truthy
+ describe 'mutually exclusive parameters' do
+ shared_examples 'fails to set the feature flag' do
+ it 'returns an error' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to match(/key, \w+ are mutually exclusive/)
+ end
+ end
- post api("/features/#{feature_name}", admin), params: { value: 'false', feature_group: 'perf_team' }
+ context 'when key and feature_group are provided' do
+ before do
+ post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', feature_group: 'some-value' }
+ end
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'off',
- 'gates' => [{ 'key' => 'boolean', 'value' => false }],
- 'definition' => known_feature_flag_definition_hash
- )
- end
+ it_behaves_like 'fails to set the feature flag'
+ end
- it 'disables the feature for the given user when passed user=username' do
- Feature.enable(feature_name, user)
- expect(Feature.enabled?(feature_name, user)).to be_truthy
+ context 'when key and user are provided' do
+ before do
+ post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', user: 'some-user' }
+ end
- post api("/features/#{feature_name}", admin), params: { value: 'false', user: user.username }
+ it_behaves_like 'fails to set the feature flag'
+ end
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'off',
- 'gates' => [{ 'key' => 'boolean', 'value' => false }],
- 'definition' => known_feature_flag_definition_hash
- )
+ context 'when key and group are provided' do
+ before do
+ post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', group: 'somepath' }
+ end
+
+ it_behaves_like 'fails to set the feature flag'
+ end
+
+ context 'when key and namespace are provided' do
+ before do
+ post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', namespace: 'somepath' }
+ end
+
+ it_behaves_like 'fails to set the feature flag'
+ end
+
+ context 'when key and project are provided' do
+ before do
+ post api("/features/#{feature_name}", admin), params: { value: '0.01', key: 'percentage_of_actors', project: 'somepath' }
+ end
+
+ it_behaves_like 'fails to set the feature flag'
+ end
end
end
- context 'with a pre-existing percentage of time value' do
+ context 'when the feature exists' do
before do
- Feature.enable_percentage_of_time(feature_name, 50)
+ Feature.disable(feature_name) # This also persists the feature on the DB
end
- it 'updates the percentage of time if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '30' }
+ context 'when passed value=true' do
+ it 'enables the feature' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true' }
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'percentage_of_time', 'value' => 30 }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'on',
+ 'gates' => [{ 'key' => 'boolean', 'value' => true }],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
+
+ it 'enables the feature for the given Flipper group when passed feature_group=perf_team' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', feature_group: 'perf_team' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'groups', 'value' => ['perf_team'] }
+ ],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
+
+ it 'enables the feature for the given user when passed user=username' do
+ post api("/features/#{feature_name}", admin), params: { value: 'true', user: user.username }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'actors', 'value' => ["User:#{user.id}"] }
+ ],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
end
- end
- context 'with a pre-existing percentage of actors value' do
- before do
- Feature.enable_percentage_of_actors(feature_name, 42)
+ context 'when feature is enabled and value=false is passed' do
+ it 'disables the feature' do
+ Feature.enable(feature_name)
+ expect(Feature.enabled?(feature_name)).to eq(true)
+
+ post api("/features/#{feature_name}", admin), params: { value: 'false' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'off',
+ 'gates' => [{ 'key' => 'boolean', 'value' => false }],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
+
+ it 'disables the feature for the given Flipper group when passed feature_group=perf_team' do
+ Feature.enable(feature_name, Feature.group(:perf_team))
+ expect(Feature.enabled?(feature_name, admin)).to be_truthy
+
+ post api("/features/#{feature_name}", admin), params: { value: 'false', feature_group: 'perf_team' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'off',
+ 'gates' => [{ 'key' => 'boolean', 'value' => false }],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
+
+ it 'disables the feature for the given user when passed user=username' do
+ Feature.enable(feature_name, user)
+ expect(Feature.enabled?(feature_name, user)).to be_truthy
+
+ post api("/features/#{feature_name}", admin), params: { value: 'false', user: user.username }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'off',
+ 'gates' => [{ 'key' => 'boolean', 'value' => false }],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
end
- it 'updates the percentage of actors if passed an integer' do
- post api("/features/#{feature_name}", admin), params: { value: '74', key: 'percentage_of_actors' }
+ context 'with a pre-existing percentage of time value' do
+ before do
+ Feature.enable_percentage_of_time(feature_name, 50)
+ end
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to match(
- 'name' => feature_name,
- 'state' => 'conditional',
- 'gates' => [
- { 'key' => 'boolean', 'value' => false },
- { 'key' => 'percentage_of_actors', 'value' => 74 }
- ],
- 'definition' => known_feature_flag_definition_hash
- )
+ it 'updates the percentage of time if passed an integer' do
+ post api("/features/#{feature_name}", admin), params: { value: '30' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'percentage_of_time', 'value' => 30 }
+ ],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
+ end
+
+ context 'with a pre-existing percentage of actors value' do
+ before do
+ Feature.enable_percentage_of_actors(feature_name, 42)
+ end
+
+ it 'updates the percentage of actors if passed an integer' do
+ post api("/features/#{feature_name}", admin), params: { value: '74', key: 'percentage_of_actors' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to match(
+ 'name' => feature_name,
+ 'state' => 'conditional',
+ 'gates' => [
+ { 'key' => 'boolean', 'value' => false },
+ { 'key' => 'percentage_of_actors', 'value' => 74 }
+ ],
+ 'definition' => known_feature_flag_definition_hash
+ )
+ end
end
end
end
+
+ before do
+ stub_feature_flags(set_feature_flag_service: true)
+ end
+
+ it_behaves_like 'sets the feature flag status'
+
+ context 'when feature flag set_feature_flag_service is disabled' do
+ before do
+ stub_feature_flags(set_feature_flag_service: false)
+ end
+
+ it_behaves_like 'sets the feature flag status'
+ end
end
describe 'DELETE /feature/:name' do
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index 823eafab734..0478e123086 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -602,6 +602,21 @@ RSpec.describe API::GenericPackages do
end
end
+ context 'with access to package registry for everyone' do
+ let_it_be(:user_role) { :anonymous }
+
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it 'responds with success' do
+ download_file(auth_header)
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
context 'with package status' do
where(:package_status, :expected_status) do
:default | :success
diff --git a/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb b/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
index c19defa37e8..2dc7b9764fe 100644
--- a/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
+++ b/spec/requests/api/graphql/ci/ci_cd_setting_spec.rb
@@ -48,6 +48,8 @@ RSpec.describe 'Getting Ci Cd Setting' do
expect(settings_data['mergeTrainsEnabled']).to eql project.ci_cd_settings.merge_trains_enabled?
expect(settings_data['keepLatestArtifact']).to eql project.keep_latest_artifacts_available?
expect(settings_data['jobTokenScopeEnabled']).to eql project.ci_cd_settings.job_token_scope_enabled?
+ expect(settings_data['inboundJobTokenScopeEnabled']).to eql(
+ project.ci_cd_settings.inbound_job_token_scope_enabled?)
end
end
end
diff --git a/spec/requests/api/graphql/ci/config_spec.rb b/spec/requests/api/graphql/ci/config_spec.rb
index 960fda80dd9..784019ee926 100644
--- a/spec/requests/api/graphql/ci/config_spec.rb
+++ b/spec/requests/api/graphql/ci/config_spec.rb
@@ -176,22 +176,22 @@ RSpec.describe 'Query.ciConfig' do
"jobs" =>
{
"nodes" => [
- {
- "name" => "docker",
- "groupName" => "docker",
- "stage" => "test",
- "script" => ["curl http://dockerhub/URL"],
- "beforeScript" => ["bundle install", "bundle exec rake db:create"],
- "afterScript" => ["echo 'run this after'"],
- "allowFailure" => true,
- "only" => { "refs" => %w[branches tags] },
- "when" => "manual",
- "except" => { "refs" => ["branches"] },
- "environment" => nil,
- "tags" => [],
- "needs" => { "nodes" => [{ "name" => "spinach" }, { "name" => "rspec 0 1" }] }
- }
- ]
+ {
+ "name" => "docker",
+ "groupName" => "docker",
+ "stage" => "test",
+ "script" => ["curl http://dockerhub/URL"],
+ "beforeScript" => ["bundle install", "bundle exec rake db:create"],
+ "afterScript" => ["echo 'run this after'"],
+ "allowFailure" => true,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "manual",
+ "except" => { "refs" => ["branches"] },
+ "environment" => nil,
+ "tags" => [],
+ "needs" => { "nodes" => [{ "name" => "spinach" }, { "name" => "rspec 0 1" }] }
+ }
+ ]
}
}
]
@@ -209,22 +209,22 @@ RSpec.describe 'Query.ciConfig' do
"jobs" =>
{
"nodes" => [
- {
- "name" => "deploy_job",
- "groupName" => "deploy_job",
- "stage" => "deploy",
- "script" => ["echo 'done'"],
- "beforeScript" => ["bundle install", "bundle exec rake db:create"],
- "afterScript" => ["echo 'run this after'"],
- "allowFailure" => false,
- "only" => { "refs" => %w[branches tags] },
- "when" => "on_success",
- "except" => nil,
- "environment" => "production",
- "tags" => [],
- "needs" => { "nodes" => [] }
- }
- ]
+ {
+ "name" => "deploy_job",
+ "groupName" => "deploy_job",
+ "stage" => "deploy",
+ "script" => ["echo 'done'"],
+ "beforeScript" => ["bundle install", "bundle exec rake db:create"],
+ "afterScript" => ["echo 'run this after'"],
+ "allowFailure" => false,
+ "only" => { "refs" => %w[branches tags] },
+ "when" => "on_success",
+ "except" => nil,
+ "environment" => "production",
+ "tags" => [],
+ "needs" => { "nodes" => [] }
+ }
+ ]
}
}
]
diff --git a/spec/requests/api/graphql/ci/config_variables_spec.rb b/spec/requests/api/graphql/ci/config_variables_spec.rb
index 2b5a5d0dc93..17133d7ea66 100644
--- a/spec/requests/api/graphql/ci/config_variables_spec.rb
+++ b/spec/requests/api/graphql/ci/config_variables_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe 'Query.project(fullPath).ciConfigVariables(sha)' do
ciConfigVariables(sha: "#{sha}") {
key
value
+ valueOptions
description
}
}
@@ -53,13 +54,21 @@ RSpec.describe 'Query.project(fullPath).ciConfigVariables(sha)' do
expect(graphql_data.dig('project', 'ciConfigVariables')).to contain_exactly(
{
+ 'key' => 'KEY_VALUE_VAR',
+ 'value' => 'value x',
+ 'valueOptions' => nil,
+ 'description' => 'value of KEY_VALUE_VAR'
+ },
+ {
'key' => 'DB_NAME',
'value' => 'postgres',
+ 'valueOptions' => nil,
'description' => nil
},
{
'key' => 'ENVIRONMENT_VAR',
'value' => 'env var value',
+ 'valueOptions' => ['env var value', 'env var value2'],
'description' => 'env var description'
}
)
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index fa8fb1d54aa..47e3221c567 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -25,11 +25,12 @@ RSpec.describe 'Query.project.pipeline' do
let(:first_n) { var('Int') }
let(:query) do
- with_signature([first_n], wrap_fields(query_graphql_path([
- [:project, { full_path: project.full_path }],
- [:pipeline, { iid: pipeline.iid.to_s }],
- [:stages, { first: first_n }]
- ], stage_fields)))
+ with_signature([first_n], wrap_fields(query_graphql_path(
+ [
+ [:project, { full_path: project.full_path }],
+ [:pipeline, { iid: pipeline.iid.to_s }],
+ [:stages, { first: first_n }]
+ ], stage_fields)))
end
let(:stage_fields) do
diff --git a/spec/requests/api/graphql/ci/pipeline_schedules_spec.rb b/spec/requests/api/graphql/ci/pipeline_schedules_spec.rb
new file mode 100644
index 00000000000..8b8ba09a95c
--- /dev/null
+++ b/spec/requests/api/graphql/ci/pipeline_schedules_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.project.pipelineSchedules' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+
+ let(:pipeline_schedule_graphql_data) { graphql_data_at(:project, :pipeline_schedules, :nodes, 0) }
+
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ id
+ description
+ active
+ nextRunAt
+ realNextRun
+ lastPipeline {
+ id
+ }
+ refForDisplay
+ refPath
+ forTag
+ cron
+ cronTimezone
+ }
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipelineSchedules {
+ #{fields}
+ }
+ }
+ }
+ )
+ end
+
+ describe 'computed graphql fields' do
+ before do
+ pipeline_schedule.pipelines << build(:ci_pipeline, project: project)
+
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns calculated fields for a pipeline schedule' do
+ ref_for_display = pipeline_schedule_graphql_data['refForDisplay']
+
+ expect(ref_for_display).to eq('master')
+ expect(pipeline_schedule_graphql_data['refPath']).to eq("/#{project.full_path}/-/commits/#{ref_for_display}")
+ expect(pipeline_schedule_graphql_data['forTag']).to be(false)
+ end
+ end
+
+ it 'avoids N+1 queries' do
+ create_pipeline_schedules(1)
+
+ control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: user) }
+
+ create_pipeline_schedules(3)
+
+ action = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: user) }
+
+ expect(action).not_to exceed_query_limit(control)
+ end
+
+ def create_pipeline_schedules(count)
+ create_list(:ci_pipeline_schedule, count, project: project)
+ .each do |pipeline_schedule|
+ create(:user).tap do |user|
+ project.add_developer(user)
+ pipeline_schedule.update!(owner: user)
+ end
+ pipeline_schedule.pipelines << build(:ci_pipeline, project: project)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/jobs_query_spec.rb b/spec/requests/api/graphql/jobs_query_spec.rb
new file mode 100644
index 00000000000..5907566be7f
--- /dev/null
+++ b/spec/requests/api/graphql/jobs_query_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting job information' do
+ include GraphqlHelpers
+
+ let_it_be(:job) { create(:ci_build, :success, name: 'job1') }
+
+ let(:query) do
+ graphql_query_for(:jobs)
+ end
+
+ context 'when user is admin' do
+ let_it_be(:current_user) { create(:admin) }
+
+ it 'has full access to all jobs', :aggregate_failure do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:jobs, :count)).to eq(1)
+ expect(graphql_data_at(:jobs, :nodes)).to contain_exactly(a_graphql_entity_for(job))
+ end
+
+ context 'when filtered by status' do
+ let_it_be(:pending_job) { create(:ci_build, :pending) }
+ let_it_be(:failed_job) { create(:ci_build, :failed) }
+
+ it 'gets pending jobs', :aggregate_failure do
+ post_graphql(graphql_query_for(:jobs, { statuses: :PENDING }), current_user: current_user)
+
+ expect(graphql_data_at(:jobs, :count)).to eq(1)
+ expect(graphql_data_at(:jobs, :nodes)).to contain_exactly(a_graphql_entity_for(pending_job))
+ end
+
+ it 'gets pending and failed jobs', :aggregate_failure do
+ post_graphql(graphql_query_for(:jobs, { statuses: [:PENDING, :FAILED] }), current_user: current_user)
+
+ expect(graphql_data_at(:jobs, :count)).to eq(2)
+ expect(graphql_data_at(:jobs, :nodes)).to match_array([a_graphql_entity_for(pending_job),
+ a_graphql_entity_for(failed_job)])
+ end
+ end
+ end
+
+ context 'if the user is not an admin' do
+ let_it_be(:current_user) { create(:user) }
+
+ it 'has no access to the jobs', :aggregate_failure do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:jobs, :count)).to eq(0)
+ expect(graphql_data_at(:jobs, :nodes)).to match_array([])
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/milestone_spec.rb b/spec/requests/api/graphql/milestone_spec.rb
index f6835936418..78e7ec39ee3 100644
--- a/spec/requests/api/graphql/milestone_spec.rb
+++ b/spec/requests/api/graphql/milestone_spec.rb
@@ -5,8 +5,12 @@ require 'spec_helper'
RSpec.describe 'Querying a Milestone' do
include GraphqlHelpers
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, group: group) }
let_it_be(:guest) { create(:user) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:inherited_guest) { create(:user) }
+ let_it_be(:inherited_reporter) { create(:user) }
+ let_it_be(:inherited_developer) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:release_a) { create(:release, project: project) }
let_it_be(:release_b) { create(:release, project: project) }
@@ -14,116 +18,137 @@ RSpec.describe 'Querying a Milestone' do
before_all do
milestone.releases << [release_a, release_b]
project.add_guest(guest)
+ group.add_guest(inherited_guest)
+ group.add_reporter(inherited_reporter)
+ group.add_developer(inherited_developer)
end
let(:expected_release_nodes) do
contain_exactly(a_graphql_entity_for(release_a), a_graphql_entity_for(release_b))
end
- context 'when we post the query' do
- let(:current_user) { nil }
- let(:query) do
- graphql_query_for('milestone', { id: milestone.to_global_id.to_s }, all_graphql_fields_for('Milestone'))
- end
+ shared_examples 'returns the milestone successfully' do
+ it_behaves_like 'a working graphql query'
- subject { graphql_data['milestone'] }
+ it { is_expected.to include('title' => milestone.name) }
- before do
- post_graphql(query, current_user: current_user)
+ it 'contains release information' do
+ is_expected.to include('releases' => include('nodes' => expected_release_nodes))
end
+ end
- context 'when the user has access to the milestone' do
- let(:current_user) { guest }
-
- it_behaves_like 'a working graphql query'
-
- it { is_expected.to include('title' => milestone.name) }
-
- it 'contains release information' do
- is_expected.to include('releases' => include('nodes' => expected_release_nodes))
+ context 'when we post the query' do
+ context 'and the project is private' do
+ let(:query) do
+ graphql_query_for('milestone', { id: milestone.to_global_id.to_s }, all_graphql_fields_for('Milestone'))
end
- end
- context 'when the user does not have access to the milestone' do
- it_behaves_like 'a working graphql query'
-
- it { is_expected.to be_nil }
- end
+ subject { graphql_data['milestone'] }
- context 'when ID argument is missing' do
- let(:query) do
- graphql_query_for('milestone', {}, 'title')
+ before do
+ post_graphql(query, current_user: current_user)
end
- it 'raises an exception' do
- expect(graphql_errors).to include(a_hash_including('message' => "Field 'milestone' is missing required arguments: id"))
+ context 'when the user is a direct project member' do
+ context 'and the user is a guest' do
+ let(:current_user) { guest }
+
+ it_behaves_like 'returns the milestone successfully'
+
+ context 'when there are two milestones' do
+ let_it_be(:milestone_b) { create(:milestone, project: project) }
+
+ let(:milestone_fields) do
+ <<~GQL
+ fragment milestoneFields on Milestone {
+ #{all_graphql_fields_for('Milestone', max_depth: 1)}
+ releases { nodes { #{all_graphql_fields_for('Release', max_depth: 1)} } }
+ }
+ GQL
+ end
+
+ let(:single_query) do
+ <<~GQL
+ query ($id_a: MilestoneID!) {
+ a: milestone(id: $id_a) { ...milestoneFields }
+ }
+
+ #{milestone_fields}
+ GQL
+ end
+
+ let(:multi_query) do
+ <<~GQL
+ query ($id_a: MilestoneID!, $id_b: MilestoneID!) {
+ a: milestone(id: $id_a) { ...milestoneFields }
+ b: milestone(id: $id_b) { ...milestoneFields }
+ }
+ #{milestone_fields}
+ GQL
+ end
+
+ it 'returns the correct releases associated with each milestone' do
+ r = run_with_clean_state(multi_query,
+ context: { current_user: current_user },
+ variables: {
+ id_a: global_id_of(milestone).to_s,
+ id_b: milestone_b.to_global_id.to_s
+ })
+
+ expect(r.to_h['errors']).to be_blank
+ expect(graphql_dig_at(r.to_h, :data, :a, :releases, :nodes)).to match expected_release_nodes
+ expect(graphql_dig_at(r.to_h, :data, :b, :releases, :nodes)).to be_empty
+ end
+
+ it 'does not suffer from N+1 performance issues' do
+ baseline = ActiveRecord::QueryRecorder.new do
+ run_with_clean_state(single_query,
+ context: { current_user: current_user },
+ variables: { id_a: milestone.to_global_id.to_s })
+ end
+
+ multi = ActiveRecord::QueryRecorder.new do
+ run_with_clean_state(multi_query,
+ context: { current_user: current_user },
+ variables: {
+ id_a: milestone.to_global_id.to_s,
+ id_b: milestone_b.to_global_id.to_s
+ })
+ end
+
+ expect(multi).not_to exceed_query_limit(baseline)
+ end
+ end
+ end
end
- end
- end
- context 'when there are two milestones' do
- let_it_be(:milestone_b) { create(:milestone, project: project) }
-
- let(:current_user) { guest }
- let(:milestone_fields) do
- <<~GQL
- fragment milestoneFields on Milestone {
- #{all_graphql_fields_for('Milestone', max_depth: 1)}
- releases { nodes { #{all_graphql_fields_for('Release', max_depth: 1)} } }
- }
- GQL
- end
+ context 'when the user is an inherited member from the group' do
+ where(:user) { [ref(:inherited_guest), ref(:inherited_reporter), ref(:inherited_developer)] }
- let(:single_query) do
- <<~GQL
- query ($id_a: MilestoneID!) {
- a: milestone(id: $id_a) { ...milestoneFields }
- }
+ with_them do
+ let(:current_user) { user }
- #{milestone_fields}
- GQL
- end
+ it_behaves_like 'returns the milestone successfully'
+ end
+ end
- let(:multi_query) do
- <<~GQL
- query ($id_a: MilestoneID!, $id_b: MilestoneID!) {
- a: milestone(id: $id_a) { ...milestoneFields }
- b: milestone(id: $id_b) { ...milestoneFields }
- }
- #{milestone_fields}
- GQL
- end
+ context 'when unauthenticated' do
+ let(:current_user) { nil }
- it 'produces correct results' do
- r = run_with_clean_state(multi_query,
- context: { current_user: current_user },
- variables: {
- id_a: global_id_of(milestone).to_s,
- id_b: milestone_b.to_global_id.to_s
- })
-
- expect(r.to_h['errors']).to be_blank
- expect(graphql_dig_at(r.to_h, :data, :a, :releases, :nodes)).to match expected_release_nodes
- expect(graphql_dig_at(r.to_h, :data, :b, :releases, :nodes)).to be_empty
- end
+ it_behaves_like 'a working graphql query'
- it 'does not suffer from N+1 performance issues' do
- baseline = ActiveRecord::QueryRecorder.new do
- run_with_clean_state(single_query,
- context: { current_user: current_user },
- variables: { id_a: milestone.to_global_id.to_s })
- end
+ it { is_expected.to be_nil }
- multi = ActiveRecord::QueryRecorder.new do
- run_with_clean_state(multi_query,
- context: { current_user: current_user },
- variables: {
- id_a: milestone.to_global_id.to_s,
- id_b: milestone_b.to_global_id.to_s
- })
- end
+ context 'when ID argument is missing' do
+ let(:query) do
+ graphql_query_for('milestone', {}, 'title')
+ end
- expect(multi).not_to exceed_query_limit(baseline)
+ it 'raises an exception' do
+ expect(graphql_errors).to include(a_hash_including('message' => "Field 'milestone' is missing required arguments: id"))
+ end
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/mutations/ci/job/artifacts_destroy_spec.rb b/spec/requests/api/graphql/mutations/ci/job/artifacts_destroy_spec.rb
new file mode 100644
index 00000000000..bdad80995ea
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/job/artifacts_destroy_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'JobArtifactsDestroy' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:job) { create(:ci_build) }
+
+ let(:mutation) do
+ variables = {
+ id: job.to_global_id.to_s
+ }
+ graphql_mutation(:job_artifacts_destroy, variables, <<~FIELDS)
+ job {
+ name
+ }
+ destroyedArtifactsCount
+ errors
+ FIELDS
+ end
+
+ before do
+ create(:ci_job_artifact, :archive, job: job)
+ create(:ci_job_artifact, :junit, job: job)
+ end
+
+ context 'when the user is not allowed to destroy the job artifacts' do
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ expect(job.reload.job_artifacts.count).to be(2)
+ end
+ end
+
+ context 'when the user is allowed to destroy the job artifacts' do
+ before do
+ job.project.add_maintainer(user)
+ end
+
+ it 'destroys the job artifacts and returns the expected data' do
+ expected_data = {
+ 'jobArtifactsDestroy' => {
+ 'errors' => [],
+ 'destroyedArtifactsCount' => 2,
+ 'job' => {
+ 'name' => job.name
+ }
+ }
+ }
+
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_data).to eq(expected_data)
+ expect(job.reload.job_artifacts.count).to be(0)
+ end
+
+ context 'when the the project this job belongs to is undergoing stats refresh' do
+ it 'destroys no artifacts and returns the correct error' do
+ allow_next_found_instance_of(Project) do |project|
+ allow(project).to receive(:refreshing_build_artifacts_size?).and_return(true)
+ end
+
+ expected_data = {
+ 'jobArtifactsDestroy' => {
+ 'errors' => ['Action temporarily disabled. The project this job belongs to is undergoing stats refresh.'],
+ 'destroyedArtifactsCount' => 0,
+ 'job' => {
+ 'name' => job.name
+ }
+ }
+ }
+
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_data).to eq(expected_data)
+ expect(job.reload.job_artifacts.count).to be(2)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb b/spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb
index 5269c60b50a..b2f84ab2869 100644
--- a/spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/job_token_scope/add_project_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'CiJobTokenScopeAddProject' do
include GraphqlHelpers
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:target_project) { create(:project) }
let(:variables) do
diff --git a/spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb b/spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
index b62291d1ebd..2b0adf89f40 100644
--- a/spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/job_token_scope/remove_project_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'CiJobTokenScopeRemoveProject' do
include GraphqlHelpers
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:target_project) { create(:project) }
let_it_be(:link) do
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_schedule_delete_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_schedule_delete_spec.rb
new file mode 100644
index 00000000000..b197d223463
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_schedule_delete_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'PipelineScheduleDelete' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: user) }
+
+ let(:mutation) do
+ graphql_mutation(
+ :pipeline_schedule_delete,
+ { id: pipeline_schedule_id },
+ <<-QL
+ errors
+ QL
+ )
+ end
+
+ let(:pipeline_schedule_id) { pipeline_schedule.to_global_id.to_s }
+ let(:mutation_response) { graphql_mutation_response(:pipeline_schedule_delete) }
+
+ context 'when unauthorized' do
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ expect(graphql_errors[0]['message'])
+ .to eq(
+ "The resource that you are attempting to access does not exist " \
+ "or you don't have permission to perform this action"
+ )
+ end
+ end
+
+ context 'when authorized' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when success' do
+ it do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to eq([])
+ end
+ end
+
+ context 'when failure' do
+ context 'when destroy fails' do
+ before do
+ allow_next_found_instance_of(Ci::PipelineSchedule) do |pipeline_schedule|
+ allow(pipeline_schedule).to receive(:destroy).and_return(false)
+ end
+ end
+
+ it do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+
+ expect(mutation_response['errors']).to match_array(['Failed to remove the pipeline schedule'])
+ end
+ end
+
+ context 'when pipeline schedule not found' do
+ let(:pipeline_schedule_id) { 'gid://gitlab/Ci::PipelineSchedule/0' }
+
+ it do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(graphql_errors).not_to be_empty
+ expect(graphql_errors[0]['message'])
+ .to eq("Internal server error: Couldn't find Ci::PipelineSchedule with 'id'=0")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb b/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb
index 394d9ff53d1..c808cf5ede9 100644
--- a/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/project_ci_cd_settings_update_spec.rb
@@ -6,15 +6,19 @@ RSpec.describe 'ProjectCiCdSettingsUpdate' do
include GraphqlHelpers
let_it_be(:project) do
- create(:project, keep_latest_artifact: true, ci_job_token_scope_enabled: true)
- .tap(&:save!)
+ create(:project,
+ keep_latest_artifact: true,
+ ci_outbound_job_token_scope_enabled: true,
+ ci_inbound_job_token_scope_enabled: true
+ ).tap(&:save!)
end
let(:variables) do
{
full_path: project.full_path,
keep_latest_artifact: false,
- job_token_scope_enabled: false
+ job_token_scope_enabled: false,
+ inbound_job_token_scope_enabled: false
}
end
@@ -62,7 +66,7 @@ RSpec.describe 'ProjectCiCdSettingsUpdate' do
project.reload
expect(response).to have_gitlab_http_status(:success)
- expect(project.ci_job_token_scope_enabled).to eq(false)
+ expect(project.ci_outbound_job_token_scope_enabled).to eq(false)
end
it 'does not update job_token_scope_enabled if not specified' do
@@ -73,7 +77,44 @@ RSpec.describe 'ProjectCiCdSettingsUpdate' do
project.reload
expect(response).to have_gitlab_http_status(:success)
- expect(project.ci_job_token_scope_enabled).to eq(true)
+ expect(project.ci_outbound_job_token_scope_enabled).to eq(true)
+ end
+
+ describe 'inbound_job_token_scope_enabled' do
+ it 'updates inbound_job_token_scope_enabled' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(project.ci_inbound_job_token_scope_enabled).to eq(false)
+ end
+
+ it 'does not update inbound_job_token_scope_enabled if not specified' do
+ variables.except!(:inbound_job_token_scope_enabled)
+
+ post_graphql_mutation(mutation, current_user: user)
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(project.ci_inbound_job_token_scope_enabled).to eq(true)
+ end
+
+ context 'when ci_inbound_job_token_scope disabled' do
+ before do
+ stub_feature_flags(ci_inbound_job_token_scope: false)
+ end
+
+ it 'does not update inbound_job_token_scope_enabled' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(project.ci_inbound_job_token_scope_enabled).to eq(true)
+ end
+ end
end
context 'when bad arguments are provided' do
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
index 194e42bf59d..567d8799d93 100644
--- a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -14,7 +14,13 @@ RSpec.describe 'Updating the package settings' do
maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'foo-.*',
generic_duplicates_allowed: false,
- generic_duplicate_exception_regex: 'bar-.*'
+ generic_duplicate_exception_regex: 'bar-.*',
+ maven_package_requests_forwarding: true,
+ lock_maven_package_requests_forwarding: true,
+ npm_package_requests_forwarding: true,
+ lock_npm_package_requests_forwarding: true,
+ pypi_package_requests_forwarding: true,
+ lock_pypi_package_requests_forwarding: true
}
end
@@ -26,6 +32,12 @@ RSpec.describe 'Updating the package settings' do
mavenDuplicateExceptionRegex
genericDuplicatesAllowed
genericDuplicateExceptionRegex
+ mavenPackageRequestsForwarding
+ lockMavenPackageRequestsForwarding
+ npmPackageRequestsForwarding
+ lockNpmPackageRequestsForwarding
+ pypiPackageRequestsForwarding
+ lockPypiPackageRequestsForwarding
}
errors
QL
@@ -46,6 +58,12 @@ RSpec.describe 'Updating the package settings' do
expect(package_settings_response['mavenDuplicateExceptionRegex']).to eq(params[:maven_duplicate_exception_regex])
expect(package_settings_response['genericDuplicatesAllowed']).to eq(params[:generic_duplicates_allowed])
expect(package_settings_response['genericDuplicateExceptionRegex']).to eq(params[:generic_duplicate_exception_regex])
+ expect(package_settings_response['mavenPackageRequestsForwarding']).to eq(params[:maven_package_requests_forwarding])
+ expect(package_settings_response['lockMavenPackageRequestsForwarding']).to eq(params[:lock_maven_package_requests_forwarding])
+ expect(package_settings_response['pypiPackageRequestsForwarding']).to eq(params[:pypi_package_requests_forwarding])
+ expect(package_settings_response['lockPypiPackageRequestsForwarding']).to eq(params[:lock_pypi_package_requests_forwarding])
+ expect(package_settings_response['npmPackageRequestsForwarding']).to eq(params[:npm_package_requests_forwarding])
+ expect(package_settings_response['lockNpmPackageRequestsForwarding']).to eq(params[:lock_npm_package_requests_forwarding])
end
end
@@ -75,8 +93,29 @@ RSpec.describe 'Updating the package settings' do
RSpec.shared_examples 'accepting the mutation request updating the package settings' do
it_behaves_like 'updating the namespace package setting attributes',
- from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT', generic_duplicates_allowed: true, generic_duplicate_exception_regex: 'foo' },
- to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'foo-.*', generic_duplicates_allowed: false, generic_duplicate_exception_regex: 'bar-.*' }
+ from: {
+ maven_duplicates_allowed: true,
+ maven_duplicate_exception_regex: 'SNAPSHOT',
+ generic_duplicates_allowed: true,
+ generic_duplicate_exception_regex: 'foo',
+ maven_package_requests_forwarding: nil,
+ lock_maven_package_requests_forwarding: false,
+ npm_package_requests_forwarding: nil,
+ lock_npm_package_requests_forwarding: false,
+ pypi_package_requests_forwarding: nil,
+ lock_pypi_package_requests_forwarding: false
+ }, to: {
+ maven_duplicates_allowed: false,
+ maven_duplicate_exception_regex: 'foo-.*',
+ generic_duplicates_allowed: false,
+ generic_duplicate_exception_regex: 'bar-.*',
+ maven_package_requests_forwarding: true,
+ lock_maven_package_requests_forwarding: true,
+ npm_package_requests_forwarding: true,
+ lock_npm_package_requests_forwarding: true,
+ pypi_package_requests_forwarding: true,
+ lock_pypi_package_requests_forwarding: true
+ }
it_behaves_like 'returning a success'
it_behaves_like 'rejecting invalid regex'
diff --git a/spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb b/spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb
new file mode 100644
index 00000000000..1fe01af4f1c
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/packages/bulk_destroy_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Destroying multiple packages' do
+ using RSpec::Parameterized::TableSyntax
+
+ include GraphqlHelpers
+
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:packages1) { create_list(:package, 3, project: project1) }
+ let_it_be_with_reload(:packages2) { create_list(:package, 2, project: project2) }
+
+ let(:ids) { packages1.append(packages2).flatten.map(&:to_global_id).map(&:to_s) }
+
+ let(:query) do
+ <<~GQL
+ errors
+ GQL
+ end
+
+ let(:params) do
+ {
+ ids: ids
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:destroy_packages, params, query) }
+
+ describe 'post graphql mutation' do
+ subject(:mutation_request) { post_graphql_mutation(mutation, current_user: user) }
+
+ shared_examples 'destroying the packages' do
+ it 'marks the packages as pending destruction' do
+ expect { mutation_request }.to change { ::Packages::Package.pending_destruction.count }.by(5)
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ shared_examples 'denying the mutation request' do
+ |response = ::Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR|
+ it 'does not mark the packages as pending destruction' do
+ expect { mutation_request }.not_to change { ::Packages::Package.pending_destruction.count }
+ expect_graphql_errors_to_include(response)
+ end
+
+ it_behaves_like 'returning response status', :success
+ end
+
+ context 'with valid params' do
+ where(:user_role, :shared_examples_name) do
+ :maintainer | 'destroying the packages'
+ :developer | 'denying the mutation request'
+ :reporter | 'denying the mutation request'
+ :guest | 'denying the mutation request'
+ :not_in_project | 'denying the mutation request'
+ end
+
+ with_them do
+ before do
+ unless user_role == :not_in_project
+ project1.send("add_#{user_role}", user)
+ project2.send("add_#{user_role}", user)
+ end
+ end
+
+ it_behaves_like params[:shared_examples_name]
+ end
+
+ context 'for over the limit' do
+ before do
+ project1.add_maintainer(user)
+ project2.add_maintainer(user)
+ stub_const("Mutations::Packages::BulkDestroy::MAX_PACKAGES", 2)
+ end
+
+ it_behaves_like 'denying the mutation request', ::Mutations::Packages::BulkDestroy::TOO_MANY_IDS_ERROR
+ end
+
+ context 'with packages outside of the project' do
+ before do
+ project1.add_maintainer(user)
+ end
+
+ it_behaves_like 'denying the mutation request'
+ end
+ end
+
+ context 'with invalid params' do
+ let(:ids) { 'foo' }
+
+ it_behaves_like 'denying the mutation request', 'invalid value for id'
+ end
+
+ context 'with multi mutations' do
+ let(:package1) { packages1.first }
+ let(:package2) { packages2.first }
+ let(:query) do
+ <<~QUERY
+ mutation {
+ a: destroyPackages(input: { ids: ["#{package1.to_global_id}"]}) {
+ errors
+ }
+ b: destroyPackages(input: { ids: ["#{package2.to_global_id}"]}) {
+ errors
+ }
+ }
+ QUERY
+ end
+
+ subject(:mutation_request) { post_graphql(query, current_user: user) }
+
+ before do
+ project1.add_maintainer(user)
+ project2.add_maintainer(user)
+ end
+
+ it 'executes the first mutation but not the second one' do
+ expect { mutation_request }.to change { package1.reload.status }.from('default').to('pending_destruction')
+ .and not_change { package2.reload.status }
+ expect_graphql_errors_to_include('"destroyPackages" field can be requested only for 1 Mutation(s) at a time.')
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/uploads/delete_spec.rb b/spec/requests/api/graphql/mutations/uploads/delete_spec.rb
index f44bf179397..2d1b33cc086 100644
--- a/spec/requests/api/graphql/mutations/uploads/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/uploads/delete_spec.rb
@@ -47,10 +47,11 @@ RSpec.describe 'Delete an upload' do
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['upload']).to be_nil
- expect(mutation_response['errors']).to match_array([
- "The resource that you are attempting to access does not "\
- "exist or you don't have permission to perform this action."
- ])
+ expect(mutation_response['errors']).to match_array(
+ [
+ "The resource that you are attempting to access does not "\
+ "exist or you don't have permission to perform this action."
+ ])
end
end
end
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index 909d6549fa5..6b0129c457f 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -144,6 +144,78 @@ RSpec.describe 'Update a work item' do
end
end
+ context 'with labels widget input' do
+ shared_examples 'mutation updating work item labels' do
+ it 'updates labels' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change { work_item.labels.count }.to(expected_labels.count)
+
+ expect(work_item.labels).to match_array(expected_labels)
+ expect(mutation_response['workItem']['widgets']).to include(
+ 'labels' => {
+ 'nodes' => match_array(expected_labels.map { |l| { 'id' => l.to_gid.to_s } })
+ },
+ 'type' => 'LABELS'
+ )
+ end
+ end
+
+ let_it_be(:existing_label) { create(:label, project: project) }
+ let_it_be(:label1) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
+
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ widgets {
+ type
+ ... on WorkItemWidgetLabels {
+ labels {
+ nodes { id }
+ }
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
+ let(:input) do
+ { 'labelsWidget' => { 'addLabelIds' => add_label_ids, 'removeLabelIds' => remove_label_ids } }
+ end
+
+ let(:add_label_ids) { [] }
+ let(:remove_label_ids) { [] }
+
+ before_all do
+ work_item.update!(labels: [existing_label])
+ end
+
+ context 'when only removing labels' do
+ let(:remove_label_ids) { [existing_label.to_gid.to_s] }
+ let(:expected_labels) { [] }
+
+ it_behaves_like 'mutation updating work item labels'
+ end
+
+ context 'when only adding labels' do
+ let(:add_label_ids) { [label1.to_gid.to_s, label2.to_gid.to_s] }
+ let(:expected_labels) { [label1, label2, existing_label] }
+
+ it_behaves_like 'mutation updating work item labels'
+ end
+
+ context 'when adding and removing labels' do
+ let(:remove_label_ids) { [existing_label.to_gid.to_s] }
+ let(:add_label_ids) { [label1.to_gid.to_s, label2.to_gid.to_s] }
+ let(:expected_labels) { [label1, label2] }
+
+ it_behaves_like 'mutation updating work item labels'
+ end
+ end
+
context 'with due and start date widget input' do
let(:start_date) { Date.today }
let(:due_date) { 1.week.from_now.to_date }
diff --git a/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb
deleted file mode 100644
index 2a5cb937a2f..00000000000
--- a/spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Update work item widgets' do
- include GraphqlHelpers
-
- let_it_be(:project) { create(:project) }
- let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
- let_it_be(:work_item, refind: true) { create(:work_item, project: project) }
-
- let(:input) { { 'descriptionWidget' => { 'description' => 'updated description' } } }
- let(:mutation_response) { graphql_mutation_response(:work_item_update_widgets) }
- let(:mutation) do
- graphql_mutation(:workItemUpdateWidgets, input.merge('id' => work_item.to_global_id.to_s), <<~FIELDS)
- errors
- workItem {
- description
- widgets {
- type
- ... on WorkItemWidgetDescription {
- description
- }
- }
- }
- FIELDS
- end
-
- context 'the user is not allowed to update a work item' do
- let(:current_user) { create(:user) }
-
- it_behaves_like 'a mutation that returns a top-level access error'
- end
-
- context 'when user has permissions to update a work item', :aggregate_failures do
- let(:current_user) { developer }
-
- it_behaves_like 'update work item description widget' do
- let(:new_description) { 'updated description' }
- end
-
- it_behaves_like 'has spam protection' do
- let(:mutation_class) { ::Mutations::WorkItems::UpdateWidgets }
- end
-
- context 'when the work_items feature flag is disabled' do
- before do
- stub_feature_flags(work_items: false)
- end
-
- it 'does not update the work item and returns and error' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- work_item.reload
- end.to not_change(work_item, :description)
-
- expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
- end
- end
- end
-end
diff --git a/spec/requests/api/graphql/project/branch_rules_spec.rb b/spec/requests/api/graphql/project/branch_rules_spec.rb
index 70fb37941e2..1aaf0e9edc7 100644
--- a/spec/requests/api/graphql/project/branch_rules_spec.rb
+++ b/spec/requests/api/graphql/project/branch_rules_spec.rb
@@ -21,27 +21,24 @@ RSpec.describe 'getting list of branch rules for a project' do
let(:branch_rules_data) { graphql_data_at('project', 'branchRules', 'edges') }
let(:variables) { { path: project.full_path } }
-
- let(:fields) do
- <<~QUERY
- pageInfo {
- hasNextPage
- hasPreviousPage
- }
- edges {
- cursor
- node {
- #{all_graphql_fields_for('branch_rules'.classify)}
- }
- }
- QUERY
- end
-
+ # fields must use let as the all_graphql_fields_for also configures some spies
+ let(:fields) { all_graphql_fields_for('BranchRule') }
let(:query) do
<<~GQL
query($path: ID!, $n: Int, $cursor: String) {
project(fullPath: $path) {
- branchRules(first: $n, after: $cursor) { #{fields} }
+ branchRules(first: $n, after: $cursor) {
+ pageInfo {
+ hasNextPage
+ hasPreviousPage
+ }
+ edges {
+ cursor
+ node {
+ #{fields}
+ }
+ }
+ }
}
}
GQL
@@ -55,7 +52,9 @@ RSpec.describe 'getting list of branch rules for a project' do
it_behaves_like 'a working graphql query'
- it { expect(branch_rules_data).to be_empty }
+ it 'hides branch rules data' do
+ expect(branch_rules_data).to be_empty
+ end
end
context 'when the user does have read_protected_branch abilities' do
@@ -66,12 +65,17 @@ RSpec.describe 'getting list of branch rules for a project' do
it_behaves_like 'a working graphql query'
- it 'includes a name' do
+ it 'returns branch rules data' do
expect(branch_rules_data.dig(0, 'node', 'name')).to be_present
- end
-
- it 'includes created_at and updated_at' do
+ expect(branch_rules_data.dig(0, 'node', 'isDefault')).to be(true).or be(false)
+ expect(branch_rules_data.dig(0, 'node', 'branchProtection')).to be_present
expect(branch_rules_data.dig(0, 'node', 'createdAt')).to be_present
+ expect(branch_rules_data.dig(0, 'node', 'updatedAt')).to be_present
+
+ expect(branch_rules_data.dig(1, 'node', 'name')).to be_present
+ expect(branch_rules_data.dig(1, 'node', 'isDefault')).to be(true).or be(false)
+ expect(branch_rules_data.dig(1, 'node', 'branchProtection')).to be_present
+ expect(branch_rules_data.dig(1, 'node', 'createdAt')).to be_present
expect(branch_rules_data.dig(1, 'node', 'updatedAt')).to be_present
end
@@ -82,16 +86,16 @@ RSpec.describe 'getting list of branch rules for a project' do
{ path: project.full_path, n: branch_rule_limit, cursor: last_cursor }
end
- it_behaves_like 'a working graphql query' do
- it 'only returns N branch_rules' do
- expect(branch_rules_data.size).to eq(branch_rule_limit)
- expect(has_next_page).to be_truthy
- expect(has_prev_page).to be_falsey
- post_graphql(query, current_user: current_user, variables: next_variables)
- expect(branch_rules_data.size).to eq(branch_rule_limit)
- expect(has_next_page).to be_falsey
- expect(has_prev_page).to be_truthy
- end
+ it_behaves_like 'a working graphql query'
+
+ it 'returns pagination information' do
+ expect(branch_rules_data.size).to eq(branch_rule_limit)
+ expect(has_next_page).to be_truthy
+ expect(has_prev_page).to be_falsey
+ post_graphql(query, current_user: current_user, variables: next_variables)
+ expect(branch_rules_data.size).to eq(branch_rule_limit)
+ expect(has_next_page).to be_falsey
+ expect(has_prev_page).to be_truthy
end
context 'when no limit is provided' do
diff --git a/spec/requests/api/graphql/project/cluster_agents_spec.rb b/spec/requests/api/graphql/project/cluster_agents_spec.rb
index a34df0ee6f4..bb716cf2849 100644
--- a/spec/requests/api/graphql/project/cluster_agents_spec.rb
+++ b/spec/requests/api/graphql/project/cluster_agents_spec.rb
@@ -61,11 +61,12 @@ RSpec.describe 'Project.cluster_agents' do
tokens = graphql_data_at(:project, :cluster_agents, :nodes, :tokens, :nodes)
- expect(tokens).to match([
- a_graphql_entity_for(token_3),
- a_graphql_entity_for(token_2),
- a_graphql_entity_for(token_1)
- ])
+ expect(tokens).to match(
+ [
+ a_graphql_entity_for(token_3),
+ a_graphql_entity_for(token_2),
+ a_graphql_entity_for(token_1)
+ ])
end
it 'does not suffer from N+1 performance issues' do
diff --git a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
index 02bc9457c07..965534654ea 100644
--- a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
+++ b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
@@ -245,9 +245,10 @@ RSpec.describe 'Getting designs related to an issue' do
end
it 'only returns one version record for the design (the original version)' do
- expect(version_nodes).to eq([
- [{ 'node' => { 'id' => global_id(version) } }]
- ])
+ expect(version_nodes).to eq(
+ [
+ [{ 'node' => { 'id' => global_id(version) } }]
+ ])
end
end
@@ -289,10 +290,11 @@ RSpec.describe 'Getting designs related to an issue' do
end
it 'returns the correct versions records for both designs' do
- expect(version_nodes).to eq([
- [{ 'node' => { 'id' => global_id(design.versions.first) } }],
- [{ 'node' => { 'id' => global_id(second_design.versions.first) } }]
- ])
+ expect(version_nodes).to eq(
+ [
+ [{ 'node' => { 'id' => global_id(design.versions.first) } }],
+ [{ 'node' => { 'id' => global_id(second_design.versions.first) } }]
+ ])
end
end
@@ -341,15 +343,16 @@ RSpec.describe 'Getting designs related to an issue' do
end
it 'returns all versions records for the designs' do
- expect(version_nodes).to eq([
- [
- { 'node' => { 'id' => global_id(design.versions.first) } }
- ],
+ expect(version_nodes).to eq(
[
- { 'node' => { 'id' => global_id(second_design.versions.second) } },
- { 'node' => { 'id' => global_id(second_design.versions.first) } }
- ]
- ])
+ [
+ { 'node' => { 'id' => global_id(design.versions.first) } }
+ ],
+ [
+ { 'node' => { 'id' => global_id(second_design.versions.second) } },
+ { 'node' => { 'id' => global_id(second_design.versions.first) } }
+ ]
+ ])
end
end
end
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index 28282860416..3b8beb4f798 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -662,6 +662,7 @@ RSpec.describe 'getting an issue list for a project' do
include_examples 'N+1 query check'
end
+
context 'when requesting `closed_as_duplicate_of`' do
let(:requested_fields) { 'closedAsDuplicateOf { id }' }
let(:issue_a_dup) { create(:issue, project: project) }
@@ -674,6 +675,55 @@ RSpec.describe 'getting an issue list for a project' do
include_examples 'N+1 query check'
end
+
+ context 'when award emoji votes' do
+ let(:requested_fields) { [:upvotes, :downvotes] }
+
+ before do
+ create_list(:award_emoji, 2, name: 'thumbsup', awardable: issue_a)
+ create_list(:award_emoji, 2, name: 'thumbsdown', awardable: issue_b)
+ end
+
+ include_examples 'N+1 query check'
+ end
+
+ context 'when requesting participants' do
+ let_it_be(:issue_c) { create(:issue, project: project) }
+
+ let(:search_params) { { iids: [issue_a.iid.to_s, issue_c.iid.to_s] } }
+ let(:requested_fields) { 'participants { nodes { name } }' }
+
+ before do
+ create(:award_emoji, :upvote, awardable: issue_a)
+ create(:award_emoji, :upvote, awardable: issue_b)
+ create(:award_emoji, :upvote, awardable: issue_c)
+
+ note_with_emoji_a = create(:note_on_issue, noteable: issue_a, project: project)
+ note_with_emoji_b = create(:note_on_issue, noteable: issue_b, project: project)
+ note_with_emoji_c = create(:note_on_issue, noteable: issue_c, project: project)
+
+ create(:award_emoji, :upvote, awardable: note_with_emoji_a)
+ create(:award_emoji, :upvote, awardable: note_with_emoji_b)
+ create(:award_emoji, :upvote, awardable: note_with_emoji_c)
+ end
+
+ # Executes 3 extra queries to fetch participant_attrs
+ include_examples 'N+1 query check', threshold: 3
+ end
+
+ context 'when requesting labels' do
+ let(:requested_fields) { ['labels { nodes { id } }'] }
+
+ before do
+ project_labels = create_list(:label, 2, project: project)
+ group_labels = create_list(:group_label, 2, group: group)
+
+ issue_a.update!(labels: [project_labels.first, group_labels.first].flatten)
+ issue_b.update!(labels: [project_labels, group_labels].flatten)
+ end
+
+ include_examples 'N+1 query check', skip_cached: false
+ end
end
def issues_ids
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index 5daec5543c0..2895737ae6f 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -5,12 +5,14 @@ require 'spec_helper'
RSpec.describe 'getting merge request listings nested in a project' do
include GraphqlHelpers
- let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, :public, group: group) }
let_it_be(:current_user) { create(:user) }
let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:group_label) { create(:group_label, group: group) }
let_it_be_with_reload(:merge_request_a) do
- create(:labeled_merge_request, :unique_branches, source_project: project, labels: [label])
+ create(:labeled_merge_request, :unique_branches, source_project: project, labels: [label, group_label])
end
let_it_be(:merge_request_b) do
@@ -18,7 +20,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
let_it_be(:merge_request_c) do
- create(:labeled_merge_request, :closed, :unique_branches, source_project: project, labels: [label])
+ create(:labeled_merge_request, :closed, :unique_branches, source_project: project, labels: [label, group_label])
end
let_it_be(:merge_request_d) do
@@ -327,6 +329,51 @@ RSpec.describe 'getting merge request listings nested in a project' do
include_examples 'N+1 query check'
end
+
+ context 'when award emoji votes' do
+ let(:requested_fields) { [:upvotes, :downvotes] }
+
+ before do
+ create_list(:award_emoji, 2, name: 'thumbsup', awardable: merge_request_a)
+ create_list(:award_emoji, 2, name: 'thumbsdown', awardable: merge_request_b)
+ end
+
+ include_examples 'N+1 query check'
+ end
+
+ context 'when requesting participants' do
+ let(:requested_fields) { 'participants { nodes { name } }' }
+
+ before do
+ create(:award_emoji, :upvote, awardable: merge_request_a)
+ create(:award_emoji, :upvote, awardable: merge_request_b)
+ create(:award_emoji, :upvote, awardable: merge_request_c)
+
+ note_with_emoji_a = create(:note_on_merge_request, noteable: merge_request_a, project: project)
+ note_with_emoji_b = create(:note_on_merge_request, noteable: merge_request_b, project: project)
+ note_with_emoji_c = create(:note_on_merge_request, noteable: merge_request_c, project: project)
+
+ create(:award_emoji, :upvote, awardable: note_with_emoji_a)
+ create(:award_emoji, :upvote, awardable: note_with_emoji_b)
+ create(:award_emoji, :upvote, awardable: note_with_emoji_c)
+ end
+
+ # Executes 3 extra queries to fetch participant_attrs
+ include_examples 'N+1 query check', threshold: 3
+ end
+
+ context 'when requesting labels' do
+ let(:requested_fields) { ['labels { nodes { id } }'] }
+
+ before do
+ project_labels = create_list(:label, 2, project: project)
+ group_labels = create_list(:group_label, 2, group: group)
+
+ merge_request_c.update!(labels: [project_labels, group_labels].flatten)
+ end
+
+ include_examples 'N+1 query check', skip_cached: false
+ end
end
describe 'performance' do
diff --git a/spec/requests/api/graphql/project/milestones_spec.rb b/spec/requests/api/graphql/project/milestones_spec.rb
index d1ee157fc74..a577c367fe5 100644
--- a/spec/requests/api/graphql/project/milestones_spec.rb
+++ b/spec/requests/api/graphql/project/milestones_spec.rb
@@ -25,9 +25,10 @@ RSpec.describe 'getting milestone listings nested in a project' do
graphql_query_for(
:project,
{ full_path: project.full_path },
- query_graphql_field(:milestones, search_params, [
- query_graphql_field(:nodes, nil, %i[id title])
- ])
+ query_graphql_field(:milestones, search_params,
+ [
+ query_graphql_field(:nodes, nil, %i[id title])
+ ])
)
end
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index 69f8d1cac74..e82f6ad24a2 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -2,16 +2,25 @@
require 'spec_helper'
-RSpec.describe 'getting an work item list for a project' do
+RSpec.describe 'getting a work item list for a project' do
include GraphqlHelpers
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, :public, group: group) }
let_it_be(:current_user) { create(:user) }
+ let_it_be(:label1) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
- let_it_be(:item1) { create(:work_item, project: project, discussion_locked: true, title: 'item1') }
+ let_it_be(:item1) { create(:work_item, project: project, discussion_locked: true, title: 'item1', labels: [label1]) }
let_it_be(:item2) do
- create(:work_item, project: project, title: 'item2', last_edited_by: current_user, last_edited_at: 1.day.ago)
+ create(
+ :work_item,
+ project: project,
+ title: 'item2',
+ last_edited_by: current_user,
+ last_edited_at: 1.day.ago,
+ labels: [label2]
+ )
end
let_it_be(:confidential_item) { create(:work_item, confidential: true, project: project, title: 'item3') }
@@ -30,6 +39,70 @@ RSpec.describe 'getting an work item list for a project' do
QUERY
end
+ shared_examples 'work items resolver without N + 1 queries' do
+ it 'avoids N+1 queries' do
+ post_graphql(query, current_user: current_user) # warm-up
+
+ control = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: current_user)
+ end
+
+ expect_graphql_errors_to_be_empty
+
+ create_list(
+ :work_item, 3,
+ :task,
+ :last_edited_by_user,
+ last_edited_at: 1.week.ago,
+ project: project,
+ labels: [label1, label2]
+ )
+
+ expect_graphql_errors_to_be_empty
+ expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(control)
+ end
+ end
+
+ describe 'N + 1 queries' do
+ context 'when querying root fields' do
+ it_behaves_like 'work items resolver without N + 1 queries'
+ end
+
+ # We need a separate example since all_graphql_fields_for will not fetch fields from types
+ # that implement the widget interface. Only `type` for the widgets field.
+ context 'when querying the widget interface' do
+ let(:fields) do
+ <<~GRAPHQL
+ nodes {
+ widgets {
+ type
+ ... on WorkItemWidgetDescription {
+ edited
+ lastEditedAt
+ lastEditedBy {
+ webPath
+ username
+ }
+ }
+ ... on WorkItemWidgetAssignees {
+ assignees { nodes { id } }
+ }
+ ... on WorkItemWidgetHierarchy {
+ parent { id }
+ }
+ ... on WorkItemWidgetLabels {
+ labels { nodes { id } }
+ allowsScopedLabels
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it_behaves_like 'work items resolver without N + 1 queries'
+ end
+ end
+
it_behaves_like 'a working graphql query' do
before do
post_graphql(query, current_user: current_user)
@@ -78,40 +151,6 @@ RSpec.describe 'getting an work item list for a project' do
end
end
- context 'when fetching description edit information' do
- let(:fields) do
- <<~GRAPHQL
- nodes {
- widgets {
- type
- ... on WorkItemWidgetDescription {
- edited
- lastEditedAt
- lastEditedBy {
- webPath
- username
- }
- }
- }
- }
- GRAPHQL
- end
-
- it 'avoids N+1 queries' do
- post_graphql(query, current_user: current_user) # warm-up
-
- control = ActiveRecord::QueryRecorder.new do
- post_graphql(query, current_user: current_user)
- end
- expect_graphql_errors_to_be_empty
-
- create_list(:work_item, 3, :last_edited_by_user, last_edited_at: 1.week.ago, project: project)
-
- expect_graphql_errors_to_be_empty
- expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(control)
- end
- end
-
context 'when filtering by search' do
it_behaves_like 'query with a search term' do
let(:issuable_data) { items_data }
diff --git a/spec/requests/api/graphql/todo_query_spec.rb b/spec/requests/api/graphql/todo_query_spec.rb
index be7242d95bd..7fe19448083 100644
--- a/spec/requests/api/graphql/todo_query_spec.rb
+++ b/spec/requests/api/graphql/todo_query_spec.rb
@@ -14,6 +14,11 @@ RSpec.describe 'Todo Query' do
let_it_be(:todo) { create(:todo, user: todo_owner, target: issue) }
let(:todo_subject) { todo }
+
+ before do
+ project.add_developer(todo_owner)
+ end
+
let(:fields) do
<<~GRAPHQL
id
@@ -31,10 +36,6 @@ RSpec.describe 'Todo Query' do
graphql_query_for(:todo, { id: todo_subject.to_global_id.to_s }, fields)
end
- before do
- project.add_developer(todo_owner)
- end
-
subject(:graphql_response) do
result = GitlabSchema.execute(query, context: { current_user: current_user }).to_h
graphql_dig_at(result, :data, :todo)
diff --git a/spec/requests/api/graphql/usage_trends_measurements_spec.rb b/spec/requests/api/graphql/usage_trends_measurements_spec.rb
index 69a3ed7e09c..78a4321f522 100644
--- a/spec/requests/api/graphql/usage_trends_measurements_spec.rb
+++ b/spec/requests/api/graphql/usage_trends_measurements_spec.rb
@@ -17,19 +17,25 @@ RSpec.describe 'UsageTrendsMeasurements' do
end
it 'returns measurement objects' do
- expect(graphql_data.dig('usageTrendsMeasurements', 'nodes')).to eq([
- { "count" => 10, 'identifier' => 'PROJECTS' },
- { "count" => 5, 'identifier' => 'PROJECTS' }
- ])
+ expect(graphql_data.dig('usageTrendsMeasurements', 'nodes')).to eq(
+ [
+ { "count" => 10, 'identifier' => 'PROJECTS' },
+ { "count" => 5, 'identifier' => 'PROJECTS' }
+ ])
end
context 'with recorded_at filters' do
- let(:arguments) { %(identifier: PROJECTS, recordedAfter: "#{15.days.ago.to_date}", recordedBefore: "#{5.days.ago.to_date}") }
+ let(:arguments) do
+ %(identifier: PROJECTS,
+ recordedAfter: "#{15.days.ago.to_date}",
+ recordedBefore: "#{5.days.ago.to_date}")
+ end
it 'returns filtered measurement objects' do
- expect(graphql_data.dig('usageTrendsMeasurements', 'nodes')).to eq([
- { "count" => 10, 'identifier' => 'PROJECTS' }
- ])
+ expect(graphql_data.dig('usageTrendsMeasurements', 'nodes')).to eq(
+ [
+ { "count" => 10, 'identifier' => 'PROJECTS' }
+ ])
end
end
end
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index e4bb4109c76..2105e479ed2 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -128,10 +128,11 @@ RSpec.describe 'Query.work_item(id)' do
hash_including(
'type' => 'HIERARCHY',
'parent' => nil,
- 'children' => { 'nodes' => match_array([
- hash_including('id' => child_link1.work_item.to_gid.to_s),
- hash_including('id' => child_link2.work_item.to_gid.to_s)
- ]) }
+ 'children' => { 'nodes' => match_array(
+ [
+ hash_including('id' => child_link1.work_item.to_gid.to_s),
+ hash_including('id' => child_link2.work_item.to_gid.to_s)
+ ]) }
)
)
)
@@ -161,9 +162,10 @@ RSpec.describe 'Query.work_item(id)' do
hash_including(
'type' => 'HIERARCHY',
'parent' => nil,
- 'children' => { 'nodes' => match_array([
- hash_including('id' => child_link1.work_item.to_gid.to_s)
- ]) }
+ 'children' => { 'nodes' => match_array(
+ [
+ hash_including('id' => child_link1.work_item.to_gid.to_s)
+ ]) }
)
)
)
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 6169bc9b2a2..02d29601ceb 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -505,13 +505,35 @@ RSpec.describe API::Groups do
group3.add_maintainer(user2)
end
- it 'returns an array of groups the user has at least master access' do
- get api('/groups', user2), params: { min_access_level: 40 }
+ context 'with min_access_level parameter' do
+ it 'returns an array of groups the user has at least master access' do
+ get api('/groups', user2), params: { min_access_level: 40 }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(response_groups).to contain_exactly(group2.id, group3.id)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(response_groups).to contain_exactly(group2.id, group3.id)
+ end
+
+ context 'distinct count with present_groups_select_all feature flag' do
+ subject { get api('/groups', user2), params: { min_access_level: 40 } }
+
+ it 'counts with *' do
+ count_sql = /#{Regexp.escape('SELECT count(*)')}/i
+ expect { subject }.to make_queries_matching count_sql
+ end
+
+ context 'when present_groups_select_all feature flag is disabled' do
+ before do
+ stub_feature_flags(present_groups_select_all: false)
+ end
+
+ it 'counts with count_column' do
+ count_sql = /#{Regexp.escape('SELECT count(count_column)')}/i
+ expect { subject }.to make_queries_matching count_sql
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/helm_packages_spec.rb b/spec/requests/api/helm_packages_spec.rb
index 5212e225351..6bd81f64913 100644
--- a/spec/requests/api/helm_packages_spec.rb
+++ b/spec/requests/api/helm_packages_spec.rb
@@ -73,6 +73,17 @@ RSpec.describe API::HelmPackages do
end
end
+ context 'with access to package registry for everyone' do
+ let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace } }
+
+ before do
+ project.update!(visibility: Gitlab::VisibilityLevel::PRIVATE)
+ project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it_behaves_like 'process helm download content request', :anonymous, :success
+ end
+
context 'when an invalid token is passed' do
let(:headers) { basic_auth_header(user.username, 'wrong') }
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index d2fa3dabe69..015a09d41ab 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -70,7 +70,8 @@ RSpec.describe API::ImportGithub do
target_namespace: user.namespace_path,
personal_access_token: token,
repo_id: non_existing_record_id,
- github_hostname: "https://github.somecompany.com/"
+ github_hostname: "https://github.somecompany.com/",
+ optional_stages: { attachments_import: true }
}
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to be_a Hash
@@ -89,4 +90,42 @@ RSpec.describe API::ImportGithub do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
+
+ describe "POST /import/github/cancel" do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://fake.url') }
+
+ context 'when project import was canceled' do
+ before do
+ allow(Import::Github::CancelProjectImportService)
+ .to receive(:new).with(project, user)
+ .and_return(double(execute: { status: :success, project: project }))
+ end
+
+ it 'returns success' do
+ post api("/import/github/cancel", user), params: {
+ project_id: project.id
+ }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when project import was not canceled' do
+ before do
+ allow(Import::Github::CancelProjectImportService)
+ .to receive(:new).with(project, user)
+ .and_return(double(execute: { status: :error, message: 'The import cannot be canceled because it is finished', http_status: :bad_request }))
+ end
+
+ it 'returns error' do
+ post api("/import/github/cancel", user), params: {
+ project_id: project.id
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq('The import cannot be canceled because it is finished')
+ end
+ end
+ end
end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 1f6c241b3f5..32cacfc713c 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -68,24 +68,6 @@ RSpec.describe API::Internal::Base do
expect(response).to have_gitlab_http_status(:unauthorized)
end
-
- context 'when gitlab_shell_jwt_token is disabled' do
- before do
- stub_feature_flags(gitlab_shell_jwt_token: false)
- end
-
- it 'authenticates using a header' do
- perform_request(headers: { API::Helpers::GITLAB_SHARED_SECRET_HEADER => Base64.encode64(secret_token) })
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'returns 401 when no credentials provided' do
- get(api("/internal/check"))
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
- end
end
end
@@ -1033,7 +1015,7 @@ RSpec.describe API::Internal::Base do
context 'git push' do
before do
- stub_const('Gitlab::QueryLimiting::Transaction::THRESHOLD', 120)
+ allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(120)
end
subject { push_with_path(key, full_path: path, changes: '_any') }
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index dd7d32f3565..f5c73846173 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -1164,6 +1164,21 @@ RSpec.describe API::Issues do
expect(json_response['title']).to eq('new issue')
expect(json_response['issue_type']).to eq('issue')
end
+
+ context 'when issue create service returns an unrecoverable error' do
+ before do
+ allow_next_instance_of(Issues::CreateService) do |create_service|
+ allow(create_service).to receive(:execute).and_return(ServiceResponse.error(message: 'some error', http_status: 403))
+ end
+ end
+
+ it 'returns and error message and status code from the service' do
+ post api("/projects/#{project.id}/issues", user), params: { title: 'new issue' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('some error')
+ end
+ end
end
describe 'PUT /projects/:id/issues/:issue_iid' do
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index 7c8994ad9ba..3883eb01391 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -274,9 +274,7 @@ RSpec.describe API::Issues do
post api("/projects/#{project.id}/issues", user),
params: { title: 'g' * 256 }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['title']).to eq([
- 'is too long (maximum is 255 characters)'
- ])
+ expect(json_response['message']['title']).to eq(['is too long (maximum is 255 characters)'])
end
context 'resolving discussions' do
diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb
index 6ea77cc6578..d6c57b460e0 100644
--- a/spec/requests/api/issues/put_projects_issues_spec.rb
+++ b/spec/requests/api/issues/put_projects_issues_spec.rb
@@ -381,9 +381,7 @@ RSpec.describe API::Issues do
put api_for_user, params: { title: 'g' * 256 }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['title']).to eq([
- 'is too long (maximum is 255 characters)'
- ])
+ expect(json_response['message']['title']).to eq(['is too long (maximum is 255 characters)'])
end
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index d7cc6991ef4..d771c1e2dcc 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -254,7 +254,7 @@ RSpec.describe API::MavenPackages do
let(:package_name) { package_in_project ? package_file.file_name : 'foo' }
before do
- allow_fetch_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
+ allow_fetch_cascade_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
end
it_behaves_like params[:shared_examples_name]
@@ -273,7 +273,7 @@ RSpec.describe API::MavenPackages do
before do
stub_feature_flags(maven_central_request_forwarding: false)
- allow_fetch_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
+ allow_fetch_cascade_application_setting(attribute: 'maven_package_requests_forwarding', return_value: forward)
end
it_behaves_like params[:shared_examples_name]
@@ -438,21 +438,7 @@ RSpec.describe API::MavenPackages do
it_behaves_like 'processing HEAD requests', instance_level: true
end
- context 'with check_maven_path_first enabled' do
- before do
- stub_feature_flags(check_maven_path_first: true)
- end
-
- it_behaves_like 'handling groups, subgroups and user namespaces for', 'heading a file'
- end
-
- context 'with check_maven_path_first disabled' do
- before do
- stub_feature_flags(check_maven_path_first: false)
- end
-
- it_behaves_like 'handling groups, subgroups and user namespaces for', 'heading a file'
- end
+ it_behaves_like 'handling groups, subgroups and user namespaces for', 'heading a file'
end
describe 'GET /api/v4/groups/:id/-/packages/maven/*path/:file_name' do
@@ -668,21 +654,7 @@ RSpec.describe API::MavenPackages do
let(:path) { package.maven_metadatum.path }
let(:url) { "/groups/#{group.id}/-/packages/maven/#{path}/#{package_file.file_name}" }
- context 'with check_maven_path_first enabled' do
- before do
- stub_feature_flags(check_maven_path_first: true)
- end
-
- it_behaves_like 'handling groups and subgroups for', 'processing HEAD requests'
- end
-
- context 'with check_maven_path_first disabled' do
- before do
- stub_feature_flags(check_maven_path_first: false)
- end
-
- it_behaves_like 'handling groups and subgroups for', 'processing HEAD requests'
- end
+ it_behaves_like 'handling groups and subgroups for', 'processing HEAD requests'
end
describe 'GET /api/v4/projects/:id/packages/maven/*path/:file_name' do
@@ -774,21 +746,7 @@ RSpec.describe API::MavenPackages do
let(:path) { package.maven_metadatum.path }
let(:url) { "/projects/#{project.id}/packages/maven/#{path}/#{package_file.file_name}" }
- context 'with check_maven_path_first enabled' do
- before do
- stub_feature_flags(check_maven_path_first: true)
- end
-
- it_behaves_like 'processing HEAD requests'
- end
-
- context 'with check_maven_path_first disabled' do
- before do
- stub_feature_flags(check_maven_path_first: false)
- end
-
- it_behaves_like 'processing HEAD requests'
- end
+ it_behaves_like 'processing HEAD requests'
end
describe 'PUT /api/v4/projects/:id/packages/maven/*path/:file_name/authorize' do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 9d153286d14..d593e369d27 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -119,10 +119,13 @@ RSpec.describe API::MergeRequests do
it 'returns an array of all merge_requests' do
get api(endpoint_path, user)
- expect_paginated_array_response([
- merge_request_merged.id, merge_request_locked.id,
- merge_request_closed.id, merge_request.id
- ])
+ expect_paginated_array_response(
+ [
+ merge_request_merged.id,
+ merge_request_locked.id,
+ merge_request_closed.id,
+ merge_request.id
+ ])
expect(json_response.last['title']).to eq(merge_request.title)
expect(json_response.last).to have_key('web_url')
@@ -172,10 +175,13 @@ RSpec.describe API::MergeRequests do
get api(path, user)
- expect_paginated_array_response([
- merge_request_merged.id, merge_request_locked.id,
- merge_request_closed.id, merge_request.id
- ])
+ expect_paginated_array_response(
+ [
+ merge_request_merged.id,
+ merge_request_locked.id,
+ merge_request_closed.id,
+ merge_request.id
+ ])
expect(json_response.last.keys).to match_array(%w(id iid title web_url created_at description project_id state updated_at))
expect(json_response.last['iid']).to eq(merge_request.iid)
expect(json_response.last['title']).to eq(merge_request.title)
@@ -190,10 +196,13 @@ RSpec.describe API::MergeRequests do
get api(path, user)
- expect_paginated_array_response([
- merge_request_merged.id, merge_request_locked.id,
- merge_request_closed.id, merge_request.id
- ])
+ expect_paginated_array_response(
+ [
+ merge_request_merged.id,
+ merge_request_locked.id,
+ merge_request_closed.id,
+ merge_request.id
+ ])
expect(json_response.last['title']).to eq(merge_request.title)
end
@@ -354,10 +363,13 @@ RSpec.describe API::MergeRequests do
get api(path, user)
- expect_paginated_array_response([
- merge_request.id, merge_request_closed.id,
- merge_request_locked.id, merge_request_merged.id
- ])
+ expect_paginated_array_response(
+ [
+ merge_request.id,
+ merge_request_closed.id,
+ merge_request_locked.id,
+ merge_request_merged.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['created_at'] }
expect(response_dates).to eq(response_dates.sort)
end
@@ -367,10 +379,13 @@ RSpec.describe API::MergeRequests do
get api(path, user)
- expect_paginated_array_response([
- merge_request_merged.id, merge_request_locked.id,
- merge_request_closed.id, merge_request.id
- ])
+ expect_paginated_array_response(
+ [
+ merge_request_merged.id,
+ merge_request_locked.id,
+ merge_request_closed.id,
+ merge_request.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['created_at'] }
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -398,10 +413,13 @@ RSpec.describe API::MergeRequests do
get api(path, user)
- expect_paginated_array_response([
- merge_request.id, merge_request_locked.id,
- merge_request_merged.id, merge_request_closed.id
- ])
+ expect_paginated_array_response(
+ [
+ merge_request.id,
+ merge_request_locked.id,
+ merge_request_merged.id,
+ merge_request_closed.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['updated_at'] }
expect(response_dates).to eq(response_dates.sort.reverse)
end
@@ -411,10 +429,13 @@ RSpec.describe API::MergeRequests do
get api(path, user)
- expect_paginated_array_response([
- merge_request.id, merge_request_closed.id,
- merge_request_locked.id, merge_request_merged.id
- ])
+ expect_paginated_array_response(
+ [
+ merge_request.id,
+ merge_request_closed.id,
+ merge_request_locked.id,
+ merge_request_merged.id
+ ])
response_dates = json_response.map { |merge_request| merge_request['created_at'] }
expect(response_dates).to eq(response_dates.sort)
end
@@ -1023,6 +1044,14 @@ RSpec.describe API::MergeRequests do
it_behaves_like 'a non-cached MergeRequest api request', 1
end
+ context 'when the label changes' do
+ before do
+ merge_request.labels << create(:label, project: merge_request.project)
+ end
+
+ it_behaves_like 'a non-cached MergeRequest api request', 1
+ end
+
context 'when the assignees change' do
before do
merge_request.assignees << create(:user)
@@ -3315,7 +3344,7 @@ RSpec.describe API::MergeRequests do
end
it 'handles external issues' do
- jira_project = create(:jira_project, :public, :repository, name: 'JIR_EXT1')
+ jira_project = create(:project, :with_jira_integration, :public, :repository, name: 'JIR_EXT1')
ext_issue = ExternalIssue.new("#{jira_project.name}-123", jira_project)
issue = create(:issue, project: jira_project)
description = "Closes #{ext_issue.to_reference(jira_project)}\ncloses #{issue.to_reference}"
diff --git a/spec/requests/api/metadata_spec.rb b/spec/requests/api/metadata_spec.rb
index dbca06b7f3e..5b6407c689b 100644
--- a/spec/requests/api/metadata_spec.rb
+++ b/spec/requests/api/metadata_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe API::Metadata do
shared_examples_for 'GET /metadata' do
context 'when unauthenticated' do
it 'returns authentication error' do
- get api('/metadata')
+ get api(endpoint)
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -16,7 +16,7 @@ RSpec.describe API::Metadata do
let(:user) { create(:user) }
it 'returns the metadata information' do
- get api('/metadata', user)
+ get api(endpoint, user)
expect_metadata
end
@@ -29,13 +29,13 @@ RSpec.describe API::Metadata do
let(:scopes) { %i(api) }
it 'returns the metadata information' do
- get api('/metadata', personal_access_token: personal_access_token)
+ get api(endpoint, personal_access_token: personal_access_token)
expect_metadata
end
it 'returns "200" response on head requests' do
- head api('/metadata', personal_access_token: personal_access_token)
+ head api(endpoint, personal_access_token: personal_access_token)
expect(response).to have_gitlab_http_status(:ok)
end
@@ -45,13 +45,13 @@ RSpec.describe API::Metadata do
let(:scopes) { %i(read_user) }
it 'returns the metadata information' do
- get api('/metadata', personal_access_token: personal_access_token)
+ get api(endpoint, personal_access_token: personal_access_token)
expect_metadata
end
it 'returns "200" response on head requests' do
- head api('/metadata', personal_access_token: personal_access_token)
+ head api(endpoint, personal_access_token: personal_access_token)
expect(response).to have_gitlab_http_status(:ok)
end
@@ -61,7 +61,7 @@ RSpec.describe API::Metadata do
let(:scopes) { %i(read_repository) }
it 'returns authorization error' do
- get api('/metadata', personal_access_token: personal_access_token)
+ get api(endpoint, personal_access_token: personal_access_token)
expect(response).to have_gitlab_http_status(:forbidden)
end
@@ -76,18 +76,14 @@ RSpec.describe API::Metadata do
end
end
- context 'with graphql enabled' do
- before do
- stub_feature_flags(graphql: true)
- end
+ describe 'GET /metadata' do
+ let(:endpoint) { '/metadata' }
include_examples 'GET /metadata'
end
- context 'with graphql disabled' do
- before do
- stub_feature_flags(graphql: false)
- end
+ describe 'GET /version' do
+ let(:endpoint) { '/version' }
include_examples 'GET /metadata'
end
diff --git a/spec/requests/api/ml/mlflow_spec.rb b/spec/requests/api/ml/mlflow_spec.rb
index 4e7091a5b0f..09e9359c0b3 100644
--- a/spec/requests/api/ml/mlflow_spec.rb
+++ b/spec/requests/api/ml/mlflow_spec.rb
@@ -10,27 +10,35 @@ RSpec.describe API::Ml::Mlflow do
let_it_be(:project) { create(:project, :private) }
let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:another_project) { build(:project).tap { |p| p.add_developer(developer) } }
let_it_be(:experiment) do
create(:ml_experiments, user: project.creator, project: project)
end
let_it_be(:candidate) do
- create(:ml_candidates, user: experiment.user, start_time: 1234, experiment: experiment)
+ create(:ml_candidates,
+ :with_metrics_and_params, user: experiment.user, start_time: 1234, experiment: experiment)
end
- let_it_be(:another_candidate) do
- create(:ml_candidates,
- experiment: create(:ml_experiments, project: create(:project)))
+ let_it_be(:tokens) do
+ {
+ write: create(:personal_access_token, scopes: %w[read_api api], user: developer),
+ read: create(:personal_access_token, scopes: %w[read_api], user: developer),
+ no_access: create(:personal_access_token, scopes: %w[read_user], user: developer),
+ different_user: create(:personal_access_token, scopes: %w[read_api api], user: build(:user))
+ }
end
let(:current_user) { developer }
let(:ff_value) { true }
- let(:scopes) { %w[read_api api] }
+ let(:access_token) { tokens[:write] }
let(:headers) do
- { 'Authorization' => "Bearer #{create(:personal_access_token, scopes: scopes, user: current_user).token}" }
+ { 'Authorization' => "Bearer #{access_token.token}" }
end
- let(:params) { {} }
+ let(:project_id) { project.id }
+ let(:default_params) { {} }
+ let(:params) { default_params }
let(:request) { get api(route), params: params, headers: headers }
before do
@@ -57,7 +65,7 @@ RSpec.describe API::Ml::Mlflow do
shared_examples 'Requires api scope' do
context 'when user has access but token has wrong scope' do
- let(:scopes) { %w[read_api] }
+ let(:access_token) { tokens[:read] }
it { expect(response).to have_gitlab_http_status(:forbidden) }
end
@@ -65,7 +73,7 @@ RSpec.describe API::Ml::Mlflow do
shared_examples 'Requires read_api scope' do
context 'when user has access but token has wrong scope' do
- let(:scopes) { %w[read_user] }
+ let(:access_token) { tokens[:no_access] }
it { expect(response).to have_gitlab_http_status(:forbidden) }
end
@@ -89,7 +97,7 @@ RSpec.describe API::Ml::Mlflow do
end
context 'when user does not have access' do
- let(:current_user) { create(:user) }
+ let(:access_token) { tokens[:different_user] }
it_behaves_like 'Not Found'
end
@@ -101,11 +109,41 @@ RSpec.describe API::Ml::Mlflow do
end
end
- describe 'GET /projects/:id/ml/mflow/api/2.0/mlflow/get' do
+ shared_examples 'run_id param error cases' do
+ context 'when run id is not passed' do
+ let(:params) { {} }
+
+ it_behaves_like 'Bad Request'
+ end
+
+ context 'when run_id is invalid' do
+ let(:params) { default_params.merge(run_id: non_existing_record_iid.to_s) }
+
+ it_behaves_like 'Not Found - Resource Does Not Exist'
+ end
+
+ context 'when run_id is not in the project' do
+ let(:project_id) { another_project.id }
+
+ it_behaves_like 'Not Found - Resource Does Not Exist'
+ end
+ end
+
+ shared_examples 'Bad Request on missing required' do |keys|
+ keys.each do |key|
+ context "when \"#{key}\" is missing" do
+ let(:params) { default_params.tap { |p| p.delete(key) } }
+
+ it_behaves_like 'Bad Request'
+ end
+ end
+ end
+
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/get' do
let(:experiment_iid) { experiment.iid.to_s }
- let(:route) { "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/experiments/get?experiment_id=#{experiment_iid}" }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get?experiment_id=#{experiment_iid}" }
- it 'returns the experiment' do
+ it 'returns the experiment', :aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/get_experiment')
expect(json_response).to include({
@@ -127,7 +165,7 @@ RSpec.describe API::Ml::Mlflow do
end
context 'and experiment_id is not passed' do
- let(:route) { "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/experiments/get" }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get" }
it_behaves_like 'Not Found - Resource Does Not Exist'
end
@@ -138,13 +176,43 @@ RSpec.describe API::Ml::Mlflow do
end
end
- describe 'GET /projects/:id/ml/mflow/api/2.0/mlflow/experiments/get-by-name' do
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/list' do
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/list" }
+
+ it 'returns the experiments' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('ml/list_experiments')
+ expect(json_response).to include({
+ 'experiments' => [
+ 'experiment_id' => experiment.iid.to_s,
+ 'name' => experiment.name,
+ 'lifecycle_stage' => 'active',
+ 'artifact_location' => 'not_implemented'
+ ]
+ })
+ end
+
+ context 'when there are no experiments' do
+ let(:project_id) { another_project.id }
+
+ it 'returns an empty list' do
+ expect(json_response).to include({ 'experiments' => [] })
+ end
+ end
+
+ describe 'Error States' do
+ it_behaves_like 'shared error cases'
+ it_behaves_like 'Requires read_api scope'
+ end
+ end
+
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/get-by-name' do
let(:experiment_name) { experiment.name }
let(:route) do
- "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/experiments/get-by-name?experiment_name=#{experiment_name}"
+ "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get-by-name?experiment_name=#{experiment_name}"
end
- it 'returns the experiment' do
+ it 'returns the experiment', :aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/get_experiment')
expect(json_response).to include({
@@ -165,7 +233,7 @@ RSpec.describe API::Ml::Mlflow do
end
context 'when has access but experiment_name is not passed' do
- let(:route) { "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/experiments/get-by-name" }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/get-by-name" }
it_behaves_like 'Not Found - Resource Does Not Exist'
end
@@ -175,16 +243,16 @@ RSpec.describe API::Ml::Mlflow do
end
end
- describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/experiments/create' do
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/experiments/create' do
let(:route) do
- "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/experiments/create"
+ "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/experiments/create"
end
let(:params) { { name: 'new_experiment' } }
let(:request) { post api(route), params: params, headers: headers }
- it 'creates the experiment' do
- expect(response).to have_gitlab_http_status(:created)
+ it 'creates the experiment', :aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to include('experiment_id' )
end
@@ -206,7 +274,7 @@ RSpec.describe API::Ml::Mlflow do
end
context 'when project does not exist' do
- let(:route) { "/projects/#{non_existing_record_id}/ml/mflow/api/2.0/mlflow/experiments/create" }
+ let(:route) { "/projects/#{non_existing_record_id}/ml/mlflow/api/2.0/mlflow/experiments/create" }
it_behaves_like 'Not Found', '404 Project Not Found'
end
@@ -217,15 +285,12 @@ RSpec.describe API::Ml::Mlflow do
end
describe 'Runs' do
- describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/runs/create' do
- let(:route) do
- "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/runs/create"
- end
-
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/create' do
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/create" }
let(:params) { { experiment_id: experiment.iid.to_s, start_time: Time.now.to_i } }
let(:request) { post api(route), params: params, headers: headers }
- it 'creates the run' do
+ it 'creates the run', :aggregate_failures do
expected_properties = {
'experiment_id' => params[:experiment_id],
'user_id' => current_user.id.to_s,
@@ -235,9 +300,10 @@ RSpec.describe API::Ml::Mlflow do
'lifecycle_stage' => "active"
}
- expect(response).to have_gitlab_http_status(:created)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/run')
- expect(json_response['run']).to include('info' => hash_including(**expected_properties), 'data' => {})
+ expect(json_response['run']).to include('info' => hash_including(**expected_properties),
+ 'data' => { 'metrics' => [], 'params' => [] })
end
describe 'Error States' do
@@ -253,21 +319,22 @@ RSpec.describe API::Ml::Mlflow do
it_behaves_like 'Not Found - Resource Does Not Exist'
end
+ context 'when experiment exists but is not part of the project' do
+ let(:project_id) { another_project.id }
+
+ it_behaves_like 'Not Found - Resource Does Not Exist'
+ end
+
it_behaves_like 'shared error cases'
it_behaves_like 'Requires api scope'
end
end
- describe 'GET /projects/:id/ml/mflow/api/2.0/mlflow/runs/get' do
- let_it_be(:route) do
- "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/runs/get"
- end
-
- let_it_be(:candidate) { create(:ml_candidates, user: experiment.user, start_time: 1234, experiment: experiment) }
-
- let(:params) { { 'run_id' => candidate.iid } }
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/runs/get' do
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/get" }
+ let(:default_params) { { 'run_id' => candidate.iid } }
- it 'gets the run' do
+ it 'gets the run', :aggregate_failures do
expected_properties = {
'experiment_id' => candidate.experiment.iid.to_s,
'user_id' => candidate.user.id.to_s,
@@ -277,90 +344,173 @@ RSpec.describe API::Ml::Mlflow do
'lifecycle_stage' => "active"
}
- expect(response).to have_gitlab_http_status(:success)
+ expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('ml/run')
- expect(json_response['run']).to include('info' => hash_including(**expected_properties), 'data' => {})
+ expect(json_response['run']).to include(
+ 'info' => hash_including(**expected_properties),
+ 'data' => {
+ 'metrics' => [
+ hash_including('key' => candidate.metrics[0].name),
+ hash_including('key' => candidate.metrics[1].name)
+ ],
+ 'params' => [
+ { 'key' => candidate.params[0].name, 'value' => candidate.params[0].value },
+ { 'key' => candidate.params[1].name, 'value' => candidate.params[1].value }
+ ]
+ })
end
describe 'Error States' do
- context 'when run id is not passed' do
- let(:params) { {} }
+ it_behaves_like 'run_id param error cases'
+ it_behaves_like 'shared error cases'
+ it_behaves_like 'Requires read_api scope'
+ end
+ end
- it_behaves_like 'Not Found - Resource Does Not Exist'
- end
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/update' do
+ let(:default_params) { { run_id: candidate.iid.to_s, status: 'FAILED', end_time: Time.now.to_i } }
+ let(:request) { post api(route), params: params, headers: headers }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/update" }
- context 'when run id does not exist' do
- let(:params) { { run_id: non_existing_record_iid.to_s } }
+ it 'updates the run', :aggregate_failures do
+ expected_properties = {
+ 'experiment_id' => candidate.experiment.iid.to_s,
+ 'user_id' => candidate.user.id.to_s,
+ 'start_time' => candidate.start_time,
+ 'end_time' => params[:end_time],
+ 'artifact_uri' => 'not_implemented',
+ 'status' => 'FAILED',
+ 'lifecycle_stage' => 'active'
+ }
- it_behaves_like 'Not Found - Resource Does Not Exist'
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('ml/update_run')
+ expect(json_response).to include('run_info' => hash_including(**expected_properties))
+ end
+
+ describe 'Error States' do
+ context 'when status is invalid' do
+ let(:params) { default_params.merge(status: 'YOLO') }
+
+ it_behaves_like 'Bad Request'
end
- context 'when run id exists but does not belong to project' do
- let(:params) { { run_id: another_candidate.iid.to_s } }
+ context 'when end_time is invalid' do
+ let(:params) { default_params.merge(end_time: 's') }
- it_behaves_like 'Not Found - Resource Does Not Exist'
+ it_behaves_like 'Bad Request'
end
it_behaves_like 'shared error cases'
- it_behaves_like 'Requires read_api scope'
+ it_behaves_like 'Requires api scope'
+ it_behaves_like 'run_id param error cases'
end
end
- end
- describe 'POST /projects/:id/ml/mflow/api/2.0/mlflow/runs/update' do
- let(:route) { "/projects/#{project.id}/ml/mflow/api/2.0/mlflow/runs/update" }
- let(:params) { { run_id: candidate.iid.to_s, status: 'FAILED', end_time: Time.now.to_i } }
- let(:request) { post api(route), params: params, headers: headers }
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-metric' do
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-metric" }
+ let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 10.0, timestamp: Time.now.to_i } }
+ let(:request) { post api(route), params: params, headers: headers }
- it 'updates the run' do
- expected_properties = {
- 'experiment_id' => candidate.experiment.iid.to_s,
- 'user_id' => candidate.user.id.to_s,
- 'start_time' => candidate.start_time,
- 'end_time' => params[:end_time],
- 'artifact_uri' => 'not_implemented',
- 'status' => 'FAILED',
- 'lifecycle_stage' => 'active'
- }
-
- expect(response).to have_gitlab_http_status(:success)
- expect(response).to match_response_schema('ml/update_run')
- expect(json_response).to include('run_info' => hash_including(**expected_properties))
+ it 'logs the metric', :aggregate_failures do
+ candidate.metrics.reload
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ expect(candidate.metrics.length).to eq(3)
+ end
+
+ describe 'Error Cases' do
+ it_behaves_like 'shared error cases'
+ it_behaves_like 'Requires api scope'
+ it_behaves_like 'run_id param error cases'
+ it_behaves_like 'Bad Request on missing required', [:key, :value, :timestamp]
+ end
end
- describe 'Error States' do
- context 'when run id is not passed' do
- let(:params) { {} }
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-parameter' do
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-parameter" }
+ let(:default_params) { { run_id: candidate.iid.to_s, key: 'some_key', value: 'value' } }
+ let(:request) { post api(route), params: params, headers: headers }
- it_behaves_like 'Not Found - Resource Does Not Exist'
+ it 'logs the parameter', :aggregate_failures do
+ candidate.params.reload
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ expect(candidate.params.length).to eq(3)
end
- context 'when run id does not exist' do
- let(:params) { { run_id: non_existing_record_iid.to_s } }
+ describe 'Error Cases' do
+ context 'when parameter was already logged' do
+ let(:params) { default_params.tap { |p| p[:key] = candidate.params[0].name } }
- it_behaves_like 'Not Found - Resource Does Not Exist'
+ it_behaves_like 'Bad Request'
+ end
+
+ it_behaves_like 'shared error cases'
+ it_behaves_like 'Requires api scope'
+ it_behaves_like 'run_id param error cases'
+ it_behaves_like 'Bad Request on missing required', [:key, :value]
end
+ end
- context 'when run id exists but does not belong to project' do
- let(:params) { { run_id: another_candidate.iid.to_s } }
+ describe 'POST /projects/:id/ml/mlflow/api/2.0/mlflow/runs/log-batch' do
+ let(:candidate2) do
+ create(:ml_candidates, user: experiment.user, start_time: 1234, experiment: experiment)
+ end
- it_behaves_like 'Not Found - Resource Does Not Exist'
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/runs/log-batch" }
+ let(:default_params) do
+ {
+ run_id: candidate2.iid.to_s,
+ metrics: [
+ { key: 'mae', value: 2.5, timestamp: 1552550804 },
+ { key: 'rmse', value: 2.7, timestamp: 1552550804 }
+ ],
+ params: [{ key: 'model_class', value: 'LogisticRegression' }]
+ }
end
- context 'when run id exists but status in invalid' do
- let(:params) { { run_id: candidate.iid.to_s, status: 'YOLO', end_time: Time.now.to_i } }
+ let(:request) { post api(route), params: params, headers: headers }
- it_behaves_like 'Bad Request'
+ it 'logs parameters and metrics', :aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ expect(candidate2.params.size).to eq(1)
+ expect(candidate2.metrics.size).to eq(2)
end
- context 'when run id exists but end_time is invalid' do
- let(:params) { { run_id: candidate.iid.to_s, status: 'FAILED', end_time: 's' } }
+ context 'when parameter was already logged' do
+ let(:params) do
+ default_params.tap { |p| p[:params] = [{ key: 'hello', value: 'a' }, { key: 'hello', value: 'b' }] }
+ end
- it_behaves_like 'Bad Request'
+ it 'does not log', :aggregate_failures do
+ candidate.params.reload
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(candidate2.params.size).to eq(1)
+ end
end
- it_behaves_like 'shared error cases'
- it_behaves_like 'Requires api scope'
+ describe 'Error Cases' do
+ context 'when required metric key is missing' do
+ let(:params) { default_params.tap { |p| p[:metrics] = [p[:metrics][0].delete(:key)] } }
+
+ it_behaves_like 'Bad Request'
+ end
+
+ context 'when required param key is missing' do
+ let(:params) { default_params.tap { |p| p[:params] = [p[:params][0].delete(:key)] } }
+
+ it_behaves_like 'Bad Request'
+ end
+
+ it_behaves_like 'shared error cases'
+ it_behaves_like 'Requires api scope'
+ it_behaves_like 'run_id param error cases'
+ end
end
end
end
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index cd4e8b30d8f..8ef4e899193 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -259,7 +259,15 @@ RSpec.describe API::PagesDomains do
shared_examples_for 'post pages domains' do
it 'creates a new pages domain' do
- post api(route, user), params: params
+ expect { post api(route, user), params: params }
+ .to publish_event(PagesDomains::PagesDomainCreatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: params[:domain]
+ )
+
pages_domain = PagesDomain.find_by(domain: json_response['domain'])
expect(response).to have_gitlab_http_status(:created)
@@ -378,6 +386,17 @@ RSpec.describe API::PagesDomains do
expect(pages_domain_secure.auto_ssl_enabled).to be false
end
+ it 'publishes PagesDomainUpdatedEvent event' do
+ expect { put api(route_secure_domain, user), params: { certificate: nil, key: nil } }
+ .to publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: pages_domain_secure.domain
+ )
+ end
+
it 'updates pages domain adding certificate' do
put api(route_domain, user), params: params_secure
pages_domain.reload
@@ -446,22 +465,29 @@ RSpec.describe API::PagesDomains do
end.to change { pages_domain.reload.certificate_source }.from('gitlab_provided').to('user_provided')
end
- it 'fails to update pages domain adding certificate without key' do
- put api(route_domain, user), params: params_secure_nokey
+ context 'with invalid params' do
+ it 'fails to update pages domain adding certificate without key' do
+ put api(route_domain, user), params: params_secure_nokey
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
- it 'fails to update pages domain adding certificate with missing chain' do
- put api(route_domain, user), params: pages_domain_secure_missing_chain_params.slice(:certificate)
+ it 'does not publish PagesDomainUpdatedEvent event' do
+ expect { put api(route_domain, user), params: params_secure_nokey }
+ .not_to publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ end
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ it 'fails to update pages domain adding certificate with missing chain' do
+ put api(route_domain, user), params: pages_domain_secure_missing_chain_params.slice(:certificate)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
- it 'fails to update pages domain with key missmatch' do
- put api(route_secure_domain, user), params: pages_domain_secure_key_missmatch_params.slice(:certificate, :key)
+ it 'fails to update pages domain with key missmatch' do
+ put api(route_secure_domain, user), params: pages_domain_secure_key_missmatch_params.slice(:certificate, :key)
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
end
@@ -523,7 +549,15 @@ RSpec.describe API::PagesDomains do
describe 'DELETE /projects/:project_id/pages/domains/:domain' do
shared_examples_for 'delete pages domain' do
it 'deletes a pages domain' do
- delete api(route_domain, user)
+ expect { delete api(route_domain, user) }
+ .to change(PagesDomain, :count).by(-1)
+ .and publish_event(PagesDomains::PagesDomainDeletedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: pages_domain.domain
+ )
expect(response).to have_gitlab_http_status(:no_content)
end
diff --git a/spec/requests/api/personal_access_tokens/self_revocation_spec.rb b/spec/requests/api/personal_access_tokens/self_information_spec.rb
index f829b39cc1e..bdfac3ed14f 100644
--- a/spec/requests/api/personal_access_tokens/self_revocation_spec.rb
+++ b/spec/requests/api/personal_access_tokens/self_information_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
-RSpec.describe API::PersonalAccessTokens::SelfRevocation do
+RSpec.describe API::PersonalAccessTokens::SelfInformation do
+ let(:path) { '/personal_access_tokens/self' }
+ let(:token) { create(:personal_access_token, user: current_user) }
+
let_it_be(:current_user) { create(:user) }
describe 'DELETE /personal_access_tokens/self' do
- let(:path) { '/personal_access_tokens/self' }
- let(:token) { create(:personal_access_token, user: current_user) }
-
subject(:delete_token) { delete api(path, personal_access_token: token) }
shared_examples 'revoking token succeeds' do
@@ -66,4 +66,37 @@ RSpec.describe API::PersonalAccessTokens::SelfRevocation do
end
end
end
+
+ describe 'GET /personal_access_tokens/self' do
+ Gitlab::Auth.all_available_scopes.each do |scope|
+ context "with a '#{scope}' scoped token" do
+ let(:token) { create(:personal_access_token, scopes: [scope], user: current_user) }
+
+ it 'shows token info' do
+ get api(path, personal_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['scopes']).to match_array([scope.to_s])
+ end
+ end
+ end
+
+ context 'when token is invalid' do
+ it 'returns 401' do
+ get api(path, personal_access_token: instance_double(PersonalAccessToken, token: 'invalidtoken'))
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when token is expired' do
+ it 'returns 401' do
+ token = create(:personal_access_token, expires_at: 1.day.ago, user: current_user)
+
+ get api(path, personal_access_token: token)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/personal_access_tokens_spec.rb b/spec/requests/api/personal_access_tokens_spec.rb
index 37b5a594f2a..31c4e8803e3 100644
--- a/spec/requests/api/personal_access_tokens_spec.rb
+++ b/spec/requests/api/personal_access_tokens_spec.rb
@@ -4,14 +4,34 @@ require 'spec_helper'
RSpec.describe API::PersonalAccessTokens do
let_it_be(:path) { '/personal_access_tokens' }
- let_it_be(:token1) { create(:personal_access_token) }
- let_it_be(:token2) { create(:personal_access_token) }
- let_it_be(:token_impersonated) { create(:personal_access_token, impersonation: true, user: token1.user) }
- let_it_be(:current_user) { create(:user) }
describe 'GET /personal_access_tokens' do
+ using RSpec::Parameterized::TableSyntax
+
+ def map_id(json_response)
+ json_response.map { |pat| pat['id'] }
+ end
+
+ shared_examples 'response as expected' do |params|
+ subject { get api(path, personal_access_token: current_users_token), params: params }
+
+ it "status, count and result as expected" do
+ subject
+
+ if status == :bad_request
+ expect(json_response).to eq(result)
+ elsif status == :ok
+ expect(map_id(json_response)).to a_collection_containing_exactly(*result)
+ end
+
+ expect(response).to have_gitlab_http_status(status)
+ expect(json_response.count).to eq(result_count)
+ end
+ end
+
context 'logged in as an Administrator' do
let_it_be(:current_user) { create(:admin) }
+ let_it_be(:current_users_token) { create(:personal_access_token, user: current_user) }
it 'returns all PATs by default' do
get api(path, current_user)
@@ -21,60 +41,348 @@ RSpec.describe API::PersonalAccessTokens do
end
context 'filtered with user_id parameter' do
+ let_it_be(:token) { create(:personal_access_token) }
+ let_it_be(:token_impersonated) { create(:personal_access_token, impersonation: true, user: token.user) }
+
it 'returns only PATs belonging to that user' do
- get api(path, current_user), params: { user_id: token1.user.id }
+ get api(path, current_user), params: { user_id: token.user.id }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(2)
- expect(json_response.first['user_id']).to eq(token1.user.id)
+ expect(json_response.first['user_id']).to eq(token.user.id)
expect(json_response.last['id']).to eq(token_impersonated.id)
end
end
- context 'logged in as a non-Administrator' do
- let_it_be(:current_user) { create(:user) }
+ context 'filter with revoked parameter' do
+ let_it_be(:revoked_token) { create(:personal_access_token, revoked: true) }
+ let_it_be(:not_revoked_token1) { create(:personal_access_token, revoked: false) }
+ let_it_be(:not_revoked_token2) { create(:personal_access_token, revoked: false) }
+
+ where(:revoked, :status, :result_count, :result) do
+ true | :ok | 1 | lazy { [revoked_token.id] }
+ false | :ok | 3 | lazy { [not_revoked_token1.id, not_revoked_token2.id, current_users_token.id] }
+ 'asdf' | :bad_request | 1 | { "error" => "revoked is invalid" }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', revoked: params[:revoked]
+ end
+ end
+
+ context 'filter with active parameter' do
+ let_it_be(:inactive_token1) { create(:personal_access_token, revoked: true) }
+ let_it_be(:inactive_token2) { create(:personal_access_token, expires_at: Time.new(2022, 01, 01, 00, 00, 00)) }
+ let_it_be(:active_token) { create(:personal_access_token) }
+
+ where(:state, :status, :result_count, :result) do
+ 'inactive' | :ok | 2 | lazy { [inactive_token1.id, inactive_token2.id] }
+ 'active' | :ok | 2 | lazy { [active_token.id, current_users_token.id] }
+ 'asdf' | :bad_request | 1 | { "error" => "state does not have a valid value" }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', state: params[:state]
+ end
+ end
+
+ context 'filter with created parameter' do
+ let_it_be(:token1) { create(:personal_access_token, created_at: DateTime.new(2022, 01, 01, 12, 30, 25) ) }
+
+ context 'test created_before' do
+ where(:created_at, :status, :result_count, :result) do
+ '2022-01-02' | :ok | 1 | lazy { [token1.id] }
+ '2022-01-01' | :ok | 0 | lazy { [] }
+ '2022-01-01T12:30:24' | :ok | 0 | lazy { [] }
+ '2022-01-01T12:30:25' | :ok | 1 | lazy { [token1.id] }
+ '2022-01-01T12:30:26' | :ok | 1 | lazy { [token1.id] }
+ 'asdf' | :bad_request | 1 | { "error" => "created_before is invalid" }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', created_before: params[:created_at]
+ end
+ end
+
+ context 'test created_after' do
+ where(:created_at, :status, :result_count, :result) do
+ '2022-01-03' | :ok | 1 | lazy { [current_users_token.id] }
+ '2022-01-01' | :ok | 2 | lazy { [token1.id, current_users_token.id] }
+ '2022-01-01T12:30:25' | :ok | 2 | lazy { [token1.id, current_users_token.id] }
+ '2022-01-01T12:30:26' | :ok | 1 | lazy { [current_users_token.id] }
+ (DateTime.now + 1).to_s | :ok | 0 | lazy { [] }
+ 'asdf' | :bad_request | 1 | { "error" => "created_after is invalid" }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', created_after: params[:created_at]
+ end
+ end
+ end
+
+ context 'filter with last_used parameter' do
+ let_it_be(:token1) { create(:personal_access_token, last_used_at: DateTime.new(2022, 01, 01, 12, 30, 25) ) }
+ let_it_be(:never_used_token) { create(:personal_access_token) }
+
+ context 'test last_used_before' do
+ where(:last_used_at, :status, :result_count, :result) do
+ '2022-01-02' | :ok | 1 | lazy { [token1.id] }
+ '2022-01-01' | :ok | 0 | lazy { [] }
+ '2022-01-01T12:30:24' | :ok | 0 | lazy { [] }
+ '2022-01-01T12:30:25' | :ok | 1 | lazy { [token1.id] }
+ '2022-01-01T12:30:26' | :ok | 1 | lazy { [token1.id] }
+ 'asdf' | :bad_request | 1 | { "error" => "last_used_before is invalid" }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', last_used_before: params[:last_used_at]
+ end
+ end
+
+ context 'test last_used_after' do
+ where(:last_used_at, :status, :result_count, :result) do
+ '2022-01-03' | :ok | 1 | lazy { [current_users_token.id] }
+ '2022-01-01' | :ok | 2 | lazy { [token1.id, current_users_token.id] }
+ '2022-01-01T12:30:26' | :ok | 1 | lazy { [current_users_token.id] }
+ '2022-01-01T12:30:25' | :ok | 2 | lazy { [token1.id, current_users_token.id] }
+ (DateTime.now + 1).to_s | :ok | 0 | lazy { [] }
+ 'asdf' | :bad_request | 1 | { "error" => "last_used_after is invalid" }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', last_used_after: params[:last_used_at]
+ end
+ end
+ end
+
+ context 'filter with search parameter' do
+ let_it_be(:token1) { create(:personal_access_token, name: 'test_1') }
+ let_it_be(:token2) { create(:personal_access_token, name: 'test_2') }
+ let_it_be(:token3) { create(:personal_access_token, name: '') }
+
+ where(:pattern, :status, :result_count, :result) do
+ 'test' | :ok | 2 | lazy { [token1.id, token2.id] }
+ '' | :ok | 4 | lazy { [token1.id, token2.id, token3.id, current_users_token.id] }
+ 'test_1' | :ok | 1 | lazy { [token1.id] }
+ 'asdf' | :ok | 0 | lazy { [] }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', search: params[:pattern]
+ end
+ end
+
+ context 'filter created_before/created_after combined with last_used_before/last_used_after' do
+ let_it_be(:date) { DateTime.new(2022, 01, 02) }
+ let_it_be(:token1) { create(:personal_access_token, created_at: date, last_used_at: date) }
+
+ where(:date_before, :date_after, :status, :result_count, :result) do
+ '2022-01-03' | '2022-01-01' | :ok | 1 | lazy { [token1.id] }
+ '2022-01-01' | '2022-01-03' | :ok | 0 | lazy { [] }
+ '2022-01-03' | nil | :ok | 1 | lazy { [token1.id] }
+ nil | '2022-01-01' | :ok | 2 | lazy { [token1.id, current_users_token.id] }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', { created_before: params[:date_before],
+ created_after: params[:date_after] }
+ it_behaves_like 'response as expected', { last_used_before: params[:date_before],
+ last_used_after: params[:date_after] }
+ end
+ end
+
+ context 'filter created_before and created_after combined is valid' do
+ let_it_be(:token1) { create(:personal_access_token, created_at: DateTime.new(2022, 01, 02)) }
+
+ where(:created_before, :created_after, :status, :result) do
+ '2022-01-02' | '2022-01-02' | :ok | lazy { [token1.id] }
+ '2022-01-03' | '2022-01-01' | :ok | lazy { [token1.id] }
+ '2022-01-01' | '2022-01-03' | :ok | lazy { [] }
+ '2022-01-03' | nil | :ok | lazy { [token1.id] }
+ nil | '2022-01-01' | :ok | lazy { [token1.id] }
+ end
+
+ with_them do
+ it "returns all valid tokens" do
+ get api(path, personal_access_token: current_users_token),
+ params: { created_before: created_before, created_after: created_after }
+
+ expect(response).to have_gitlab_http_status(status)
+
+ expect(json_response.map { |pat| pat['id'] } ).to include(*result) if status == :ok
+ end
+ end
+ end
+
+ context 'filter last_used_before and last_used_after combined is valid' do
+ let_it_be(:token1) { create(:personal_access_token, last_used_at: DateTime.new(2022, 01, 02) ) }
+
+ where(:last_used_before, :last_used_after, :status, :result) do
+ '2022-01-02' | '2022-01-02' | :ok | lazy { [token1.id] }
+ '2022-01-03' | '2022-01-01' | :ok | lazy { [token1.id] }
+ '2022-01-01' | '2022-01-03' | :ok | lazy { [] }
+ '2022-01-03' | nil | :ok | lazy { [token1.id] }
+ nil | '2022-01-01' | :ok | lazy { [token1.id] }
+ end
+
+ with_them do
+ it "returns all valid tokens" do
+ get api(path, personal_access_token: current_users_token),
+ params: { last_used_before: last_used_before, last_used_after: last_used_after }
+
+ expect(response).to have_gitlab_http_status(status)
+
+ expect(json_response.map { |pat| pat['id'] } ).to include(*result) if status == :ok
+ end
+ end
+ end
+ end
+
+ context 'logged in as a non-Administrator' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:current_users_token) { create(:personal_access_token, user: current_user) }
+
+ it 'returns all PATs belonging to the signed-in user' do
+ get api(path, personal_access_token: current_users_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.count).to eq(1)
+ expect(json_response.map { |r| r['id'] }.uniq).to contain_exactly(current_users_token.id)
+ expect(json_response.map { |r| r['user_id'] }.uniq).to contain_exactly(current_user.id)
+ end
+
+ context 'filtered with user_id parameter' do
let_it_be(:user) { create(:user) }
- let_it_be(:token) { create(:personal_access_token, user: current_user) }
- let_it_be(:other_token) { create(:personal_access_token, user: user) }
- let_it_be(:token_impersonated) { create(:personal_access_token, impersonation: true, user: current_user) }
- it 'returns all PATs belonging to the signed-in user' do
- get api(path, current_user, personal_access_token: token)
+ it 'returns PATs belonging to the specific user' do
+ get api(path, current_user, personal_access_token: current_users_token), params: { user_id: current_user.id }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(1)
+ expect(json_response.map { |r| r['id'] }.uniq).to contain_exactly(current_users_token.id)
expect(json_response.map { |r| r['user_id'] }.uniq).to contain_exactly(current_user.id)
end
- context 'filtered with user_id parameter' do
- it 'returns PATs belonging to the specific user' do
- get api(path, current_user, personal_access_token: token), params: { user_id: current_user.id }
+ it 'is unauthorized if filtered by a user other than current_user' do
+ get api(path, current_user, personal_access_token: current_users_token), params: { user_id: user.id }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.count).to eq(1)
- expect(json_response.map { |r| r['user_id'] }.uniq).to contain_exactly(current_user.id)
- end
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
- it 'is unauthorized if filtered by a user other than current_user' do
- get api(path, current_user, personal_access_token: token), params: { user_id: user.id }
+ context 'filter with revoked parameter' do
+ let_it_be(:users_revoked_token) { create(:personal_access_token, revoked: true, user: current_user) }
+ let_it_be(:not_revoked_token) { create(:personal_access_token, revoked: false) }
+ let_it_be(:other_revoked_token) { create(:personal_access_token, revoked: true) }
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ where(:revoked, :status, :result_count, :result) do
+ true | :ok | 1 | lazy { [users_revoked_token.id] }
+ false | :ok | 1 | lazy { [current_users_token.id] }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', revoked: params[:revoked]
end
end
- context 'not authenticated' do
- it 'is forbidden' do
- get api(path)
+ context 'filter with active parameter' do
+ let_it_be(:users_inactive_token) { create(:personal_access_token, revoked: true, user: current_user) }
+ let_it_be(:inactive_token) { create(:personal_access_token, expires_at: Time.new(2022, 01, 01, 00, 00, 00)) }
+ let_it_be(:other_active_token) { create(:personal_access_token) }
- expect(response).to have_gitlab_http_status(:unauthorized)
+ where(:state, :status, :result_count, :result) do
+ 'inactive' | :ok | 1 | lazy { [users_inactive_token.id] }
+ 'active' | :ok | 1 | lazy { [current_users_token.id] }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', state: params[:state]
+ end
+ end
+
+ # The created_before filter has been extensively tested in the 'logged in as administrator' section.
+ # Here it is only tested whether PATs to which the user has no access right are excluded from the filter function.
+ context 'filter with created parameter' do
+ let_it_be(:token1) do
+ create(:personal_access_token, created_at: DateTime.new(2022, 01, 02, 12, 30, 25), user: current_user )
+ end
+
+ let_it_be(:token2) { create(:personal_access_token, created_at: DateTime.new(2022, 01, 02, 12, 30, 25)) }
+ let_it_be(:status) { :ok }
+
+ context 'created_before' do
+ let_it_be(:result_count) { 1 }
+ let_it_be(:result) { [token1.id] }
+
+ it_behaves_like 'response as expected', created_before: '2022-01-03'
+ end
+
+ context 'created_after' do
+ let_it_be(:result_count) { 2 }
+ let_it_be(:result) { [token1.id, current_users_token.id] }
+
+ it_behaves_like 'response as expected', created_after: '2022-01-01'
+ end
+ end
+
+ # The last_used_before filter has been extensively tested in the 'logged in as administrator' section.
+ # Here it is only tested whether PATs to which the user has no access right are excluded from the filter function.
+ context 'filter with last_used' do
+ let_it_be(:token1) do
+ create(:personal_access_token, last_used_at: DateTime.new(2022, 01, 01, 12, 30, 25), user: current_user)
+ end
+
+ let_it_be(:token2) { create(:personal_access_token, last_used_at: DateTime.new(2022, 01, 01, 12, 30, 25) ) }
+ let_it_be(:never_used_token) { create(:personal_access_token) }
+ let_it_be(:status) { :ok }
+
+ context 'last_used_before' do
+ let_it_be(:result_count) { 1 }
+ let_it_be(:result) { [token1.id] }
+
+ it_behaves_like 'response as expected', last_used_before: '2022-01-02'
+ end
+
+ context 'last_used_after' do
+ let_it_be(:result_count) { 2 }
+ let_it_be(:result) { [token1.id, current_users_token.id] }
+
+ it_behaves_like 'response as expected', last_used_after: '2022-01-01'
+ end
+ end
+
+ # The search filter has been extensively tested in the 'logged in as administrator' section.
+ # Here it is only tested whether PATs to which the user has no access right are excluded from the filter function.
+ context 'filter with search parameter' do
+ let_it_be(:token1) { create(:personal_access_token, name: 'test_1', user: current_user) }
+ let_it_be(:token2) { create(:personal_access_token, name: 'test_1') }
+ let_it_be(:token3) { create(:personal_access_token, name: '') }
+
+ where(:pattern, :status, :result_count, :result) do
+ 'test' | :ok | 1 | lazy { [token1.id] }
+ '' | :ok | 2 | lazy { [token1.id, current_users_token.id] }
+ 'test_1' | :ok | 1 | lazy { [token1.id] }
+ end
+
+ with_them do
+ it_behaves_like 'response as expected', search: params[:pattern]
end
end
end
+
+ context 'not authenticated' do
+ it 'is forbidden' do
+ get api(path)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
end
describe 'GET /personal_access_tokens/:id' do
+ let_it_be(:current_user) { create(:user) }
let_it_be(:user_token) { create(:personal_access_token, user: current_user) }
+ let_it_be(:token1) { create(:personal_access_token) }
let_it_be(:user_read_only_token) { create(:personal_access_token, scopes: ['read_repository'], user: current_user) }
let_it_be(:user_token_path) { "/personal_access_tokens/#{user_token.id}" }
let_it_be(:invalid_path) { "/personal_access_tokens/#{non_existing_record_id}" }
@@ -136,6 +444,9 @@ RSpec.describe API::PersonalAccessTokens do
end
describe 'DELETE /personal_access_tokens/:id' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:token1) { create(:personal_access_token) }
+
let(:path) { "/personal_access_tokens/#{token1.id}" }
context 'when current_user is an administrator', :enable_admin_mode do
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 1335fa02aaf..0b4a96896d6 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -90,11 +90,10 @@ ci_cd_settings:
- id
- project_id
- group_runners_enabled
- - merge_pipelines_enabled
- merge_trains_enabled
- merge_pipelines_enabled
- - merge_trains_enabled
- auto_rollback_enabled
+ - inbound_job_token_scope_enabled
remapped_attributes:
default_git_depth: ci_default_git_depth
forward_deployment_enabled: ci_forward_deployment_enabled
@@ -129,7 +128,6 @@ project_feature:
- infrastructure_access_level
- feature_flags_access_level
- environments_access_level
- - releases_access_level
- project_id
- updated_at
computed_attributes:
@@ -149,6 +147,7 @@ project_setting:
- has_vulnerabilities
- legacy_open_source_license_available
- prevent_merge_without_jira_issue
+ - only_allow_merge_if_all_status_checks_passed
- warn_about_potentially_unwanted_characters
- previous_default_branch
- project_id
@@ -161,6 +160,8 @@ project_setting:
- target_platforms
- selective_code_owner_removals
- show_diff_preview_in_email
+ - suggested_reviewers_enabled
+ - jitsu_key
build_service_desk_setting: # service_desk_setting
unexposed_attributes:
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 7ad1ce0ede9..38f7d6e3eba 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -221,6 +221,16 @@ RSpec.describe API::Projects do
expect(project_response['container_registry_enabled']).to eq(false)
end
+ it 'includes releases_access_level', :aggregate_failures do
+ project.project_feature.update!(releases_access_level: ProjectFeature::DISABLED)
+
+ get api('/projects', user)
+ project_response = json_response.find { |p| p['id'] == project.id }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(project_response['releases_access_level']).to eq('disabled')
+ end
+
context 'when some projects are in a group' do
before do
create(:project, :public, group: create(:group))
@@ -1171,6 +1181,7 @@ RSpec.describe API::Projects do
attrs[:analytics_access_level] = 'disabled'
attrs[:container_registry_access_level] = 'private'
attrs[:security_and_compliance_access_level] = 'private'
+ attrs[:releases_access_level] = 'disabled'
end
post api('/projects', user), params: project
@@ -1180,7 +1191,7 @@ RSpec.describe API::Projects do
project.each_pair do |k, v|
next if %i[
has_external_issue_tracker has_external_wiki issues_enabled merge_requests_enabled wiki_enabled storage_version
- container_registry_access_level
+ container_registry_access_level releases_access_level
].include?(k)
expect(json_response[k.to_s]).to eq(v)
@@ -1195,6 +1206,7 @@ RSpec.describe API::Projects do
expect(project.project_feature.analytics_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.container_registry_access_level).to eq(ProjectFeature::PRIVATE)
expect(project.project_feature.security_and_compliance_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project.project_feature.releases_access_level).to eq(ProjectFeature::DISABLED)
end
it 'assigns container_registry_enabled to project', :aggregate_failures do
@@ -2333,6 +2345,7 @@ RSpec.describe API::Projects do
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['operations_access_level']).to be_present
expect(json_response['security_and_compliance_access_level']).to be_present
+ expect(json_response['releases_access_level']).to be_present
end
it 'exposes all necessary attributes' do
@@ -2402,6 +2415,7 @@ RSpec.describe API::Projects do
expect(json_response['builds_access_level']).to be_present
expect(json_response['operations_access_level']).to be_present
expect(json_response['security_and_compliance_access_level']).to be_present
+ expect(json_response['releases_access_level']).to be_present
expect(json_response).to have_key('emails_disabled')
expect(json_response['resolve_outdated_diff_discussions']).to eq(project.resolve_outdated_diff_discussions)
expect(json_response['remove_source_branch_after_merge']).to be_truthy
@@ -2516,7 +2530,7 @@ RSpec.describe API::Projects do
'name' => project.repository.license.name,
'nickname' => project.repository.license.nickname,
'html_url' => project.repository.license.url,
- 'source_url' => project.repository.license.meta['source']
+ 'source_url' => nil
})
end
@@ -3386,6 +3400,14 @@ RSpec.describe API::Projects do
expect(Project.find_by(path: project[:path]).analytics_access_level).to eq(ProjectFeature::PRIVATE)
end
+ it 'sets releases_access_level', :aggregate_failures do
+ put api("/projects/#{project.id}", user), params: { releases_access_level: 'private' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['releases_access_level']).to eq('private')
+ expect(Project.find_by(path: project[:path]).releases_access_level).to eq(ProjectFeature::PRIVATE)
+ end
+
it 'returns 400 when nothing sent' do
project_param = {}
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 315c76c8ac3..3a9b2d02af5 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -60,6 +60,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['inactive_projects_delete_after_months']).to eq(2)
expect(json_response['inactive_projects_min_size_mb']).to eq(0)
expect(json_response['inactive_projects_send_warning_email_after_months']).to eq(1)
+ expect(json_response['can_create_group']).to eq(true)
end
end
@@ -156,7 +157,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
delete_inactive_projects: true,
inactive_projects_delete_after_months: 24,
inactive_projects_min_size_mb: 10,
- inactive_projects_send_warning_email_after_months: 12
+ inactive_projects_send_warning_email_after_months: 12,
+ can_create_group: false
}
expect(response).to have_gitlab_http_status(:ok)
@@ -217,6 +219,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['inactive_projects_delete_after_months']).to eq(24)
expect(json_response['inactive_projects_min_size_mb']).to eq(10)
expect(json_response['inactive_projects_send_warning_email_after_months']).to eq(12)
+ expect(json_response['can_create_group']).to eq(false)
end
end
diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb
index b62fbaead6f..c635d73efe3 100644
--- a/spec/requests/api/tags_spec.rb
+++ b/spec/requests/api/tags_spec.rb
@@ -418,14 +418,6 @@ RSpec.describe API::Tags do
context 'annotated tag' do
it 'creates a new annotated tag' do
- # Identity must be set in .gitconfig to create annotated tag.
- repo_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path_to_repo
- end
-
- system(*%W(#{Gitlab.config.git.bin_path} --git-dir=#{repo_path} config user.name #{user.name}))
- system(*%W(#{Gitlab.config.git.bin_path} --git-dir=#{repo_path} config user.email #{user.email}))
-
post api(route, current_user), params: { tag_name: 'v7.1.0', ref: 'master', message: 'Release 7.1.0' }
expect(response).to have_gitlab_http_status(:created)
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 96e23337411..1b0a27e78e3 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -165,6 +165,7 @@ RSpec.describe API::Users do
expect(json_response.first).not_to have_key('note')
expect(json_response.first).not_to have_key('namespace_id')
+ expect(json_response.first).not_to have_key('created_by')
end
end
@@ -175,6 +176,7 @@ RSpec.describe API::Users do
expect(json_response.first).not_to have_key('note')
expect(json_response.first).not_to have_key('namespace_id')
+ expect(json_response.first).not_to have_key('created_by')
end
end
@@ -186,6 +188,26 @@ RSpec.describe API::Users do
expect(json_response.first).to have_key('note')
expect(json_response.first['note']).to eq '2018-11-05 | 2FA removed | user requested | www.gitlab.com'
end
+
+ context 'with `created_by` details' do
+ it 'has created_by as nil with a self-registered account' do
+ get api("/users", admin), params: { username: user.username }
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response.first).to have_key('created_by')
+ expect(json_response.first['created_by']).to eq(nil)
+ end
+
+ it 'is created_by a user and has those details' do
+ created = create(:user, created_by_id: user.id)
+
+ get api("/users", admin), params: { username: created.username }
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response.first['created_by'].symbolize_keys)
+ .to eq(API::Entities::UserBasic.new(user).as_json)
+ end
+ end
end
context 'N+1 queries' do
@@ -940,6 +962,17 @@ RSpec.describe API::Users do
expect(user.followees).to contain_exactly(followee)
expect(response).to have_gitlab_http_status(:created)
end
+
+ it 'alerts and not follow when over followee limit' do
+ stub_const('Users::UserFollowUser::MAX_FOLLOWEE_LIMIT', 2)
+ Users::UserFollowUser::MAX_FOLLOWEE_LIMIT.times { user.follow(create(:user)) }
+
+ post api("/users/#{followee.id}/follow", user)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expected_message = format(_("You can't follow more than %{limit} users. To follow more users, unfollow some others."), limit: Users::UserFollowUser::MAX_FOLLOWEE_LIMIT)
+ expect(json_response['message']).to eq(expected_message)
+ expect(user.following?(followee)).to be_falsey
+ end
end
context 'on a followed user' do
diff --git a/spec/requests/api/version_spec.rb b/spec/requests/api/version_spec.rb
deleted file mode 100644
index 7abbaf4f9ec..00000000000
--- a/spec/requests/api/version_spec.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe API::Version do
- shared_examples_for 'GET /version' do
- context 'when unauthenticated' do
- it 'returns authentication error' do
- get api('/version')
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
- end
-
- context 'when authenticated as user' do
- let(:user) { create(:user) }
-
- it 'returns the version information' do
- get api('/version', user)
-
- expect_version
- end
- end
-
- context 'when authenticated with token' do
- let(:personal_access_token) { create(:personal_access_token, scopes: scopes) }
-
- context 'with api scope' do
- let(:scopes) { %i(api) }
-
- it 'returns the version information' do
- get api('/version', personal_access_token: personal_access_token)
-
- expect_version
- end
-
- it 'returns "200" response on head requests' do
- head api('/version', personal_access_token: personal_access_token)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'with read_user scope' do
- let(:scopes) { %i(read_user) }
-
- it 'returns the version information' do
- get api('/version', personal_access_token: personal_access_token)
-
- expect_version
- end
-
- it 'returns "200" response on head requests' do
- head api('/version', personal_access_token: personal_access_token)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'with neither api nor read_user scope' do
- let(:scopes) { %i(read_repository) }
-
- it 'returns authorization error' do
- get api('/version', personal_access_token: personal_access_token)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- def expect_version
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['version']).to eq(Gitlab::VERSION)
- expect(json_response['revision']).to eq(Gitlab.revision)
- end
- end
-
- context 'with graphql enabled' do
- before do
- stub_feature_flags(graphql: true)
- end
-
- include_examples 'GET /version'
- end
-
- context 'with graphql disabled' do
- before do
- stub_feature_flags(graphql: false)
- end
-
- include_examples 'GET /version'
- end
-end
diff --git a/spec/requests/boards/lists_controller_spec.rb b/spec/requests/boards/lists_controller_spec.rb
deleted file mode 100644
index 47f4925d5b0..00000000000
--- a/spec/requests/boards/lists_controller_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Boards::ListsController do
- describe '#index' do
- let(:board) { create(:board) }
- let(:user) { board.project.first_owner }
-
- it 'does not have N+1 queries' do
- login_as(user)
-
- # First request has more queries because we create the default `backlog` list
- get board_lists_path(board)
-
- create(:list, board: board)
-
- control_count = ActiveRecord::QueryRecorder.new { get board_lists_path(board) }.count
-
- create_list(:list, 5, board: board)
-
- expect { get board_lists_path(board) }.not_to exceed_query_limit(control_count)
- end
- end
-end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 81e923983ab..20d298edfe5 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -880,29 +880,20 @@ RSpec.describe 'Git HTTP requests' do
let(:path) { "#{project.full_path}.git" }
let(:env) { { user: 'gitlab-ci-token', password: build.token } }
- it_behaves_like 'pulls are allowed'
+ it 'rejects pulls' do
+ download(path, **env) do |response|
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
- # A non-401 here is not an information leak since the system is
- # "authenticated" as CI using the correct token. It does not have
- # push access, so pushes should be rejected as forbidden, and giving
- # a reason is fine.
- #
- # We know for sure it is not an information leak since pulls using
- # the build token must be allowed.
- it "rejects pushes with 403 Forbidden" do
+ it 'rejects pushes' do
push_get(path, **env)
expect(response).to have_gitlab_http_status(:forbidden)
- expect(response.body).to eq(git_access_error(:auth_upload))
end
- # We are "authenticated" as CI using a valid token here. But we are
- # not authorized to see any other project, so return "not found".
- it "rejects pulls for other project with 404 Not Found" do
- clone_get("#{other_project.full_path}.git", **env)
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to eq(git_access_error(:project_not_found))
+ def pull
+ download(path, **env)
end
end
@@ -1494,33 +1485,21 @@ RSpec.describe 'Git HTTP requests' do
added_by: user)
end
+ # legacy behavior that is blocked/deprecated
context 'when build created by system is authenticated' do
let(:path) { "#{project.full_path}.git" }
let(:env) { { user: 'gitlab-ci-token', password: build.token } }
- it_behaves_like 'pulls are allowed'
+ it 'rejects pulls' do
+ download(path, **env) do |response|
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
- # A non-401 here is not an information leak since the system is
- # "authenticated" as CI using the correct token. It does not have
- # push access, so pushes should be rejected as forbidden, and giving
- # a reason is fine.
- #
- # We know for sure it is not an information leak since pulls using
- # the build token must be allowed.
- it "rejects pushes with 403 Forbidden" do
+ it 'rejects pushes' do
push_get(path, **env)
expect(response).to have_gitlab_http_status(:forbidden)
- expect(response.body).to eq(git_access_error(:auth_upload))
- end
-
- # We are "authenticated" as CI using a valid token here. But we are
- # not authorized to see any other project, so return "not found".
- it "rejects pulls for other project with 404 Not Found" do
- clone_get("#{other_project.full_path}.git", **env)
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to eq(git_access_error(:project_not_found))
end
end
@@ -1780,8 +1759,7 @@ RSpec.describe 'Git HTTP requests' do
end
describe "User with LDAP identity" do
- let(:user) { create(:omniauth_user, extern_uid: dn) }
- let(:dn) { 'uid=john,ou=people,dc=example,dc=com' }
+ let(:user) { create(:omniauth_user, :ldap) }
let(:path) { 'doesnt/exist.git' }
before do
diff --git a/spec/requests/groups/settings/access_tokens_controller_spec.rb b/spec/requests/groups/settings/access_tokens_controller_spec.rb
index eabdef3c41e..cf728b3935f 100644
--- a/spec/requests/groups/settings/access_tokens_controller_spec.rb
+++ b/spec/requests/groups/settings/access_tokens_controller_spec.rb
@@ -87,4 +87,19 @@ RSpec.describe Groups::Settings::AccessTokensController do
it_behaves_like 'feature unavailable'
it_behaves_like 'PUT resource access tokens available'
end
+
+ describe '#index' do
+ let_it_be(:resource_access_tokens) { create_list(:personal_access_token, 3, user: bot_user) }
+
+ before do
+ get group_settings_access_tokens_path(resource)
+ end
+
+ it 'includes details of the active group access tokens' do
+ active_resource_access_tokens =
+ ::GroupAccessTokenSerializer.new.represent(resource_access_tokens.reverse, group: resource)
+
+ expect(assigns(:active_resource_access_tokens).to_json).to eq(active_resource_access_tokens.to_json)
+ end
+ end
end
diff --git a/spec/requests/ide_controller_spec.rb b/spec/requests/ide_controller_spec.rb
index 151fa89b819..8d61399c824 100644
--- a/spec/requests/ide_controller_spec.rb
+++ b/spec/requests/ide_controller_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe IdeController do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:reporter) { create(:user) }
let_it_be(:project) do
@@ -14,6 +16,8 @@ RSpec.describe IdeController do
let_it_be(:creator) { project.creator }
let_it_be(:other_user) { create(:user) }
+ let_it_be(:top_nav_partial) { 'layouts/header/_default' }
+
let(:user) { creator }
let(:branch) { '' }
@@ -233,6 +237,33 @@ RSpec.describe IdeController do
end
end
end
+
+ # This indirectly tests that `minimal: true` was passed to the fullscreen layout
+ describe 'layout' do
+ where(:ff_state, :use_legacy_web_ide, :expect_top_nav) do
+ false | false | true
+ false | true | true
+ true | true | true
+ true | false | false
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(vscode_web_ide: ff_state)
+ allow(user).to receive(:use_legacy_web_ide).and_return(use_legacy_web_ide)
+
+ subject
+ end
+
+ it 'handles rendering top nav' do
+ if expect_top_nav
+ expect(response).to render_template(top_nav_partial)
+ else
+ expect(response).not_to render_template(top_nav_partial)
+ end
+ end
+ end
+ end
end
end
end
diff --git a/spec/requests/import/github_groups_controller_spec.rb b/spec/requests/import/github_groups_controller_spec.rb
new file mode 100644
index 00000000000..544cbf88cd2
--- /dev/null
+++ b/spec/requests/import/github_groups_controller_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::GithubGroupsController do
+ describe 'GET status' do
+ subject(:status) { get '/import/github_group/status', params: params, headers: headers }
+
+ let_it_be(:user) { create(:user) }
+ let(:headers) { { 'Accept' => 'application/json' } }
+ let(:params) { {} }
+
+ before do
+ login_as(user)
+ end
+
+ context 'when OAuth config is missing' do
+ before do
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for).with('github').and_return(nil)
+ end
+
+ it 'returns missing config error' do
+ status
+
+ expect(json_response['errors']).to eq('Missing OAuth configuration for GitHub.')
+ end
+ end
+
+ context 'when OAuth config present' do
+ let(:github_access_token) { 'asdasd12345' }
+
+ before do
+ post '/import/github/personal_access_token', params: { personal_access_token: github_access_token }
+ end
+
+ it 'fetches organizations' do
+ expect_next_instance_of(Octokit::Client) do |client|
+ expect(client).to receive(:organizations).and_return([].to_enum)
+ end
+
+ status
+ end
+
+ context 'with pagination' do
+ context 'when no page is specified' do
+ it 'requests first page' do
+ expect_next_instance_of(Octokit::Client) do |client|
+ expect(client).to receive(:organizations).with(nil, { page: 1, per_page: 25 }).and_return([].to_enum)
+ end
+
+ status
+ end
+ end
+
+ context 'when page is specified' do
+ let(:params) { { page: 2 } }
+
+ it 'responds with organizations with specified page' do
+ expect_next_instance_of(Octokit::Client) do |client|
+ expect(client).to receive(:organizations).with(nil, { page: 2, per_page: 25 }).and_return([].to_enum)
+ end
+
+ status
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/jira_connect/public_keys_controller_spec.rb b/spec/requests/jira_connect/public_keys_controller_spec.rb
new file mode 100644
index 00000000000..2eca4c0ea2f
--- /dev/null
+++ b/spec/requests/jira_connect/public_keys_controller_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::PublicKeysController do
+ describe 'GET /-/jira_connect/public_keys/:uuid' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ end
+
+ let(:uuid) { non_existing_record_id }
+ let(:dot_com) { true }
+
+ it 'renders 404' do
+ get jira_connect_public_key_path(id: uuid)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'when public key exists' do
+ let_it_be(:public_key) { JiraConnect::PublicKey.create!(key: OpenSSL::PKey::RSA.generate(3072).public_key) }
+
+ let(:uuid) { public_key.uuid }
+
+ it 'renders 200' do
+ get jira_connect_public_key_path(id: uuid)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(public_key.key)
+ end
+
+ context 'when not on GitLab.com' do
+ let(:dot_com) { false }
+
+ it 'renders 404' do
+ get jira_connect_public_key_path(id: uuid)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when jira_connect_oauth_self_managed disabled' do
+ before do
+ stub_feature_flags(jira_connect_oauth_self_managed: false)
+ end
+
+ it 'renders 404' do
+ get jira_connect_public_key_path(id: uuid)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb b/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb
index 7d5eb1c9685..c5e7369b0a9 100644
--- a/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb
+++ b/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb
@@ -8,10 +8,11 @@ RSpec.describe 'Projects::Ci::PrometheusMetrics::HistogramsController' do
describe 'POST /*namespace_id/:project_id/-/ci/prometheus_metrics/histograms' do
context 'with known histograms' do
it 'returns 201 Created' do
- post histograms_route(histograms: [
- { name: :pipeline_graph_link_calculation_duration_seconds, value: 1 },
- { name: :pipeline_graph_links_total, value: 10 }
- ])
+ post histograms_route(histograms:
+ [
+ { name: :pipeline_graph_link_calculation_duration_seconds, value: 1 },
+ { name: :pipeline_graph_links_total, value: 10 }
+ ])
expect(response).to have_gitlab_http_status(:created)
end
diff --git a/spec/requests/projects/incident_management/timeline_events_spec.rb b/spec/requests/projects/incident_management/timeline_events_spec.rb
new file mode 100644
index 00000000000..f7dead4834d
--- /dev/null
+++ b/spec/requests/projects/incident_management/timeline_events_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Timeline Events' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ describe 'POST /preview_markdown' do
+ let(:timeline_text) { "timeline text with image ![img](img/src.png) and reference #{incident.to_reference}" }
+
+ context 'when authorized' do
+ let(:expected_img) do
+ '<a class="with-attachment-icon" href="img/src.png" target="_blank" rel="noopener noreferrer">img</a>'
+ end
+
+ let(:expected_reference) do
+ %(<a href="/#{project.full_path}/-/issues/#{incident.iid}" data-reference-type="issue" ) +
+ %(data-original="##{incident.iid}" data-link="false" data-link-reference="false" ) +
+ %(data-project="#{project.id}" data-issue="#{incident.id}" data-project-path="#{project.full_path}" ) +
+ %(data-iid="#{incident.iid}" data-issue-type="incident" data-container="body" data-placement="top" ) +
+ %(title="#{incident.title}" class="gfm gfm-issue">##{incident.iid}</a>)
+ end
+
+ let(:expected_body) do
+ "<p>timeline text with image #{expected_img} and reference #{expected_reference}</p>"
+ end
+
+ before do
+ project.add_developer(user)
+ login_as(user)
+ end
+
+ it 'renders JSON in a correct format' do
+ post preview_markdown_project_incident_management_timeline_events_path(project, format: :json),
+ params: { text: timeline_text }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({
+ body: expected_body,
+ references: {
+ commands: '',
+ suggestions: [],
+ users: []
+ }
+ }.as_json)
+ end
+ end
+
+ context 'when not authorized' do
+ it 'returns 302' do
+ post preview_markdown_project_incident_management_timeline_events_path(project, format: :json),
+ params: { text: timeline_text }
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/issues_controller_spec.rb b/spec/requests/projects/issues_controller_spec.rb
index de1d55ff5be..aa2ba5e114b 100644
--- a/spec/requests/projects/issues_controller_spec.rb
+++ b/spec/requests/projects/issues_controller_spec.rb
@@ -31,11 +31,12 @@ RSpec.describe Projects::IssuesController do
notes = discussions.flat_map { |d| d['notes'] }
expect(discussions.count).to eq(2)
- expect(notes).to match([
- a_hash_including('id' => discussion.id.to_s),
- a_hash_including('id' => discussion_reply.id.to_s),
- a_hash_including('type' => 'StateNote')
- ])
+ expect(notes).to match(
+ [
+ a_hash_including('id' => discussion.id.to_s),
+ a_hash_including('id' => discussion_reply.id.to_s),
+ a_hash_including('type' => 'StateNote')
+ ])
cursor = response.header['X-Next-Page-Cursor']
expect(cursor).to be_present
@@ -46,9 +47,7 @@ RSpec.describe Projects::IssuesController do
notes = discussions.flat_map { |d| d['notes'] }
expect(discussions.count).to eq(1)
- expect(notes).to match([
- a_hash_including('id' => discussion_2.id.to_s)
- ])
+ expect(notes).to match([a_hash_including('id' => discussion_2.id.to_s)])
end
end
end
diff --git a/spec/requests/projects/merge_requests_controller_spec.rb b/spec/requests/projects/merge_requests_controller_spec.rb
index 6580fc8b80f..2ee86bb423b 100644
--- a/spec/requests/projects/merge_requests_controller_spec.rb
+++ b/spec/requests/projects/merge_requests_controller_spec.rb
@@ -29,11 +29,12 @@ RSpec.describe Projects::MergeRequestsController do
notes = discussions.flat_map { |d| d['notes'] }
expect(discussions.count).to eq(2)
- expect(notes).to match([
- a_hash_including('id' => discussion.id.to_s),
- a_hash_including('id' => discussion_reply.id.to_s),
- a_hash_including('type' => 'StateNote')
- ])
+ expect(notes).to match(
+ [
+ a_hash_including('id' => discussion.id.to_s),
+ a_hash_including('id' => discussion_reply.id.to_s),
+ a_hash_including('type' => 'StateNote')
+ ])
cursor = response.header['X-Next-Page-Cursor']
expect(cursor).to be_present
@@ -44,9 +45,7 @@ RSpec.describe Projects::MergeRequestsController do
notes = discussions.flat_map { |d| d['notes'] }
expect(discussions.count).to eq(1)
- expect(notes).to match([
- a_hash_including('id' => discussion_2.id.to_s)
- ])
+ expect(notes).to match([a_hash_including('id' => discussion_2.id.to_s)])
end
context 'when paginated_mr_discussions is disabled' do
diff --git a/spec/requests/projects/settings/access_tokens_controller_spec.rb b/spec/requests/projects/settings/access_tokens_controller_spec.rb
index 780d1b8caef..48114834c65 100644
--- a/spec/requests/projects/settings/access_tokens_controller_spec.rb
+++ b/spec/requests/projects/settings/access_tokens_controller_spec.rb
@@ -88,4 +88,19 @@ RSpec.describe Projects::Settings::AccessTokensController do
it_behaves_like 'feature unavailable'
it_behaves_like 'PUT resource access tokens available'
end
+
+ describe '#index' do
+ let_it_be(:resource_access_tokens) { create_list(:personal_access_token, 3, user: bot_user) }
+
+ before do
+ get project_settings_access_tokens_path(resource)
+ end
+
+ it 'includes details of the active project access tokens' do
+ active_resource_access_tokens =
+ ::ProjectAccessTokenSerializer.new.represent(resource_access_tokens.reverse, project: resource)
+
+ expect(assigns(:active_resource_access_tokens).to_json).to eq(active_resource_access_tokens.to_json)
+ end
+ end
end
diff --git a/spec/requests/users/namespace_callouts_spec.rb b/spec/requests/users/namespace_callouts_spec.rb
deleted file mode 100644
index 5a4e269eefb..00000000000
--- a/spec/requests/users/namespace_callouts_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Namespace callouts' do
- let_it_be(:user) { create(:user) }
-
- before do
- sign_in(user)
- end
-
- describe 'POST /-/users/namespace_callouts' do
- let(:params) { { feature_name: feature_name, namespace_id: user.namespace.id } }
-
- subject { post namespace_callouts_path, params: params, headers: { 'ACCEPT' => 'application/json' } }
-
- context 'with valid feature name and group' do
- let(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
-
- context 'when callout entry does not exist' do
- it 'creates a callout entry with dismissed state' do
- expect { subject }.to change { Users::NamespaceCallout.count }.by(1)
- end
-
- it 'returns success' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'when callout entry already exists' do
- let!(:callout) do
- create(:namespace_callout,
- feature_name: Users::GroupCallout.feature_names.each_key.first,
- user: user,
- namespace: user.namespace)
- end
-
- it 'returns success', :aggregate_failures do
- expect { subject }.not_to change { Users::NamespaceCallout.count }
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
- context 'with invalid feature name' do
- let(:feature_name) { 'bogus_feature_name' }
-
- it 'returns bad request' do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
- end
-end
diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index 42f14392117..e78d4cc326e 100644
--- a/spec/requests/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -828,6 +828,26 @@ RSpec.describe UsersController do
end
end
+ describe 'POST #follow' do
+ context 'when over followee limit' do
+ before do
+ stub_const('Users::UserFollowUser::MAX_FOLLOWEE_LIMIT', 2)
+ sign_in(user)
+ end
+
+ it 'alerts and not follow' do
+ Users::UserFollowUser::MAX_FOLLOWEE_LIMIT.times { user.follow(create(:user)) }
+
+ post user_follow_url(username: public_user.username)
+ expect(response).to be_redirect
+
+ expected_message = format(_("You can't follow more than %{limit} users. To follow more users, unfollow some others."), limit: Users::UserFollowUser::MAX_FOLLOWEE_LIMIT)
+ expect(flash[:alert]).to eq(expected_message)
+ expect(user).not_to be_following(public_user)
+ end
+ end
+ end
+
context 'token authentication' do
it_behaves_like 'authenticates sessionless user for the request spec', 'show atom', public_resource: true do
let(:url) { user_url(user, format: :atom) }
diff --git a/spec/routing/import_routing_spec.rb b/spec/routing/import_routing_spec.rb
index b1da2eaa33b..b63ae1e7e4e 100644
--- a/spec/routing/import_routing_spec.rb
+++ b/spec/routing/import_routing_spec.rb
@@ -177,3 +177,10 @@ RSpec.describe Import::PhabricatorController, 'routing' do
expect(get("/import/phabricator/new")).to route_to("import/phabricator#new")
end
end
+
+# status_import_github_group GET /import/github_group/status(.:format) import/github_groups#status
+RSpec.describe Import::GithubGroupsController, 'routing' do
+ it 'to #status' do
+ expect(get('/import/github_group/status')).to route_to('import/github_groups#status')
+ end
+end
diff --git a/spec/rubocop/check_graceful_task_spec.rb b/spec/rubocop/check_graceful_task_spec.rb
index 0364820a602..c39a00470fd 100644
--- a/spec/rubocop/check_graceful_task_spec.rb
+++ b/spec/rubocop/check_graceful_task_spec.rb
@@ -62,25 +62,35 @@ RSpec.describe RuboCop::CheckGracefulTask do
let(:adjusted_rubocop_status) { status_success }
context 'with sufficient environment variables' do
+ let(:script) { 'scripts/slack' }
let(:channel) { 'f_rubocop' }
+ let(:emoji) { 'rubocop' }
+ let(:user_name) { 'GitLab Bot' }
+ let(:job_name) { 'some job name' }
+ let(:job_url) { 'some job url' }
+ let(:docs_link) { 'https://docs.gitlab.com/ee/development/contributing/style_guides.html#silenced-offenses' }
before do
env = {
'CI_SLACK_WEBHOOK_URL' => 'webhook_url',
- 'CI_JOB_NAME' => 'job_name',
- 'CI_JOB_URL' => 'job_url'
+ 'CI_JOB_NAME' => job_name,
+ 'CI_JOB_URL' => job_url
}
stub_const('ENV', ENV.to_hash.update(env))
end
it 'notifies slack' do
- popen_args = ['scripts/slack', channel, kind_of(String), 'rubocop', kind_of(String)]
popen_result = ['', 0]
- expect(Gitlab::Popen).to receive(:popen).with(popen_args).and_return(popen_result)
+ allow(Gitlab::Popen).to receive(:popen).with(anything).and_return(popen_result)
subject
+ message = a_kind_of(String).and include(job_name).and include(job_url).and include(docs_link)
+
+ expect(Gitlab::Popen).to have_received(:popen)
+ .with([script, channel, message, emoji, user_name])
+
expect(output.string).to include("Notifying Slack ##{channel}.")
end
diff --git a/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb b/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
index 9a1639806c8..0a121a495c9 100644
--- a/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
+++ b/spec/rubocop/cop/gitlab/duplicate_spec_location_spec.rb
@@ -4,7 +4,7 @@ require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/gitlab/duplicate_spec_location'
-RSpec.describe RuboCop::Cop::Gitlab::DuplicateSpecLocation do
+RSpec.describe RuboCop::Cop::Gitlab::DuplicateSpecLocation, type: :rubocop_rspec do
let(:rails_root) { '../../../../' }
def full_path(path)
diff --git a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
index ac7e41dda44..a3c9ae8916e 100644
--- a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
+++ b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
@@ -7,7 +7,7 @@ require_relative '../../../../rubocop/cop/gitlab/mark_used_feature_flags'
RSpec.describe RuboCop::Cop::Gitlab::MarkUsedFeatureFlags do
let(:defined_feature_flags) do
- %w[a_feature_flag foo_hello foo_world baz_experiment_percentage bar_baz]
+ %w[a_feature_flag foo_hello foo_world bar_baz baz]
end
before do
@@ -118,40 +118,33 @@ RSpec.describe RuboCop::Cop::Gitlab::MarkUsedFeatureFlags do
end
end
- %w[
- experiment
- experiment_enabled?
- push_frontend_experiment
- Gitlab::Experimentation.active?
- ].each do |feature_flag_method|
- context "#{feature_flag_method} method" do
- context 'a string feature flag' do
- include_examples 'sets flag as used', %Q|#{feature_flag_method}("baz")|, %w[baz baz_experiment_percentage]
- end
+ context 'with the experiment method' do
+ context 'a string feature flag' do
+ include_examples 'sets flag as used', %q|experiment("baz")|, %w[baz]
+ end
- context 'a symbol feature flag' do
- include_examples 'sets flag as used', %Q|#{feature_flag_method}(:baz)|, %w[baz baz_experiment_percentage]
- end
+ context 'a symbol feature flag' do
+ include_examples 'sets flag as used', %q|experiment(:baz)|, %w[baz]
+ end
- context 'an interpolated string feature flag with a string prefix' do
- include_examples 'sets flag as used', %Q|#{feature_flag_method}("foo_\#{bar}")|, %w[foo_hello foo_world]
- end
+ context 'an interpolated string feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|experiment("foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
- context 'an interpolated symbol feature flag with a string prefix' do
- include_examples 'sets flag as used', %Q|#{feature_flag_method}(:"foo_\#{bar}")|, %w[foo_hello foo_world]
- end
+ context 'an interpolated symbol feature flag with a string prefix' do
+ include_examples 'sets flag as used', %Q|experiment(:"foo_\#{bar}")|, %w[foo_hello foo_world]
+ end
- context 'an interpolated string feature flag with a string prefix and suffix' do
- include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(:"foo_\#{bar}_baz")|
- end
+ context 'an interpolated string feature flag with a string prefix and suffix' do
+ include_examples 'does not set any flags as used', %Q|experiment(:"foo_\#{bar}_baz")|
+ end
- context 'a dynamic string feature flag as a variable' do
- include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(a_variable, an_arg)|
- end
+ context 'a dynamic string feature flag as a variable' do
+ include_examples 'does not set any flags as used', %q|experiment(a_variable, an_arg)|
+ end
- context 'an integer feature flag' do
- include_examples 'does not set any flags as used', %Q|#{feature_flag_method}(123)|
- end
+ context 'an integer feature flag' do
+ include_examples 'does not set any flags as used', %q|experiment(123)|
end
end
diff --git a/spec/rubocop/cop/gitlab/no_code_coverage_comment_spec.rb b/spec/rubocop/cop/gitlab/no_code_coverage_comment_spec.rb
new file mode 100644
index 00000000000..f0c0297d266
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/no_code_coverage_comment_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/gitlab/no_code_coverage_comment'
+
+RSpec.describe RuboCop::Cop::Gitlab::NoCodeCoverageComment do
+ let(:msg) { format(described_class::MSG, nocov_comment: nocov_comment) }
+ let(:nocov_comment) { ":#{comment_token}:" }
+
+ shared_examples 'nocov check' do
+ it 'flags related code comments' do
+ expect_offense(<<~RUBY, nocov_token: comment_token, msg: msg)
+ # :%{nocov_token}:
+ ^^^{nocov_token} %{msg}
+ def method
+ end
+ #:%{nocov_token}:
+ ^^^{nocov_token} %{msg}
+
+ def other_method
+ if expr
+ # :%{nocov_token}: With some additional comments
+ ^^^{nocov_token} %{msg}
+ value << line.strip
+ # :%{nocov_token}:
+ ^^^{nocov_token} %{msg}
+ end
+ end
+ RUBY
+ end
+
+ it 'ignores unrelated comments' do
+ expect_no_offenses(<<~RUBY)
+ # Other comments are ignored :#{comment_token}:
+ #
+ # # :#{comment_token}:
+ RUBY
+ end
+ end
+
+ context 'with nocov as default comment token' do
+ it_behaves_like 'nocov check' do
+ let(:comment_token) { described_class::DEFAULT_COMMENT_TOKEN }
+ end
+ end
+
+ context 'with configured comment token' do
+ it_behaves_like 'nocov check' do
+ let(:comment_token) { 'skipit' }
+
+ let(:config) do
+ RuboCop::Config.new(
+ 'Gitlab/NoCodeCoverageComment' => {
+ 'CommentToken' => comment_token
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/gitlab/service_response_spec.rb b/spec/rubocop/cop/gitlab/service_response_spec.rb
new file mode 100644
index 00000000000..84cf0dbff52
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/service_response_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/gitlab/service_response'
+
+RSpec.describe RuboCop::Cop::Gitlab::ServiceResponse do
+ subject(:cop) { described_class.new }
+
+ it 'does not flag the `http_status:` param on a homonym method' do
+ expect_no_offenses("MyClass.error(http_status: :ok)")
+ end
+
+ it 'does not flag calls without params' do
+ expect_no_offenses('ServiceResponse.error')
+ end
+
+ it 'does not flag the offense when `http_status` is not used' do
+ expect_no_offenses('ServiceResponse.error(message: "some error", reason: :bad_time)')
+ end
+
+ it 'flags the use of `http_status:` parameter in ServiceResponse in error' do
+ expect_offense(<<~CODE, msg: described_class::MSG)
+ ServiceResponse.error(message: "some error", http_status: :bad_request)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ %{msg}
+ CODE
+ end
+
+ it 'flags the use of `http_status:` parameter in ServiceResponse in success' do
+ expect_offense(<<~CODE, msg: described_class::MSG)
+ ServiceResponse.success(message: "some error", http_status: :bad_request)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ %{msg}
+ CODE
+ end
+
+ it 'flags the use of `http_status:` parameter in ServiceResponse in initializer' do
+ expect_offense(<<~CODE, msg: described_class::MSG)
+ ServiceResponse.new(message: "some error", http_status: :bad_request)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ %{msg}
+ CODE
+ end
+end
diff --git a/spec/rubocop/cop/migration/background_migration_missing_active_concern_spec.rb b/spec/rubocop/cop/migration/background_migration_missing_active_concern_spec.rb
new file mode 100644
index 00000000000..c74a7d29056
--- /dev/null
+++ b/spec/rubocop/cop/migration/background_migration_missing_active_concern_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/migration/background_migration_missing_active_concern'
+
+RSpec.describe RuboCop::Cop::Migration::BackgroundMigrationMissingActiveConcern do
+ shared_examples 'offense is not registered' do
+ it 'does not register any offenses' do
+ expect_no_offenses(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ prepended do
+ scope_to -> (relation) { relation }
+ end
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'when outside of a migration' do
+ it_behaves_like 'offense is not registered'
+ end
+
+ context 'in non-ee background migration' do
+ before do
+ allow(cop).to receive(:in_ee_background_migration?).and_return(false)
+ end
+
+ it_behaves_like 'offense is not registered'
+ end
+
+ context 'in ee background migration' do
+ before do
+ allow(cop).to receive(:in_ee_background_migration?).and_return(true)
+ end
+
+ context 'when scope_to is not used inside prepended block' do
+ it 'does not register any offenses' do
+ expect_no_offenses(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ prepended do
+ some_method_to -> (relation) { relation }
+ end
+
+ def foo
+ scope_to -> (relation) { relation }
+ end
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'when scope_to is used inside prepended block' do
+ it 'does not register any offenses if the module does extend ActiveSupport::Concern' do
+ expect_no_offenses(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ extend ::Gitlab::Utils::Override
+ extend ActiveSupport::Concern
+
+ prepended do
+ scope_to -> (relation) { relation }
+ end
+ end
+ end
+ RUBY
+ end
+
+ it 'registers an offense if the module does not extend ActiveSupport::Concern' do
+ expect_offense(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ prepended do
+ scope_to -> (relation) { relation }
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Extend `ActiveSupport::Concern` [...]
+ end
+ end
+ end
+ RUBY
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/redis_queue_usage_spec.rb b/spec/rubocop/cop/redis_queue_usage_spec.rb
new file mode 100644
index 00000000000..9861a6a79d9
--- /dev/null
+++ b/spec/rubocop/cop/redis_queue_usage_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../rubocop/cop/redis_queue_usage'
+
+RSpec.describe RuboCop::Cop::RedisQueueUsage do
+ let(:msg) { described_class::MSG }
+
+ context 'when assigning Gitlab::Redis::Queues as a variable' do
+ it 'registers offence for any variable assignment' do
+ expect_offense(<<~PATTERN)
+ x = Gitlab::Redis::Queues
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for constant assignment' do
+ expect_offense(<<~PATTERN)
+ X = Gitlab::Redis::Queues
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+ end
+
+ context 'when assigning Gitlab::Redis::Queues as a part of an array' do
+ it 'registers offence for variable assignments' do
+ expect_offense(<<~PATTERN)
+ x = [ Gitlab::Redis::Cache, Gitlab::Redis::Queues, Gitlab::Redis::SharedState ]
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for constant assignments' do
+ expect_offense(<<~PATTERN)
+ ALL = [ Gitlab::Redis::Cache, Gitlab::Redis::Queues, Gitlab::Redis::SharedState ]
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for constant assignments while invoking function' do
+ expect_offense(<<~PATTERN)
+ ALL = [ Gitlab::Redis::Cache, Gitlab::Redis::Queues, Gitlab::Redis::SharedState ].freeze
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for constant assignments while invoking multiple functions' do
+ expect_offense(<<~PATTERN)
+ ALL = [ Gitlab::Redis::Cache, Gitlab::Redis::Queues, Gitlab::Redis::SharedState ].foo.freeze
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+ end
+
+ context 'when assigning Gitlab::Redis::Queues as a part of a hash' do
+ it 'registers offence for variable assignments' do
+ expect_offense(<<~PATTERN)
+ x = { "test": Gitlab::Redis::Queues, "test2": Gitlab::Redis::SharedState }
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for constant assignments' do
+ expect_offense(<<~PATTERN)
+ ALL = { "test": Gitlab::Redis::Queues, "test2": Gitlab::Redis::SharedState }
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for constant assignments while invoking function' do
+ expect_offense(<<~PATTERN)
+ ALL = { "test": Gitlab::Redis::Queues, "test2": Gitlab::Redis::SharedState }.freeze
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for constant assignments while invoking multiple functions' do
+ expect_offense(<<~PATTERN)
+ ALL = { "test": Gitlab::Redis::Queues, "test2": Gitlab::Redis::SharedState }.foo.freeze
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+ end
+
+ it 'registers offence for any invocation of Gitlab::Redis::Queues methods' do
+ expect_offense(<<~PATTERN)
+ Gitlab::Redis::Queues.params
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence for using Gitlab::Redis::Queues as parameter in method calls' do
+ expect_offense(<<~PATTERN)
+ use_redis(Gitlab::Redis::Queues)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+end
diff --git a/spec/rubocop/cop/rspec/factory_bot/avoid_create_spec.rb b/spec/rubocop/cop/rspec/factory_bot/avoid_create_spec.rb
new file mode 100644
index 00000000000..7f45661c13d
--- /dev/null
+++ b/spec/rubocop/cop/rspec/factory_bot/avoid_create_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../../../rubocop/cop/rspec/factory_bot/avoid_create'
+
+RSpec.describe RuboCop::Cop::RSpec::FactoryBot::AvoidCreate do
+ shared_examples 'an offensive factory call' do |namespace|
+ %i[create create_list].each do |forbidden_method|
+ namespaced_forbidden_method = "#{namespace}#{forbidden_method}(:user)"
+
+ it "registers an offense for #{namespaced_forbidden_method}" do
+ expect_offense(<<-RUBY)
+ describe 'foo' do
+ let(:user) { #{namespaced_forbidden_method} }
+ #{'^' * namespaced_forbidden_method.size} Prefer using `build_stubbed` or similar over `#{forbidden_method}`. See https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage
+ end
+ RUBY
+ end
+ end
+ end
+
+ it_behaves_like 'an offensive factory call', ''
+ it_behaves_like 'an offensive factory call', 'FactoryBot.'
+end
diff --git a/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb b/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
index 90101e09023..6c596210f4e 100644
--- a/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
+++ b/spec/rubocop/cop/rspec/top_level_describe_path_spec.rb
@@ -22,6 +22,19 @@ RSpec.describe RuboCop::Cop::RSpec::TopLevelDescribePath do
end
end
+ context 'when the describe is in a shared context' do
+ context 'with shared_context' do
+ it 'registers no offenses' do
+ expect_no_offenses(<<~SOURCE, 'spec/foo.rb')
+ shared_context 'Foo' do
+ describe '#bar' do
+ end
+ end
+ SOURCE
+ end
+ end
+ end
+
context 'when the describe is in a shared example' do
context 'with shared_examples' do
it 'registers no offenses' do
diff --git a/spec/rubocop/cop/sidekiq_api_usage_spec.rb b/spec/rubocop/cop/sidekiq_api_usage_spec.rb
new file mode 100644
index 00000000000..79a0774e625
--- /dev/null
+++ b/spec/rubocop/cop/sidekiq_api_usage_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../rubocop/cop/sidekiq_api_usage'
+
+RSpec.describe RuboCop::Cop::SidekiqApiUsage do
+ let(:msg) { described_class::MSG }
+
+ context 'when calling Sidekiq::Worker' do
+ it 'registers no offences for calling skipping_transaction_check' do
+ expect_no_offenses(<<~PATTERN)
+ Sidekiq::Worker.skipping_transaction_check do
+ end
+ PATTERN
+ end
+
+ it 'registers no offences for calling raise_inside_transaction_exception' do
+ expect_no_offenses(<<~PATTERN)
+ Sidekiq::Worker.raise_inside_transaction_exception(cause: "testing")
+ PATTERN
+ end
+
+ it 'registers no offences for calling raise_exception_for_being_inside_a_transaction?' do
+ expect_no_offenses(<<~PATTERN)
+ return if Sidekiq::Worker.raise_exception_for_being_inside_a_transaction?
+ PATTERN
+ end
+
+ it 'registers offence for calling other Sidekiq::Worker methods' do
+ expect_offense(<<~PATTERN)
+ Sidekiq::Worker.drain_all
+ ^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+ end
+
+ it 'does not registers offence when calling Sidekiq::Testing' do
+ expect_no_offenses(<<~PATTERN)
+ Sidekiq::Testing.inline! do
+ create_real_projects!
+ create_large_projects!
+ end
+ PATTERN
+ end
+
+ it 'registers offence when calling Sidekiq API' do
+ expect_offense(<<~PATTERN)
+ Sidekiq::Queue.new('testing').all
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+
+ it 'registers offence when assigning Sidekiq API classes' do
+ expect_offense(<<~PATTERN)
+ retry_set = Sidekiq::RetrySet.new
+ ^^^^^^^^^^^^^^^^^^^^^ #{msg}
+ PATTERN
+ end
+end
diff --git a/spec/rubocop/cop/sidekiq_redis_call_spec.rb b/spec/rubocop/cop/sidekiq_redis_call_spec.rb
new file mode 100644
index 00000000000..7d1c68bfabe
--- /dev/null
+++ b/spec/rubocop/cop/sidekiq_redis_call_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+
+require_relative '../../../rubocop/cop/sidekiq_redis_call'
+
+RSpec.describe RuboCop::Cop::SidekiqRedisCall do
+ it 'flags any use of Sidekiq.redis even without blocks' do
+ expect_offense(<<~PATTERN)
+ Sidekiq.redis
+ ^^^^^^^^^^^^^ Refrain from directly using Sidekiq.redis unless for migration. For admin operations, use Sidekiq APIs.
+ PATTERN
+ end
+
+ it 'flags the use of Sidekiq.redis in single-line blocks' do
+ expect_offense(<<~PATTERN)
+ Sidekiq.redis { |redis| yield redis }
+ ^^^^^^^^^^^^^ Refrain from directly using Sidekiq.redis unless for migration. For admin operations, use Sidekiq APIs.
+ PATTERN
+ end
+
+ it 'flags the use of Sidekiq.redis in multi-line blocks' do
+ expect_offense(<<~PATTERN)
+ Sidekiq.redis do |conn|
+ ^^^^^^^^^^^^^ Refrain from directly using Sidekiq.redis unless for migration. For admin operations, use Sidekiq APIs.
+ conn.sadd('queues', queues)
+ end
+ PATTERN
+ end
+end
diff --git a/spec/rubocop/cop/static_translation_definition_spec.rb b/spec/rubocop/cop/static_translation_definition_spec.rb
index 10b4f162504..9f45f6f8c5b 100644
--- a/spec/rubocop/cop/static_translation_definition_spec.rb
+++ b/spec/rubocop/cop/static_translation_definition_spec.rb
@@ -38,6 +38,15 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
C = n_("c")
^^^^^^^ #{msg}
CODE
+ <<~'CODE',
+ A = _('a' \
+ ^^^^^^^ [...]
+ 'b')
+ CODE
+ <<~'CODE',
+ A = _("a#{s}")
+ ^^^^^^^^^^ [...]
+ CODE
<<~CODE,
class MyClass
def self.translations
@@ -100,6 +109,9 @@ RSpec.describe RuboCop::Cop::StaticTranslationDefinition do
'CONSTANT_1 = __("a")',
'CONSTANT_2 = s__("a")',
'CONSTANT_3 = n__("a")',
+ 'CONSTANT_var = _(code)',
+ 'CONSTANT_int = _(1)',
+ 'CONSTANT_none = _()',
<<~CODE,
class MyClass
def self.method
diff --git a/spec/rubocop/cop_todo_spec.rb b/spec/rubocop/cop_todo_spec.rb
index 3f9c378b303..c641001789f 100644
--- a/spec/rubocop/cop_todo_spec.rb
+++ b/spec/rubocop/cop_todo_spec.rb
@@ -66,6 +66,38 @@ RSpec.describe RuboCop::CopTodo do
end
end
+ describe '#generate?' do
+ subject { cop_todo.generate? }
+
+ context 'when empty todo' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when previously disabled' do
+ before do
+ cop_todo.previously_disabled = true
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when in grace period' do
+ before do
+ cop_todo.grace_period = true
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'with offenses recorded' do
+ before do
+ cop_todo.record('a.rb', 1)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
describe '#to_yaml' do
subject(:yaml) { cop_todo.to_yaml }
@@ -77,9 +109,8 @@ RSpec.describe RuboCop::CopTodo do
specify do
expect(yaml).to eq(<<~YAML)
---
- # Cop supports --auto-correct.
+ # Cop supports --autocorrect.
#{cop_name}:
- Exclude:
YAML
end
end
diff --git a/spec/rubocop/formatter/graceful_formatter_spec.rb b/spec/rubocop/formatter/graceful_formatter_spec.rb
index 0e0c1d52067..1ed8533ac16 100644
--- a/spec/rubocop/formatter/graceful_formatter_spec.rb
+++ b/spec/rubocop/formatter/graceful_formatter_spec.rb
@@ -2,7 +2,8 @@
require 'fast_spec_helper'
require 'rspec-parameterized'
-require 'rubocop/rspec/shared_contexts'
+require 'rubocop'
+require 'rubocop/rspec/support'
require 'stringio'
require_relative '../../../rubocop/formatter/graceful_formatter'
@@ -225,14 +226,14 @@ RSpec.describe RuboCop::Formatter::GracefulFormatter, :isolated_environment do
cop_name: cop_name,
corrected?: false,
correctable?: false,
- severity: double(:severity, name: 'convention', code: :C),
+ severity: double(:severity, name: :convention, code: :C),
line: 5,
column: 23,
real_column: 23,
corrected_with_todo?: false,
message: "#{cop_name} message",
- location: double(:location, source_line: 'line', first_line: 1, last_line: 2),
- highlighted_area: double(:highlighted_area, begin_pos: 1, size: 2)
+ location: double(:location, source_line: 'line', first_line: 1, last_line: 1, single_line?: true),
+ highlighted_area: double(:highlighted_area, begin_pos: 1, size: 2, source_buffer: 'line', source: 'i')
)
# rubocop:enable RSpec/VerifiedDoubles
end
diff --git a/spec/rubocop/formatter/todo_formatter_spec.rb b/spec/rubocop/formatter/todo_formatter_spec.rb
index edd84632409..5494d518605 100644
--- a/spec/rubocop/formatter/todo_formatter_spec.rb
+++ b/spec/rubocop/formatter/todo_formatter_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
expect(todo_yml('B/AutoCorrect')).to eq(<<~YAML)
---
- # Cop supports --auto-correct.
+ # Cop supports --autocorrect.
B/AutoCorrect:
Exclude:
- 'd.rb'
@@ -309,18 +309,78 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
context 'without offenses detected' do
before do
+ todo_dir.write('A/Cop', yaml) if yaml
+ todo_dir.inspect_all
+
formatter.started(%w[a.rb b.rb])
formatter.file_finished('a.rb', [])
formatter.file_finished('b.rb', [])
formatter.finished(%w[a.rb b.rb])
+
+ todo_dir.delete_inspected
end
- it 'does not output anything' do
- expect(stdout.string).to eq('')
+ context 'without existing TODOs' do
+ let(:yaml) { nil }
+
+ it 'does not output anything' do
+ expect(stdout.string).to eq('')
+ end
+
+ it 'does not write any YAML files' do
+ expect(rubocop_todo_dir_listing).to be_empty
+ end
end
- it 'does not write any YAML files' do
- expect(rubocop_todo_dir_listing).to be_empty
+ context 'with existing TODOs' do
+ context 'when existing offenses only' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ A/Cop:
+ Exclude:
+ - x.rb
+ YAML
+ end
+
+ it 'does not output anything' do
+ expect(stdout.string).to eq('')
+ end
+
+ it 'does not write any YAML files' do
+ expect(rubocop_todo_dir_listing).to be_empty
+ end
+ end
+
+ context 'when in grace period' do
+ let(:yaml) do
+ <<~YAML
+ ---
+ A/Cop:
+ Details: grace period
+ Exclude:
+ - x.rb
+ YAML
+ end
+
+ it 'outputs its actions' do
+ expect(stdout.string).to eq(<<~OUTPUT)
+ Written to .rubocop_todo/a/cop.yml
+ OUTPUT
+ end
+
+ it 'creates YAML file with Details only', :aggregate_failures do
+ expect(rubocop_todo_dir_listing).to contain_exactly(
+ 'a/cop.yml'
+ )
+
+ expect(todo_yml('A/Cop')).to eq(<<~YAML)
+ ---
+ A/Cop:
+ Details: grace period
+ YAML
+ end
+ end
end
end
diff --git a/spec/rubocop_spec_helper.rb b/spec/rubocop_spec_helper.rb
index a37415a25de..cf747132ec1 100644
--- a/spec/rubocop_spec_helper.rb
+++ b/spec/rubocop_spec_helper.rb
@@ -8,13 +8,23 @@ require 'fast_spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
-RSpec.configure do |config|
- config.include RuboCop::RSpec::ExpectOffense, type: :rubocop
+require_relative './support/shared_contexts/rubocop_default_rspec_language_config_context'
+RSpec.configure do |config|
config.define_derived_metadata(file_path: %r{spec/rubocop}) do |metadata|
- metadata[:type] = :rubocop
+ # TODO: move DuplicateSpecLocation cop to RSpec::DuplicateSpecLocation
+ unless metadata[:type] == :rubocop_rspec
+ metadata[:type] = :rubocop
+ end
end
- # Include config shared context for all cop specs.
+ config.define_derived_metadata(file_path: %r{spec/rubocop/cop/rspec}) do |metadata|
+ metadata[:type] = :rubocop_rspec
+ end
+
+ config.include RuboCop::RSpec::ExpectOffense, type: :rubocop
+ config.include RuboCop::RSpec::ExpectOffense, type: :rubocop_rspec
+
config.include_context 'config', type: :rubocop
+ config.include_context 'with default RSpec/Language config', type: :rubocop_rspec
end
diff --git a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
index f96936c0a6f..c97226c1a2d 100644
--- a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
+++ b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
@@ -2,8 +2,8 @@
require 'fast_spec_helper'
require_relative '../../../../scripts/lib/glfm/update_example_snapshots'
-# IMPORTANT NOTE: See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/
-# for details on the implementation and usage of the `update_example_snapshots` script being tested.
+# IMPORTANT NOTE: See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#update-example-snapshotsrb-script
+# for details on the implementation and usage of the `update_example_snapshots.rb` script being tested.
# This developers guide contains diagrams and documentation of the script,
# including explanations and examples of all files it reads and writes.
#
@@ -16,17 +16,18 @@ require_relative '../../../../scripts/lib/glfm/update_example_snapshots'
# which runs a jest test environment. This results in each full run of the script
# taking between 30-60 seconds. The majority of this is spent loading the Rails environment.
#
-# However, only the `writing html.yml and prosemirror_json.yml` context is used
-# to test these slow sub-processes, and it only contains a single example.
+# However, only the `with full processing of static and WYSIWYG HTML` context is used
+# to test these slow sub-processes, and it only contains two examples.
#
# All other tests currently in the file pass the `skip_static_and_wysiwyg: true`
-# flag to `#process`, which skips the slow sub-processes. All of these tests
+# flag to `#process`, which skips the slow sub-processes. All of these other tests
# should run in sub-second time when the Spring pre-loader is used. This allows
# logic which is not directly related to the slow sub-processes to be TDD'd with a
# very rapid feedback cycle.
#
# Also, the textual content of the individual fixture file entries is also crafted to help
# indicate which scenarios which they are covering.
+# rubocop:disable RSpec/MultipleMemoizedHelpers
RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
subject { described_class.new }
@@ -34,9 +35,8 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
let(:glfm_spec_txt_path) { described_class::GLFM_SPEC_TXT_PATH }
let(:glfm_spec_txt_local_io) { StringIO.new(glfm_spec_txt_contents) }
let(:glfm_example_status_yml_path) { described_class::GLFM_EXAMPLE_STATUS_YML_PATH }
- let(:glfm_example_status_yml_io) { StringIO.new(glfm_example_status_yml_contents) }
let(:glfm_example_metadata_yml_path) { described_class::GLFM_EXAMPLE_METADATA_YML_PATH }
- let(:glfm_example_metadata_yml_io) { StringIO.new(glfm_example_metadata_yml_contents) }
+ let(:glfm_example_normalizations_yml_path) { described_class::GLFM_EXAMPLE_NORMALIZATIONS_YML_PATH }
# Example Snapshot (ES) output files
let(:es_examples_index_yml_path) { described_class::ES_EXAMPLES_INDEX_YML_PATH }
@@ -285,10 +285,25 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
YAML
end
+ let(:test1) { '\1\2URI_PREFIX\4' }
+
+ let(:glfm_example_normalizations_yml_contents) do
+ # NOTE: This heredoc identifier must be quoted because we are using control characters in the heredoc body.
+ # See https://stackoverflow.com/a/73831037/25192
+ <<~'YAML'
+ ---
+ # If a config file entry starts with `00_`, it will be skipped for validation that it exists in `examples_index.yml`
+ 00_shared:
+ 00_uri: &00_uri
+ - regex: '(href|data-src)(=")(.*?)(test-file\.(png|zip)")'
+ replacement: '\1\2URI_PREFIX\4'
+ YAML
+ end
+
let(:es_html_yml_io_existing_contents) do
<<~YAML
---
- 00_00_00__obsolete_entry_to_be_deleted__001:
+ 01_00_00__obsolete_entry_to_be_deleted__001:
canonical: |
This entry is no longer exists in the spec.txt, so it will be deleted.
static: |-
@@ -315,7 +330,7 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
let(:es_prosemirror_json_yml_io_existing_contents) do
<<~YAML
---
- 00_00_00__obsolete_entry_to_be_deleted__001: |-
+ 01_00_00__obsolete_entry_to_be_deleted__001: |-
{
"obsolete": "This entry is no longer exists in the spec.txt, and is not skipped, so it will be deleted."
}
@@ -356,9 +371,14 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
# input files
allow(File).to receive(:open).with(glfm_spec_txt_path) { glfm_spec_txt_local_io }
- allow(File).to receive(:open).with(glfm_example_status_yml_path) { glfm_example_status_yml_io }
+ allow(File).to receive(:open).with(glfm_example_status_yml_path) do
+ StringIO.new(glfm_example_status_yml_contents)
+ end
allow(File).to receive(:open).with(glfm_example_metadata_yml_path) do
- glfm_example_metadata_yml_io
+ StringIO.new(glfm_example_metadata_yml_contents)
+ end
+ allow(File).to receive(:open).with(glfm_example_normalizations_yml_path) do
+ StringIO.new(glfm_example_normalizations_yml_contents)
end
# output files
@@ -525,353 +545,404 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
end
end
- # rubocop:disable RSpec/MultipleMemoizedHelpers
- describe 'writing html.yml and prosemirror_json.yml' do
- let(:es_html_yml_contents) { reread_io(es_html_yml_io) }
- let(:es_prosemirror_json_yml_contents) { reread_io(es_prosemirror_json_yml_io) }
+ describe 'error handling when manually-curated input specification config files contain invalid example names:' do
+ let(:err_msg) do
+ /#{config_file}.*01_00_00__invalid__001.*does not have.*entry in.*#{described_class::ES_EXAMPLES_INDEX_YML_PATH}/m
+ end
- # NOTE: This example_status.yml is crafted in conjunction with expected_html_yml_contents
- # to test the behavior of the `skip_update_*` flags
- let(:glfm_example_status_yml_contents) do
+ let(:invalid_example_name_file_contents) do
<<~YAML
---
- 02_01_00__inlines__strong__002:
- # NOTE: 02_01_00__inlines__strong__002: is omitted from the existing prosemirror_json.yml file, and is also
- # skipped here, to show that an example does not need to exist in order to be skipped.
- # TODO: This should be changed to raise an error instead, to enforce that there cannot be orphaned
- # entries in glfm_example_status.yml. This task is captured in
- # https://gitlab.com/gitlab-org/gitlab/-/issues/361241#other-cleanup-tasks
- skip_update_example_snapshot_prosemirror_json: "skipping because JSON isn't cool enough"
- 03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
- skip_update_example_snapshot_html_static: "skipping because there's too much static"
- 04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
- skip_update_example_snapshot_html_wysiwyg: 'skipping because what you see is NOT what you get'
- skip_update_example_snapshot_prosemirror_json: "skipping because JSON still isn't cool enough"
- 05_01_00__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
- skip_update_example_snapshots: 'skipping this example because it is very bad'
- 05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
- skip_update_example_snapshots: 'skipping this example because we have manually modified it'
+ 01_00_00__invalid__001:
+ a: 1
YAML
end
- let(:expected_html_yml_contents) do
- <<~YAML
- ---
- 02_01_00__inlines__strong__001:
- canonical: |
- <p><strong>bold</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
- wysiwyg: |-
- <p><strong>bold</strong></p>
- 02_01_00__inlines__strong__002:
- canonical: |
- <p><strong>bold with more text</strong></p>
- static: |-
- <p data-sourcepos="1:1-1:23" dir="auto"><strong>bold with more text</strong></p>
- wysiwyg: |-
- <p><strong>bold with more text</strong></p>
- 02_03_00__inlines__strikethrough_extension__001:
- canonical: |
- <p><del>Hi</del> Hello, world!</p>
- static: |-
- <p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
- wysiwyg: |-
- <p><s>Hi</s> Hello, world!</p>
- 03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
- canonical: |
- <p><strong>bold</strong></p>
- wysiwyg: |-
- <p><strong>bold</strong></p>
- 03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001:
- canonical: |
- <p>Example in an H3</p>
- static: |-
- <p data-sourcepos="1:1-1:16" dir="auto">Example in an H3</p>
- wysiwyg: |-
- <p>Example in an H3</p>
- 04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
- canonical: |
- <p><strong>
- bold
- </strong></p>
- static: |-
- <strong>
- bold
- </strong>
- 05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
- canonical: |
- <p><strong>This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved</strong></p>
- static: |-
- <p>This is the manually modified static HTML which will be preserved</p>
- wysiwyg: |-
- <p>This is the manually modified WYSIWYG HTML which will be preserved</p>
- 06_01_00__api_request_overrides__group_upload_link__001:
- canonical: |
- <p><a href="groups-test-file">groups-test-file</a></p>
- static: |-
- <p data-sourcepos="1:1-1:45" dir="auto"><a href="/groups/glfm_group/-/uploads/groups-test-file" data-canonical-src="/uploads/groups-test-file" data-link="true" class="gfm">groups-test-file</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uploads/groups-test-file">groups-test-file</a></p>
- 06_02_00__api_request_overrides__project_repo_link__001:
- canonical: |
- <p><a href="projects-test-file">projects-test-file</a></p>
- static: |-
- <p data-sourcepos="1:1-1:40" dir="auto"><a href="/glfm_group/glfm_project/-/blob/master/projects-test-file">projects-test-file</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="projects-test-file">projects-test-file</a></p>
- 06_03_00__api_request_overrides__project_snippet_ref__001:
- canonical: |
- <p>This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888">$88888</a>
- static: |-
- <p data-sourcepos="1:1-1:53" dir="auto">This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888" data-reference-type="snippet" data-original="$88888" data-link="false" data-link-reference="false" data-project="77777" data-snippet="88888" data-container="body" data-placement="top" title="glfm_project_snippet" class="gfm gfm-snippet has-tooltip">$88888</a></p>
- wysiwyg: |-
- <p>This project snippet ID reference IS filtered: $88888</p>
- 06_04_00__api_request_overrides__personal_snippet_ref__001:
- canonical: |
- <p>This personal snippet ID reference is NOT filtered: $99999</p>
- static: |-
- <p data-sourcepos="1:1-1:58" dir="auto">This personal snippet ID reference is NOT filtered: $99999</p>
- wysiwyg: |-
- <p>This personal snippet ID reference is NOT filtered: $99999</p>
- 06_05_00__api_request_overrides__project_wiki_link__001:
- canonical: |
- <p><a href="project-wikis-test-file">project-wikis-test-file</a></p>
- static: |-
- <p data-sourcepos="1:1-1:50" dir="auto"><a href="/glfm_group/glfm_project/-/wikis/project-wikis-test-file" data-canonical-src="project-wikis-test-file">project-wikis-test-file</a></p>
- wysiwyg: |-
- <p><a target="_blank" rel="noopener noreferrer nofollow" href="project-wikis-test-file">project-wikis-test-file</a></p>
- YAML
+ context 'for glfm_example_status.yml' do
+ let(:config_file) { described_class::GLFM_EXAMPLE_STATUS_YML_PATH }
+ let(:glfm_example_status_yml_contents) { invalid_example_name_file_contents }
+
+ it 'raises error' do
+ expect { subject.process(skip_static_and_wysiwyg: true) }.to raise_error(err_msg)
+ end
end
- let(:expected_prosemirror_json_contents) do
- <<~YAML
- ---
- 02_01_00__inlines__strong__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bold"
- }
- ]
- }
- ]
- }
- 02_03_00__inlines__strikethrough_extension__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "strike"
- }
- ],
- "text": "Hi"
- },
- {
- "type": "text",
- "text": " Hello, world!"
- }
- ]
- }
- ]
- }
- 03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "bold"
- }
- ],
- "text": "bold"
- }
- ]
- }
- ]
- }
- 03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "Example in an H3"
- }
- ]
- }
- ]
- }
- 04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001: |-
- {
- "existing": "This entry is manually modified and preserved because skip_update_example_snapshot_prosemirror_json will be truthy"
- }
- 05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001: |-
- {
- "existing": "This entry is manually modified and preserved because skip_update_example_snapshots will be truthy"
- }
- 06_01_00__api_request_overrides__group_upload_link__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "/uploads/groups-test-file",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": "/uploads/groups-test-file",
- "isReference": false
- }
- }
- ],
- "text": "groups-test-file"
- }
- ]
- }
- ]
- }
- 06_02_00__api_request_overrides__project_repo_link__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "projects-test-file",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": "projects-test-file",
- "isReference": false
- }
- }
- ],
- "text": "projects-test-file"
- }
- ]
- }
- ]
- }
- 06_03_00__api_request_overrides__project_snippet_ref__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "This project snippet ID reference IS filtered: $88888"
- }
- ]
- }
- ]
- }
- 06_04_00__api_request_overrides__personal_snippet_ref__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "text": "This personal snippet ID reference is NOT filtered: $99999"
- }
- ]
- }
- ]
- }
- 06_05_00__api_request_overrides__project_wiki_link__001: |-
- {
- "type": "doc",
- "content": [
- {
- "type": "paragraph",
- "content": [
- {
- "type": "text",
- "marks": [
- {
- "type": "link",
- "attrs": {
- "href": "project-wikis-test-file",
- "target": "_blank",
- "class": null,
- "title": null,
- "canonicalSrc": "project-wikis-test-file",
- "isReference": false
- }
- }
- ],
- "text": "project-wikis-test-file"
- }
- ]
- }
- ]
- }
- YAML
+ context 'for glfm_example_metadata.yml' do
+ let(:config_file) { described_class::GLFM_EXAMPLE_METADATA_YML_PATH }
+ let(:glfm_example_metadata_yml_contents) { invalid_example_name_file_contents }
+
+ it 'raises error' do
+ expect { subject.process(skip_static_and_wysiwyg: true) }.to raise_error(err_msg)
+ end
end
- before do
- # NOTE: This is a necessary to avoid an `error Couldn't find an integrity file` error
- # when invoking `yarn jest ...` on CI from within an RSpec job. It could be solved by
- # adding `.yarn-install` to be included in the RSpec CI job, but that would be a performance
- # hit to all RSpec jobs. We could also make a dedicate job just for this spec. However,
- # since this is just a single script, those options may not be justified.
- described_class.new.run_external_cmd('yarn install') if ENV['CI']
+ context 'for glfm_example_normalizations.yml' do
+ let(:config_file) { described_class::GLFM_EXAMPLE_NORMALIZATIONS_YML_PATH }
+ let(:glfm_example_normalizations_yml_contents) { invalid_example_name_file_contents }
+
+ it 'raises error' do
+ expect { subject.process(skip_static_and_wysiwyg: true) }.to raise_error(err_msg)
+ end
end
+ end
+
+ context 'with full processing of static and WYSIWYG HTML' do
+ before(:all) do
+ # NOTE: It is a necessary to do a `yarn install` in order to ensure that
+ # `scripts/lib/glfm/render_wysiwyg_html_and_json.js` can be invoked successfully
+ # on the CI job (which will not be set up for frontend specs since this is
+ # an RSpec spec), or if the current yarn dependencies are not installed locally.
+ described_class.new.run_external_cmd('yarn install --frozen-lockfile')
+ end
+
+ describe 'manually-curated input specification config files' do
+ let(:glfm_example_status_yml_contents) { '' }
+ let(:glfm_example_metadata_yml_contents) { '' }
+ let(:glfm_example_normalizations_yml_contents) { '' }
+
+ it 'can be empty' do
+ expect { subject.process }.not_to raise_error
+ end
+ end
+
+ describe 'writing html.yml and prosemirror_json.yml' do
+ let(:es_html_yml_contents) { reread_io(es_html_yml_io) }
+ let(:es_prosemirror_json_yml_contents) { reread_io(es_prosemirror_json_yml_io) }
+
+ # NOTE: This example_status.yml is crafted in conjunction with expected_html_yml_contents
+ # to test the behavior of the `skip_update_*` flags
+ let(:glfm_example_status_yml_contents) do
+ <<~YAML
+ ---
+ 02_01_00__inlines__strong__002:
+ # NOTE: 02_01_00__inlines__strong__002: is omitted from the existing prosemirror_json.yml file, and is also
+ # skipped here, to show that an example does not need to exist in order to be skipped.
+ # TODO: This should be changed to raise an error instead, to enforce that there cannot be orphaned
+ # entries in glfm_example_status.yml. This task is captured in
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/361241#other-cleanup-tasks
+ skip_update_example_snapshot_prosemirror_json: "skipping because JSON isn't cool enough"
+ 03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
+ skip_update_example_snapshot_html_static: "skipping because there's too much static"
+ 04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
+ skip_update_example_snapshot_html_wysiwyg: 'skipping because what you see is NOT what you get'
+ skip_update_example_snapshot_prosemirror_json: "skipping because JSON still isn't cool enough"
+ 05_01_00__third_gitlab_specific_section_with_skipped_examples__strong_but_skipped__001:
+ skip_update_example_snapshots: 'skipping this example because it is very bad'
+ 05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
+ skip_update_example_snapshots: 'skipping this example because we have manually modified it'
+ YAML
+ end
+
+ let(:expected_html_yml_contents) do
+ <<~YAML
+ ---
+ 02_01_00__inlines__strong__001:
+ canonical: |
+ <p><strong>bold</strong></p>
+ static: |-
+ <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
+ wysiwyg: |-
+ <p><strong>bold</strong></p>
+ 02_01_00__inlines__strong__002:
+ canonical: |
+ <p><strong>bold with more text</strong></p>
+ static: |-
+ <p data-sourcepos="1:1-1:23" dir="auto"><strong>bold with more text</strong></p>
+ wysiwyg: |-
+ <p><strong>bold with more text</strong></p>
+ 02_03_00__inlines__strikethrough_extension__001:
+ canonical: |
+ <p><del>Hi</del> Hello, world!</p>
+ static: |-
+ <p data-sourcepos="1:1-1:20" dir="auto"><del>Hi</del> Hello, world!</p>
+ wysiwyg: |-
+ <p><s>Hi</s> Hello, world!</p>
+ 03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001:
+ canonical: |
+ <p><strong>bold</strong></p>
+ wysiwyg: |-
+ <p><strong>bold</strong></p>
+ 03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001:
+ canonical: |
+ <p>Example in an H3</p>
+ static: |-
+ <p data-sourcepos="1:1-1:16" dir="auto">Example in an H3</p>
+ wysiwyg: |-
+ <p>Example in an H3</p>
+ 04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001:
+ canonical: |
+ <p><strong>
+ bold
+ </strong></p>
+ static: |-
+ <strong>
+ bold
+ </strong>
+ 05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001:
+ canonical: |
+ <p><strong>This example will have its manually modified static HTML, WYSIWYG HTML, and ProseMirror JSON preserved</strong></p>
+ static: |-
+ <p>This is the manually modified static HTML which will be preserved</p>
+ wysiwyg: |-
+ <p>This is the manually modified WYSIWYG HTML which will be preserved</p>
+ 06_01_00__api_request_overrides__group_upload_link__001:
+ canonical: |
+ <p><a href="groups-test-file">groups-test-file</a></p>
+ static: |-
+ <p data-sourcepos="1:1-1:45" dir="auto"><a href="/groups/glfm_group/-/uploads/groups-test-file" data-canonical-src="/uploads/groups-test-file" data-link="true" class="gfm">groups-test-file</a></p>
+ wysiwyg: |-
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="/uploads/groups-test-file">groups-test-file</a></p>
+ 06_02_00__api_request_overrides__project_repo_link__001:
+ canonical: |
+ <p><a href="projects-test-file">projects-test-file</a></p>
+ static: |-
+ <p data-sourcepos="1:1-1:40" dir="auto"><a href="/glfm_group/glfm_project/-/blob/master/projects-test-file">projects-test-file</a></p>
+ wysiwyg: |-
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="projects-test-file">projects-test-file</a></p>
+ 06_03_00__api_request_overrides__project_snippet_ref__001:
+ canonical: |
+ <p>This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888">$88888</a>
+ static: |-
+ <p data-sourcepos="1:1-1:53" dir="auto">This project snippet ID reference IS filtered: <a href="/glfm_group/glfm_project/-/snippets/88888" data-reference-type="snippet" data-original="$88888" data-link="false" data-link-reference="false" data-project="77777" data-snippet="88888" data-container="body" data-placement="top" title="glfm_project_snippet" class="gfm gfm-snippet has-tooltip">$88888</a></p>
+ wysiwyg: |-
+ <p>This project snippet ID reference IS filtered: $88888</p>
+ 06_04_00__api_request_overrides__personal_snippet_ref__001:
+ canonical: |
+ <p>This personal snippet ID reference is NOT filtered: $99999</p>
+ static: |-
+ <p data-sourcepos="1:1-1:58" dir="auto">This personal snippet ID reference is NOT filtered: $99999</p>
+ wysiwyg: |-
+ <p>This personal snippet ID reference is NOT filtered: $99999</p>
+ 06_05_00__api_request_overrides__project_wiki_link__001:
+ canonical: |
+ <p><a href="project-wikis-test-file">project-wikis-test-file</a></p>
+ static: |-
+ <p data-sourcepos="1:1-1:50" dir="auto"><a href="/glfm_group/glfm_project/-/wikis/project-wikis-test-file" data-canonical-src="project-wikis-test-file">project-wikis-test-file</a></p>
+ wysiwyg: |-
+ <p><a target="_blank" rel="noopener noreferrer nofollow" href="project-wikis-test-file">project-wikis-test-file</a></p>
+ YAML
+ end
+
+ let(:expected_prosemirror_json_contents) do
+ <<~YAML
+ ---
+ 02_01_00__inlines__strong__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
+ "text": "bold"
+ }
+ ]
+ }
+ ]
+ }
+ 02_03_00__inlines__strikethrough_extension__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "strike"
+ }
+ ],
+ "text": "Hi"
+ },
+ {
+ "type": "text",
+ "text": " Hello, world!"
+ }
+ ]
+ }
+ ]
+ }
+ 03_01_00__first_gitlab_specific_section_with_examples__strong_but_with_two_asterisks__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "bold"
+ }
+ ],
+ "text": "bold"
+ }
+ ]
+ }
+ ]
+ }
+ 03_02_01__first_gitlab_specific_section_with_examples__h2_which_contains_an_h3__example_in_an_h3__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "text": "Example in an H3"
+ }
+ ]
+ }
+ ]
+ }
+ 04_01_00__second_gitlab_specific_section_with_examples__strong_but_with_html__001: |-
+ {
+ "existing": "This entry is manually modified and preserved because skip_update_example_snapshot_prosemirror_json will be truthy"
+ }
+ 05_02_00__third_gitlab_specific_section_with_skipped_examples__strong_but_manually_modified_and_skipped__001: |-
+ {
+ "existing": "This entry is manually modified and preserved because skip_update_example_snapshots will be truthy"
+ }
+ 06_01_00__api_request_overrides__group_upload_link__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "/uploads/groups-test-file",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": "/uploads/groups-test-file",
+ "isReference": false
+ }
+ }
+ ],
+ "text": "groups-test-file"
+ }
+ ]
+ }
+ ]
+ }
+ 06_02_00__api_request_overrides__project_repo_link__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "projects-test-file",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": "projects-test-file",
+ "isReference": false
+ }
+ }
+ ],
+ "text": "projects-test-file"
+ }
+ ]
+ }
+ ]
+ }
+ 06_03_00__api_request_overrides__project_snippet_ref__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "text": "This project snippet ID reference IS filtered: $88888"
+ }
+ ]
+ }
+ ]
+ }
+ 06_04_00__api_request_overrides__personal_snippet_ref__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "text": "This personal snippet ID reference is NOT filtered: $99999"
+ }
+ ]
+ }
+ ]
+ }
+ 06_05_00__api_request_overrides__project_wiki_link__001: |-
+ {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {
+ "type": "text",
+ "marks": [
+ {
+ "type": "link",
+ "attrs": {
+ "href": "project-wikis-test-file",
+ "target": "_blank",
+ "class": null,
+ "title": null,
+ "canonicalSrc": "project-wikis-test-file",
+ "isReference": false
+ }
+ }
+ ],
+ "text": "project-wikis-test-file"
+ }
+ ]
+ }
+ ]
+ }
+ YAML
+ end
- # NOTE: Both `html.yml` and `prosemirror_json.yml` generation are tested in a single example, to
- # avoid slower tests, because generating the static HTML is slow due to the need to invoke
- # the rails environment. We could have separate sections, but this would require an extra flag
- # to the `process` method to independently skip static vs. WYSIWYG, which is not worth the effort.
- it 'writes the correct content', :unlimited_max_formatted_output_length do
- # expectation that skipping message is only output once per example
- expect(subject).to receive(:output).once.with(/reason.*skipping this example because it is very bad/i)
+ # NOTE: Both `html.yml` and `prosemirror_json.yml` generation are tested in a single example, to
+ # avoid slower tests, because generating the static HTML is slow due to the need to invoke
+ # the rails environment. We could have separate sections, but this would require an extra flag
+ # to the `process` method to independently skip static vs. WYSIWYG, which is not worth the effort.
+ it 'writes the correct content', :unlimited_max_formatted_output_length do
+ # expectation that skipping message is only output once per example
+ expect(subject).to receive(:output).once.with(/reason.*skipping this example because it is very bad/i)
- subject.process
+ subject.process
- expect(es_html_yml_contents).to eq(expected_html_yml_contents)
- expect(es_prosemirror_json_yml_contents).to eq(expected_prosemirror_json_contents)
+ expect(es_html_yml_contents).to eq(expected_html_yml_contents)
+ expect(es_prosemirror_json_yml_contents).to eq(expected_prosemirror_json_contents)
+ end
end
end
# rubocop:enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/scripts/lib/glfm/update_specification_spec.rb b/spec/scripts/lib/glfm/update_specification_spec.rb
index 9fb671e0016..852b2b580e6 100644
--- a/spec/scripts/lib/glfm/update_specification_spec.rb
+++ b/spec/scripts/lib/glfm/update_specification_spec.rb
@@ -1,21 +1,53 @@
# frozen_string_literal: true
+
require 'fast_spec_helper'
require_relative '../../../../scripts/lib/glfm/update_specification'
-
+require_relative '../../../support/helpers/next_instance_of'
+
+# IMPORTANT NOTE: See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#update-specificationrb-script
+# for details on the implementation and usage of the `update_specification.rb` script being tested.
+# This developers guide contains diagrams and documentation of the script,
+# including explanations and examples of all files it reads and writes.
+#
+# Note that this test is not structured in a traditional way, with multiple examples
+# to cover all different scenarios. Instead, the contents of the stubbed test fixture
+# files are crafted to cover multiple scenarios within a single example run.
+#
+# This is because the invocation of the full script is slow, since it executes
+# a subshell for processing, which runs a full Rails environment.
+# This results in each full run of the script taking between 30-60 seconds.
+# The majority of this is spent loading the Rails environment.
+#
+# However, only the `with generation of spec.html` context is used
+# to test this slow sub-process, and it only contains one example.
+#
+# All other tests currently in the file pass the `skip_spec_html_generation: true`
+# flag to `#process`, which skips the slow sub-process. All of these other tests
+# should run in sub-second time when the Spring pre-loader is used. This allows
+# logic which is not directly related to the slow sub-processes to be TDD'd with a
+# very rapid feedback cycle.
RSpec.describe Glfm::UpdateSpecification, '#process' do
+ include NextInstanceOf
+
subject { described_class.new }
let(:ghfm_spec_txt_uri) { described_class::GHFM_SPEC_TXT_URI }
+ let(:ghfm_spec_txt_uri_parsed) { instance_double(URI::HTTPS, :ghfm_spec_txt_uri_parsed) }
let(:ghfm_spec_txt_uri_io) { StringIO.new(ghfm_spec_txt_contents) }
- let(:ghfm_spec_txt_path) { described_class::GHFM_SPEC_TXT_PATH }
+ let(:ghfm_spec_md_path) { described_class::GHFM_SPEC_MD_PATH }
let(:ghfm_spec_txt_local_io) { StringIO.new(ghfm_spec_txt_contents) }
- let(:glfm_intro_txt_path) { described_class::GLFM_INTRO_TXT_PATH }
- let(:glfm_intro_txt_io) { StringIO.new(glfm_intro_txt_contents) }
- let(:glfm_examples_txt_path) { described_class::GLFM_EXAMPLES_TXT_PATH }
- let(:glfm_examples_txt_io) { StringIO.new(glfm_examples_txt_contents) }
+ let(:glfm_intro_md_path) { described_class::GLFM_INTRO_MD_PATH }
+ let(:glfm_intro_md_io) { StringIO.new(glfm_intro_md_contents) }
+ let(:glfm_official_specification_examples_md_path) { described_class::GLFM_OFFICIAL_SPECIFICATION_EXAMPLES_MD_PATH }
+ let(:glfm_official_specification_examples_md_io) { StringIO.new(glfm_official_specification_examples_md_contents) }
+ let(:glfm_internal_extension_examples_md_path) { described_class::GLFM_INTERNAL_EXTENSION_EXAMPLES_MD_PATH }
+ let(:glfm_internal_extension_examples_md_io) { StringIO.new(glfm_internal_extension_examples_md_contents) }
let(:glfm_spec_txt_path) { described_class::GLFM_SPEC_TXT_PATH }
let(:glfm_spec_txt_io) { StringIO.new }
+ let(:glfm_spec_html_path) { described_class::GLFM_SPEC_HTML_PATH }
+ let(:glfm_spec_html_io) { StringIO.new }
+ let(:markdown_tempfile_io) { StringIO.new }
let(:ghfm_spec_txt_contents) do
<<~MARKDOWN
@@ -52,7 +84,7 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
MARKDOWN
end
- let(:glfm_intro_txt_contents) do
+ let(:glfm_intro_md_contents) do
# language=Markdown
<<~MARKDOWN
# Introduction
@@ -63,9 +95,17 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
MARKDOWN
end
- let(:glfm_examples_txt_contents) do
+ let(:glfm_official_specification_examples_md_contents) do
<<~MARKDOWN
- # GitLab-Specific Section with Examples
+ # Official Specification Section with Examples
+
+ Some examples.
+ MARKDOWN
+ end
+
+ let(:glfm_internal_extension_examples_md_contents) do
+ <<~MARKDOWN
+ # Internal Extension Section with Examples
Some examples.
MARKDOWN
@@ -73,44 +113,66 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
before do
# Mock default ENV var values
- allow(ENV).to receive(:[]).with('UPDATE_GHFM_SPEC_TXT').and_return(nil)
+ allow(ENV).to receive(:[]).with('UPDATE_GHFM_SPEC_MD').and_return(nil)
allow(ENV).to receive(:[]).and_call_original
# We mock out the URI and local file IO objects with real StringIO, instead of just mock
# objects. This gives better and more realistic coverage, while still avoiding
# actual network and filesystem I/O during the spec run.
- allow(URI).to receive(:open).with(ghfm_spec_txt_uri) { ghfm_spec_txt_uri_io }
- allow(File).to receive(:open).with(ghfm_spec_txt_path) { ghfm_spec_txt_local_io }
- allow(File).to receive(:open).with(glfm_intro_txt_path) { glfm_intro_txt_io }
- allow(File).to receive(:open).with(glfm_examples_txt_path) { glfm_examples_txt_io }
+
+ # input files
+ allow(URI).to receive(:parse).with(ghfm_spec_txt_uri).and_return(ghfm_spec_txt_uri_parsed)
+ allow(ghfm_spec_txt_uri_parsed).to receive(:open).and_return(ghfm_spec_txt_uri_io)
+ allow(File).to receive(:open).with(ghfm_spec_md_path) { ghfm_spec_txt_local_io }
+ allow(File).to receive(:open).with(glfm_intro_md_path) { glfm_intro_md_io }
+ allow(File).to receive(:open).with(glfm_official_specification_examples_md_path) do
+ glfm_official_specification_examples_md_io
+ end
+ allow(File).to receive(:open).with(glfm_internal_extension_examples_md_path) do
+ glfm_internal_extension_examples_md_io
+ end
+
+ # output files
allow(File).to receive(:open).with(glfm_spec_txt_path, 'w') { glfm_spec_txt_io }
+ allow(File).to receive(:open).with(glfm_spec_html_path, 'w') { glfm_spec_html_io }
+
+ # Allow normal opening of Tempfile files created during script execution.
+ tempfile_basenames = [
+ described_class::MARKDOWN_TEMPFILE_BASENAME[0],
+ described_class::STATIC_HTML_TEMPFILE_BASENAME[0]
+ ].join('|')
+ # NOTE: This approach with a single regex seems to be the only way this can work. If you
+ # attempt to have multiple `allow...and_call_original` with `any_args`, the mocked
+ # parameter matching will fail to match the second one.
+ tempfiles_regex = /(#{tempfile_basenames})/
+ allow(File).to receive(:open).with(tempfiles_regex, any_args).and_call_original
# Prevent console output when running tests
allow(subject).to receive(:output)
end
describe 'retrieving latest GHFM spec.txt' do
- context 'when UPDATE_GHFM_SPEC_TXT is not true (default)' do
+ context 'when UPDATE_GHFM_SPEC_MD is not true (default)' do
it 'does not download' do
- expect(URI).not_to receive(:open).with(ghfm_spec_txt_uri)
+ expect(URI).not_to receive(:parse).with(ghfm_spec_txt_uri)
- subject.process
+ subject.process(skip_spec_html_generation: true)
expect(reread_io(ghfm_spec_txt_local_io)).to eq(ghfm_spec_txt_contents)
end
end
- context 'when UPDATE_GHFM_SPEC_TXT is true' do
+ context 'when UPDATE_GHFM_SPEC_MD is true' do
let(:ghfm_spec_txt_local_io) { StringIO.new }
before do
- allow(ENV).to receive(:[]).with('UPDATE_GHFM_SPEC_TXT').and_return('true')
- allow(File).to receive(:open).with(ghfm_spec_txt_path, 'w') { ghfm_spec_txt_local_io }
+ allow(ENV).to receive(:[]).with('UPDATE_GHFM_SPEC_MD').and_return('true')
+ allow(File).to receive(:open).with(ghfm_spec_md_path, 'w') { ghfm_spec_txt_local_io }
end
context 'with success' do
it 'downloads and saves' do
- subject.process
+ subject.process(skip_spec_html_generation: true)
expect(reread_io(ghfm_spec_txt_local_io)).to eq(ghfm_spec_txt_contents)
end
@@ -128,7 +190,9 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
end
it 'raises an error' do
- expect { subject.process }.to raise_error /version mismatch.*expected.*29.*got.*30/i
+ expect do
+ subject.process(skip_spec_html_generation: true)
+ end.to raise_error /version mismatch.*expected.*29.*got.*30/i
end
end
@@ -136,7 +200,7 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
let(:ghfm_spec_txt_contents) { '' }
it 'raises an error if lines cannot be read' do
- expect { subject.process }.to raise_error /unable to read lines/i
+ expect { subject.process(skip_spec_html_generation: true) }.to raise_error /unable to read lines/i
end
end
@@ -146,7 +210,7 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
end
it 'raises an error if file is blank' do
- expect { subject.process }.to raise_error /unable to read string/i
+ expect { subject.process(skip_spec_html_generation: true) }.to raise_error /unable to read string/i
end
end
end
@@ -157,7 +221,7 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
let(:glfm_contents) { reread_io(glfm_spec_txt_io) }
before do
- subject.process
+ subject.process(skip_spec_html_generation: true)
end
it 'replaces the header text with the GitLab version' do
@@ -170,14 +234,18 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
it 'replaces the intro section with the GitLab version' do
expect(glfm_contents).not_to match(/What is GitHub Flavored Markdown/m)
- expect(glfm_contents).to match(/#{Regexp.escape(glfm_intro_txt_contents)}/m)
+ expect(glfm_contents).to match(/#{Regexp.escape(glfm_intro_md_contents)}/m)
end
- it 'inserts the GitLab examples sections before the appendix section' do
+ it 'inserts the GitLab official spec and internal extension examples sections before the appendix section' do
expected = <<~MARKDOWN
End of last GitHub examples section.
- # GitLab-Specific Section with Examples
+ # Official Specification Section with Examples
+
+ Some examples.
+
+ # Internal Extension Section with Examples
Some examples.
@@ -189,6 +257,51 @@ RSpec.describe Glfm::UpdateSpecification, '#process' do
end
end
+ describe 'writing GLFM spec.html' do
+ let(:glfm_contents) { reread_io(glfm_spec_html_io) }
+
+ before do
+ subject.process
+ end
+
+ it 'renders HTML from spec.txt', :unlimited_max_formatted_output_length do
+ expected = <<~HTML
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="1:1-4:3" class="code highlight js-syntax-highlight language-yaml" lang="yaml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="yaml"><span class="na">title</span><span class="pi">:</span> <span class="s">GitLab Flavored Markdown (GLFM) Spec</span></span>
+ <span id="LC2" class="line" lang="yaml"><span class="na">version</span><span class="pi">:</span> <span class="s">alpha</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <h1 data-sourcepos="6:1-6:14" dir="auto">
+ <a id="user-content-introduction" class="anchor" href="#introduction" aria-hidden="true"></a>Introduction</h1>
+ <h2 data-sourcepos="8:1-8:36" dir="auto">
+ <a id="user-content-what-is-gitlab-flavored-markdown" class="anchor" href="#what-is-gitlab-flavored-markdown" aria-hidden="true"></a>What is GitLab Flavored Markdown?</h2>
+ <p data-sourcepos="10:1-10:42" dir="auto">Intro text about GitLab Flavored Markdown.</p>
+ <h1 data-sourcepos="12:1-12:23" dir="auto">
+ <a id="user-content-section-with-examples" class="anchor" href="#section-with-examples" aria-hidden="true"></a>Section with Examples</h1>
+ <h2 data-sourcepos="14:1-14:9" dir="auto">
+ <a id="user-content-strong" class="anchor" href="#strong" aria-hidden="true"></a>Strong</h2>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre data-sourcepos="16:1-20:32" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="example" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">__bold__</span>
+ <span id="LC2" class="line" lang="plaintext">.</span>
+ <span id="LC3" class="line" lang="plaintext">&lt;p&gt;&lt;strong&gt;bold&lt;/strong&gt;&lt;/p&gt;</span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ <p data-sourcepos="22:1-22:36" dir="auto">End of last GitHub examples section.</p>
+ <h1 data-sourcepos="24:1-24:46" dir="auto">
+ <a id="user-content-official-specification-section-with-examples" class="anchor" href="#official-specification-section-with-examples" aria-hidden="true"></a>Official Specification Section with Examples</h1>
+ <p data-sourcepos="26:1-26:14" dir="auto">Some examples.</p>
+ <h1 data-sourcepos="28:1-28:42" dir="auto">
+ <a id="user-content-internal-extension-section-with-examples" class="anchor" href="#internal-extension-section-with-examples" aria-hidden="true"></a>Internal Extension Section with Examples</h1>
+ <p data-sourcepos="30:1-30:14" dir="auto">Some examples.</p>
+
+ <h1 data-sourcepos="34:1-34:10" dir="auto">
+ <a id="user-content-appendix" class="anchor" href="#appendix" aria-hidden="true"></a>Appendix</h1>
+ <p data-sourcepos="36:1-36:14" dir="auto">Appendix text.</p>
+ HTML
+ expect(glfm_contents).to be == expected
+ end
+ end
+
def reread_io(io)
# Reset the io StringIO to the beginning position of the buffer
io.seek(0)
diff --git a/spec/scripts/lib/glfm/verify_all_generated_files_are_up_to_date_spec.rb b/spec/scripts/lib/glfm/verify_all_generated_files_are_up_to_date_spec.rb
new file mode 100644
index 00000000000..fca037c9ff3
--- /dev/null
+++ b/spec/scripts/lib/glfm/verify_all_generated_files_are_up_to_date_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+require 'fast_spec_helper'
+require_relative '../../../../scripts/lib/glfm/verify_all_generated_files_are_up_to_date'
+
+# IMPORTANT NOTE: See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#verify-all-generated-files-are-up-to-daterb-script
+# for details on the implementation and usage of the `verify_all_generated_files_are_up_to_date.rb` script being tested.
+# This developers guide contains diagrams and documentation of the script,
+# including explanations and examples of all files it reads and writes.
+RSpec.describe Glfm::VerifyAllGeneratedFilesAreUpToDate, '#process' do
+ subject { described_class.new }
+
+ let(:output_path) { described_class::GLFM_SPEC_OUTPUT_PATH }
+ let(:snapshots_path) { described_class::EXAMPLE_SNAPSHOTS_PATH }
+ let(:verify_cmd) { "git status --porcelain #{output_path} #{snapshots_path}" }
+
+ before do
+ # Prevent console output when running tests
+ allow(subject).to receive(:output)
+ end
+
+ context 'when repo is dirty' do
+ before do
+ # Simulate a dirty repo
+ allow(subject).to receive(:run_external_cmd).with(verify_cmd).and_return(" M #{output_path}")
+ end
+
+ it 'raises an error', :unlimited_max_formatted_output_length do
+ expect { subject.process }.to raise_error(/Cannot run.*uncommitted changes.*#{output_path}/m)
+ end
+ end
+
+ context 'when repo is clean' do
+ before do
+ # Mock out all yarn install and script execution
+ allow(subject).to receive(:run_external_cmd).with('yarn install --frozen-lockfile')
+ allow(subject).to receive(:run_external_cmd).with(/update-specification.rb/)
+ allow(subject).to receive(:run_external_cmd).with(/update-example-snapshots.rb/)
+ end
+
+ context 'when all generated files are up to date' do
+ before do
+ # Simulate a clean repo, then simulate no changes to generated files
+ allow(subject).to receive(:run_external_cmd).twice.with(verify_cmd).and_return('', '')
+ end
+
+ it 'does not raise an error', :unlimited_max_formatted_output_length do
+ expect { subject.process }.not_to raise_error
+ end
+ end
+
+ context 'when generated file(s) are not up to date' do
+ before do
+ # Simulate a clean repo, then simulate changes to generated files
+ allow(subject).to receive(:run_external_cmd).twice.with(verify_cmd).and_return('', "M #{snapshots_path}")
+ end
+
+ it 'raises an error', :unlimited_max_formatted_output_length do
+ expect { subject.process }.to raise_error(/following files were modified.*#{snapshots_path}/m)
+ end
+ end
+ end
+end
diff --git a/spec/scripts/trigger-build_spec.rb b/spec/scripts/trigger-build_spec.rb
index 46023d5823d..ac8e3c7797c 100644
--- a/spec/scripts/trigger-build_spec.rb
+++ b/spec/scripts/trigger-build_spec.rb
@@ -21,8 +21,6 @@ RSpec.describe Trigger do
'GITLAB_USER_NAME' => 'gitlab_user_name',
'GITLAB_USER_LOGIN' => 'gitlab_user_login',
'QA_IMAGE' => 'qa_image',
- 'OMNIBUS_GITLAB_CACHE_UPDATE' => 'omnibus_gitlab_cache_update',
- 'OMNIBUS_GITLAB_PROJECT_ACCESS_TOKEN' => nil,
'DOCS_PROJECT_API_TOKEN' => nil
}
end
diff --git a/spec/serializers/board_serializer_spec.rb b/spec/serializers/board_serializer_spec.rb
deleted file mode 100644
index 9e6d5a93d53..00000000000
--- a/spec/serializers/board_serializer_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BoardSerializer do
- let(:resource) { create(:board) }
- let(:json_entity) do
- described_class.new
- .represent(resource, serializer: serializer)
- .with_indifferent_access
- end
-
- context 'serialization' do
- let(:serializer) { 'board' }
-
- it 'matches issue_sidebar json schema' do
- expect(json_entity).to match_schema('board')
- end
- end
-end
diff --git a/spec/serializers/board_simple_entity_spec.rb b/spec/serializers/board_simple_entity_spec.rb
deleted file mode 100644
index c5ab9833adf..00000000000
--- a/spec/serializers/board_simple_entity_spec.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BoardSimpleEntity do
- let_it_be(:project) { create(:project) }
- let_it_be(:board) { create(:board, project: project) }
-
- subject { described_class.new(board).as_json }
-
- describe '#name' do
- it 'has `name` attribute' do
- is_expected.to include(:name)
- end
- end
-end
diff --git a/spec/serializers/build_trace_entity_spec.rb b/spec/serializers/build_trace_entity_spec.rb
index 82bd56caaac..f3d1d0a21a8 100644
--- a/spec/serializers/build_trace_entity_spec.rb
+++ b/spec/serializers/build_trace_entity_spec.rb
@@ -38,8 +38,9 @@ RSpec.describe BuildTraceEntity do
end
it 'includes the trace content in json' do
- expect(subject[:lines]).to eq([
- { offset: 0, content: [{ text: 'the-trace' }] }
- ])
+ expect(subject[:lines]).to eq(
+ [
+ { offset: 0, content: [{ text: 'the-trace' }] }
+ ])
end
end
diff --git a/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb b/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb
index ddeeb367afe..6fe1376b890 100644
--- a/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb
+++ b/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb
@@ -18,22 +18,23 @@ RSpec.describe Ci::DailyBuildGroupReportResultSerializer do
let(:json) { Gitlab::Json.parse(serializer.to_json) }
it 'returns an array of group results' do
- expect(json).to eq([
- {
- 'group_name' => 'rspec',
- 'data' => [
- { 'date' => '2020-05-20', 'coverage' => 79.1 },
- { 'date' => '2020-05-19', 'coverage' => 77.1 }
- ]
- },
- {
- 'group_name' => 'karma',
- 'data' => [
- { 'date' => '2020-05-20', 'coverage' => 90.1 },
- { 'date' => '2020-05-19', 'coverage' => 89.1 }
- ]
- }
- ])
+ expect(json).to eq(
+ [
+ {
+ 'group_name' => 'rspec',
+ 'data' => [
+ { 'date' => '2020-05-20', 'coverage' => 79.1 },
+ { 'date' => '2020-05-19', 'coverage' => 77.1 }
+ ]
+ },
+ {
+ 'group_name' => 'karma',
+ 'data' => [
+ { 'date' => '2020-05-20', 'coverage' => 90.1 },
+ { 'date' => '2020-05-19', 'coverage' => 89.1 }
+ ]
+ }
+ ])
end
end
end
diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb
index 433ce344680..0746e68d7c5 100644
--- a/spec/serializers/deployment_entity_spec.rb
+++ b/spec/serializers/deployment_entity_spec.rb
@@ -3,56 +3,47 @@
require 'spec_helper'
RSpec.describe DeploymentEntity do
- let(:user) { developer }
- let(:developer) { create(:user) }
- let(:reporter) { create(:user) }
- let(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:user) { developer }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let_it_be_with_reload(:build) { create(:ci_build, :manual, :environment_with_deployment_tier, pipeline: pipeline) }
+
+ let_it_be_with_refind(:deployment) { create(:deployment, deployable: build, environment: environment) }
+
let(:request) { double('request') }
- let(:deployment) { create(:deployment, deployable: build, project: project) }
- let(:build) { create(:ci_build, :manual, :environment_with_deployment_tier, pipeline: pipeline) }
- let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
let(:entity) { described_class.new(deployment, request: request) }
subject { entity.as_json }
- before do
+ before_all do
project.add_developer(developer)
project.add_reporter(reporter)
+ end
+
+ before do
allow(request).to receive(:current_user).and_return(user)
allow(request).to receive(:project).and_return(project)
end
- it 'exposes internal deployment id' do
+ it 'exposes fields', :aggregate_failures do
expect(subject).to include(:iid)
- end
-
- it 'exposes nested information about branch' do
expect(subject[:ref][:name]).to eq 'master'
- end
-
- it 'exposes status' do
expect(subject).to include(:status)
- end
-
- it 'exposes creation date' do
expect(subject).to include(:created_at)
- end
-
- it 'exposes deployed_at' do
expect(subject).to include(:deployed_at)
- end
-
- it 'exposes last? as is_last' do
expect(subject).to include(:is_last)
- end
-
- it 'exposes deployment tier in yaml' do
expect(subject).to include(:tier_in_yaml)
end
context 'when deployable is nil' do
let(:entity) { described_class.new(deployment, request: request, deployment_details: false) }
- let(:deployment) { create(:deployment, deployable: nil, project: project) }
+
+ before do
+ deployment.update!(deployable: nil)
+ end
it 'does not expose deployable entry' do
expect(subject).not_to include(:deployable)
@@ -60,19 +51,19 @@ RSpec.describe DeploymentEntity do
end
context 'when the pipeline has another manual action' do
- let!(:other_build) do
+ let_it_be(:other_build) do
create(:ci_build, :manual, name: 'another deploy', pipeline: pipeline, environment: build.environment)
end
- let!(:other_deployment) { create(:deployment, deployable: build) }
+ let_it_be(:other_deployment) { create(:deployment, deployable: build, environment: environment) }
it 'returns another manual action' do
- expect(subject[:manual_actions].count).to eq(2)
- expect(subject[:manual_actions].pluck(:name)).to match_array(['test', 'another deploy'])
+ expect(subject[:manual_actions].count).to eq(1)
+ expect(subject[:manual_actions].pluck(:name)).to match_array(['another deploy'])
end
context 'when user is a reporter' do
- let(:user) { reporter }
+ let_it_be(:user) { reporter }
it 'returns another manual action' do
expect(subject[:manual_actions]).not_to be_present
@@ -91,14 +82,15 @@ RSpec.describe DeploymentEntity do
end
describe 'scheduled_actions' do
- let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
let(:build) { create(:ci_build, :success, pipeline: pipeline) }
- let(:deployment) { create(:deployment, deployable: build) }
+
+ before do
+ deployment.update!(deployable: build)
+ end
context 'when the same pipeline has a scheduled action' do
let(:other_build) { create(:ci_build, :schedulable, :success, pipeline: pipeline, name: 'other build') }
- let!(:other_deployment) { create(:deployment, deployable: other_build) }
+ let!(:other_deployment) { create(:deployment, deployable: other_build, environment: environment) }
it 'returns other scheduled actions' do
expect(subject[:scheduled_actions][0][:name]).to eq 'other build'
@@ -123,7 +115,9 @@ RSpec.describe DeploymentEntity do
end
describe 'playable_build' do
- let_it_be(:project) { create(:project, :repository) }
+ before do
+ deployment.update!(deployable: build)
+ end
context 'when the deployment has a playable deployable' do
context 'when this build is ready to be played' do
@@ -144,7 +138,7 @@ RSpec.describe DeploymentEntity do
end
context 'when the deployment does not have a playable deployable' do
- let(:build) { create(:ci_build) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it 'is not exposed' do
expect(subject[:playable_build]).to be_nil
diff --git a/spec/serializers/environment_status_entity_spec.rb b/spec/serializers/environment_status_entity_spec.rb
index 77ef06f90c2..2ee4e8ade8f 100644
--- a/spec/serializers/environment_status_entity_spec.rb
+++ b/spec/serializers/environment_status_entity_spec.rb
@@ -3,21 +3,26 @@
require 'spec_helper'
RSpec.describe EnvironmentStatusEntity do
- let(:user) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:deployment) { create(:deployment, :succeed, :review_app) }
+ let_it_be(:merge_request) { create(:merge_request, :deployed_review_app, deployment: deployment) }
+ let_it_be(:environment) { deployment.environment }
+ let_it_be(:project) { deployment.project }
+
+ let(:user) { non_member }
let(:request) { double('request', project: project) }
-
- let(:deployment) { create(:deployment, :succeed, :review_app) }
- let(:environment) { deployment.environment }
- let(:project) { deployment.project }
- let(:merge_request) { create(:merge_request, :deployed_review_app, deployment: deployment) }
-
let(:environment_status) { EnvironmentStatus.new(project, environment, merge_request, merge_request.diff_head_sha) }
- let(:entity) { described_class.new(environment_status, request: request) }
+ let(:entity) { described_class.new(environment_status, request: request) }
subject { entity.as_json }
- before do
+ before_all do
+ project.add_maintainer(maintainer)
deployment.update!(sha: merge_request.diff_head_sha)
+ end
+
+ before do
allow(request).to receive(:current_user).and_return(user)
end
@@ -37,14 +42,13 @@ RSpec.describe EnvironmentStatusEntity do
it { is_expected.not_to include(:metrics_monitoring_url) }
context 'when the user is project maintainer' do
- before do
- project.add_maintainer(user)
- end
+ let(:user) { maintainer }
it { is_expected.to include(:stop_url) }
end
context 'when deployment has metrics' do
+ let(:user) { maintainer }
let(:prometheus_adapter) { double('prometheus_adapter', can_query?: true, configured?: true) }
let(:simple_metrics) do
@@ -56,7 +60,6 @@ RSpec.describe EnvironmentStatusEntity do
end
before do
- project.add_maintainer(user)
allow(deployment).to receive(:prometheus_adapter).and_return(prometheus_adapter)
allow(entity).to receive(:deployment).and_return(deployment)
@@ -69,8 +72,6 @@ RSpec.describe EnvironmentStatusEntity do
end
context 'when deployment succeeded' do
- let(:deployment) { create(:deployment, :succeed, :review_app) }
-
it 'returns metrics url' do
expect(subject[:metrics_url])
.to eq("/#{project.full_path}/-/environments/#{environment.id}/deployments/#{deployment.iid}/metrics")
@@ -78,7 +79,9 @@ RSpec.describe EnvironmentStatusEntity do
end
context 'when deployment is running' do
- let(:deployment) { create(:deployment, :running, :review_app) }
+ before do
+ deployment.update!(status: :running)
+ end
it 'does not return metrics url' do
expect(subject[:metrics_url]).to be_nil
diff --git a/spec/serializers/group_access_token_entity_spec.rb b/spec/serializers/group_access_token_entity_spec.rb
index 05609dc3c7a..586eb0a8588 100644
--- a/spec/serializers/group_access_token_entity_spec.rb
+++ b/spec/serializers/group_access_token_entity_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe GroupAccessTokenEntity do
expected_revoke_path = Gitlab::Routing.url_helpers
.revoke_group_settings_access_token_path(
{ id: token,
- group_id: group.path })
+ group_id: group.full_path })
expect(json).to(
include(
@@ -39,7 +39,7 @@ RSpec.describe GroupAccessTokenEntity do
expected_revoke_path = Gitlab::Routing.url_helpers
.revoke_group_settings_access_token_path(
{ id: token,
- group_id: group.path })
+ group_id: group.full_path })
expect(json).to(
include(
diff --git a/spec/serializers/import/github_org_entity_spec.rb b/spec/serializers/import/github_org_entity_spec.rb
new file mode 100644
index 00000000000..46fceb8ac60
--- /dev/null
+++ b/spec/serializers/import/github_org_entity_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::GithubOrgEntity do
+ let(:org_data) do
+ {
+ 'id' => 12345,
+ 'login' => 'org-name',
+ 'url' => 'https://api.github.com/orgs/org-name',
+ 'avatar_url' => 'https://avatars.githubusercontent.com/u/12345?v=4',
+ 'node_id' => 'O_teStT',
+ 'description' => ''
+ }
+ end
+
+ subject { described_class.new(org_data).as_json }
+
+ it 'exposes correct attributes' do
+ expect(subject.keys).to contain_exactly(
+ :description,
+ :name
+ )
+ end
+end
diff --git a/spec/serializers/import/github_org_serializer_spec.rb b/spec/serializers/import/github_org_serializer_spec.rb
new file mode 100644
index 00000000000..4206914cd6e
--- /dev/null
+++ b/spec/serializers/import/github_org_serializer_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::GithubOrgSerializer do
+ it 'represents GithubOrgEntity entities' do
+ expect(described_class.entity_class).to eq(Import::GithubOrgEntity)
+ end
+
+ describe '#represent' do
+ let(:org_data) do
+ {
+ id: 123456,
+ login: 'org-name',
+ node_id: 'O_teStT',
+ url: 'https://api.github.com/orgs/org-name',
+ repos_url: 'https://api.github.com/orgs/org-name/repos',
+ events_url: 'https://api.github.com/orgs/org-name/events',
+ hooks_url: 'https://api.github.com/orgs/org-name/hooks',
+ issues_url: 'https://api.github.com/orgs/org-name/issues',
+ members_url: 'https://api.github.com/orgs/org-name/members{/member}',
+ public_members_url: 'https://api.github.com/orgs/org-name/public_members{/member}',
+ avatar_url: 'avatar_url',
+ description: 'description'
+ }
+ end
+
+ subject { described_class.new.represent(resource) }
+
+ context 'when a single object is being serialized' do
+ let(:resource) { org_data }
+
+ it 'serializes organization object' do
+ expect(subject).to eq({ name: 'org-name', description: 'description' })
+ end
+ end
+
+ context 'when multiple objects are being serialized' do
+ let(:count) { 3 }
+ let(:resource) { Array.new(count, org_data) }
+
+ it 'serializes array of organizations' do
+ expect(subject).to all(eq({ name: 'org-name', description: 'description' }))
+ end
+ end
+ end
+end
diff --git a/spec/serializers/issue_entity_spec.rb b/spec/serializers/issue_entity_spec.rb
index 9335ca61b7d..25e9e8c17e2 100644
--- a/spec/serializers/issue_entity_spec.rb
+++ b/spec/serializers/issue_entity_spec.rb
@@ -150,4 +150,6 @@ RSpec.describe IssueEntity do
end
end
end
+
+ it_behaves_like 'issuable entity current_user properties'
end
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 90a82d16e38..59ffba0e7a9 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -152,17 +152,19 @@ RSpec.describe MergeRequestPollWidgetEntity do
describe '#builds_with_coverage' do
it 'serializes the builds with coverage' do
- allow(resource).to receive(:head_pipeline_builds_with_coverage).and_return([
- double(name: 'rspec', coverage: 91.5),
- double(name: 'jest', coverage: 94.1)
- ])
+ allow(resource).to receive(:head_pipeline_builds_with_coverage).and_return(
+ [
+ double(name: 'rspec', coverage: 91.5),
+ double(name: 'jest', coverage: 94.1)
+ ])
result = subject[:builds_with_coverage]
- expect(result).to eq([
- { name: 'rspec', coverage: 91.5 },
- { name: 'jest', coverage: 94.1 }
- ])
+ expect(result).to eq(
+ [
+ { name: 'rspec', coverage: 91.5 },
+ { name: 'jest', coverage: 94.1 }
+ ])
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index f5398013a70..4d9bdc4bb17 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe PipelineSerializer do
end
context 'when a single object is being serialized' do
- let(:resource) { create(:ci_empty_pipeline, project: project) }
+ let(:resource) { build_stubbed(:ci_empty_pipeline, project: project) }
it 'serializers the pipeline object' do
expect(subject[:id]).to eq resource.id
@@ -27,10 +27,11 @@ RSpec.describe PipelineSerializer do
end
context 'when multiple objects are being serialized' do
- let(:resource) { create_list(:ci_pipeline, 2, project: project) }
+ let(:resource) { Array.new(2) { build_stubbed(:ci_pipeline, project: project) } }
it 'serializers the array of pipelines' do
expect(subject).not_to be_empty
+ expect(subject.size).to eq(2)
end
end
end
@@ -41,8 +42,7 @@ RSpec.describe PipelineSerializer do
let(:query) { {} }
let(:serializer) do
- described_class.new(current_user: user)
- .with_pagination(request, response)
+ described_class.new(current_user: user, project: project).with_pagination(request, response)
end
it 'created a paginated serializer' do
@@ -67,7 +67,7 @@ RSpec.describe PipelineSerializer do
context 'when a single pipeline object is present in relation' do
before do
- create(:ci_empty_pipeline)
+ create(:ci_empty_pipeline, project: project)
end
it 'serializes pipeline relation' do
@@ -77,7 +77,7 @@ RSpec.describe PipelineSerializer do
context 'when a multiple pipeline objects are being serialized' do
before do
- create_list(:ci_empty_pipeline, 3)
+ create_list(:ci_empty_pipeline, 3, project: project)
end
it 'serializes appropriate number of objects' do
@@ -100,28 +100,28 @@ RSpec.describe PipelineSerializer do
let!(:merge_request_1) do
create(:merge_request,
- :with_detached_merge_request_pipeline,
- target_project: project,
- target_branch: 'master',
- source_project: project,
- source_branch: 'feature')
+ :with_detached_merge_request_pipeline,
+ target_project: project,
+ target_branch: 'master',
+ source_project: project,
+ source_branch: 'feature')
end
let!(:merge_request_2) do
create(:merge_request,
- :with_detached_merge_request_pipeline,
- target_project: project,
- target_branch: 'master',
- source_project: project,
- source_branch: '2-mb-file')
+ :with_detached_merge_request_pipeline,
+ target_project: project,
+ target_branch: 'master',
+ source_project: project,
+ source_branch: '2-mb-file')
end
- before do
+ before_all do
project.add_developer(user)
end
it 'includes merge requests information' do
- expect(subject.all? { |entry| entry[:merge_request].present? }).to be_truthy
+ expect(subject).to be_all { |entry| entry[:merge_request].present? }
end
it 'preloads related merge requests' do
@@ -138,7 +138,8 @@ RSpec.describe PipelineSerializer do
let(:resource) { Ci::Pipeline.all }
- before do
+ # Create pipelines only once and change their attributes if needed.
+ before_all do
# Since RequestStore.active? is true we have to allow the
# gitaly calls in this block
# Issue: https://gitlab.com/gitlab-org/gitlab-foss/issues/37772
@@ -151,8 +152,6 @@ RSpec.describe PipelineSerializer do
end
context 'with the same ref' do
- let(:ref) { 'feature' }
-
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
expected_queries = Gitlab.ee? ? 33 : 30
@@ -163,10 +162,11 @@ RSpec.describe PipelineSerializer do
end
context 'with different refs' do
- def ref
- @sequence ||= 0
- @sequence += 1
- "feature-#{@sequence}"
+ before do
+ # rubocop:disable Rails/SkipsModelValidations
+ Ci::Pipeline.update_all(%(ref = 'feature-' || id))
+ Ci::Build.update_all(%(ref = 'feature-' || stage_id))
+ # rubocop:enable Rails/SkipsModelValidations
end
it 'verifies number of queries', :request_store do
@@ -184,8 +184,6 @@ RSpec.describe PipelineSerializer do
end
context 'with triggered pipelines' do
- let(:ref) { 'feature' }
-
before do
pipeline_1 = create(:ci_pipeline)
build_1 = create(:ci_build, pipeline: pipeline_1)
@@ -210,8 +208,6 @@ RSpec.describe PipelineSerializer do
end
context 'with build environments' do
- let(:ref) { 'feature' }
-
let_it_be(:production) { create(:environment, :production, project: project) }
let_it_be(:staging) { create(:environment, :staging, project: project) }
@@ -222,13 +218,11 @@ RSpec.describe PipelineSerializer do
create(:ci_build, :scheduled, pipeline: pipeline, environment: production.name)
create(:ci_build, :scheduled, pipeline: pipeline, environment: staging.name)
- expect { subject }.not_to exceed_query_limit(1).for_query /SELECT "environments".*/
+ expect { subject }.not_to exceed_query_limit(1).for_query(/SELECT "environments".*/)
end
end
context 'with scheduled and manual builds' do
- let(:ref) { 'feature' }
-
before do
create(:ci_build, :scheduled, pipeline: resource.first)
create(:ci_build, :scheduled, pipeline: resource.second)
@@ -238,7 +232,7 @@ RSpec.describe PipelineSerializer do
it 'sends at most one metadata query for each type of build', :request_store do
# 1 for the existing failed builds and 2 for the added scheduled and manual builds
- expect { subject }.not_to exceed_query_limit(1 + 2).for_query /SELECT "ci_builds_metadata".*/
+ expect { subject }.not_to exceed_query_limit(1 + 2).for_query(/SELECT "ci_builds_metadata".*/)
end
end
@@ -246,25 +240,25 @@ RSpec.describe PipelineSerializer do
create(:ci_empty_pipeline,
project: project,
status: status,
- ref: ref).tap do |pipeline|
- Ci::Build::AVAILABLE_STATUSES.each do |status|
- create_build(pipeline, status, status)
+ ref: 'feature').tap do |pipeline|
+ Ci::Build::AVAILABLE_STATUSES.each do |build_status|
+ create_build(pipeline, status, build_status)
end
end
end
def create_build(pipeline, stage, status)
create(:ci_build, :tags, :triggered, :artifacts,
- pipeline: pipeline, stage: stage,
- name: stage, status: status, ref: pipeline.ref)
+ pipeline: pipeline, stage: stage,
+ name: stage, status: status, ref: pipeline.ref)
end
end
end
describe '#represent_status' do
context 'when represents only status' do
- let(:resource) { create(:ci_pipeline) }
- let(:status) { resource.detailed_status(double('user')) }
+ let(:resource) { build_stubbed(:ci_pipeline) }
+ let(:status) { resource.detailed_status(instance_double('User')) }
subject { serializer.represent_status(resource) }
diff --git a/spec/serializers/project_access_token_entity_spec.rb b/spec/serializers/project_access_token_entity_spec.rb
index 4b5b4d4d77d..8af09b0a45d 100644
--- a/spec/serializers/project_access_token_entity_spec.rb
+++ b/spec/serializers/project_access_token_entity_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe ProjectAccessTokenEntity do
expected_revoke_path = Gitlab::Routing.url_helpers
.revoke_namespace_project_settings_access_token_path(
{ id: token,
- namespace_id: project.namespace.path,
+ namespace_id: project.namespace.full_path,
project_id: project.path })
expect(json).to(
@@ -42,7 +42,7 @@ RSpec.describe ProjectAccessTokenEntity do
expected_revoke_path = Gitlab::Routing.url_helpers
.revoke_namespace_project_settings_access_token_path(
{ id: token,
- namespace_id: project.namespace.path,
+ namespace_id: project.namespace.full_path,
project_id: project.path })
expect(json).to(
diff --git a/spec/services/admin/set_feature_flag_service_spec.rb b/spec/services/admin/set_feature_flag_service_spec.rb
new file mode 100644
index 00000000000..6fa806644c9
--- /dev/null
+++ b/spec/services/admin/set_feature_flag_service_spec.rb
@@ -0,0 +1,300 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::SetFeatureFlagService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+
+ let(:feature_name) { known_feature_flag.name }
+ let(:service) { described_class.new(feature_flag_name: feature_name, params: params) }
+
+ # Find any `development` feature flag name
+ let(:known_feature_flag) do
+ Feature::Definition.definitions
+ .values.find(&:development?)
+ end
+
+ describe '#execute' do
+ before do
+ Feature.reset
+ Flipper.unregister_groups
+ Flipper.register(:perf_team) do |actor|
+ actor.respond_to?(:admin) && actor.admin?
+ end
+ end
+
+ subject { service.execute }
+
+ context 'when enabling the feature flag' do
+ let(:params) { { value: 'true' } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name)
+ expect(subject).to be_success
+
+ feature_flag = subject.payload[:feature_flag]
+ expect(feature_flag.name).to eq(feature_name)
+ end
+
+ it 'logs the event' do
+ expect(Feature.logger).to receive(:info).once
+
+ subject
+ end
+
+ context 'when enabling for a user actor' do
+ let(:params) { { value: 'true', user: user.username } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name, user)
+ expect(subject).to be_success
+ end
+
+ context 'when user does not exist' do
+ let(:params) { { value: 'true', user: 'unknown-user' } }
+
+ it 'does nothing' do
+ expect(Feature).not_to receive(:enable)
+ expect(subject).to be_error
+ expect(subject.reason).to eq(:actor_not_found)
+ end
+ end
+ end
+
+ context 'when enabling for a feature group' do
+ let(:params) { { value: 'true', feature_group: 'perf_team' } }
+ let(:feature_group) { Feature.group('perf_team') }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name, feature_group)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when enabling for a project' do
+ let(:params) { { value: 'true', project: project.full_path } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name, project)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when enabling for a group' do
+ let(:params) { { value: 'true', group: group.full_path } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name, group)
+ expect(subject).to be_success
+ end
+
+ context 'when group does not exist' do
+ let(:params) { { value: 'true', group: 'unknown-group' } }
+
+ it 'returns an error' do
+ expect(Feature).not_to receive(:disable)
+ expect(subject).to be_error
+ expect(subject.reason).to eq(:actor_not_found)
+ end
+ end
+ end
+
+ context 'when enabling for a user namespace' do
+ let(:namespace) { user.namespace }
+ let(:params) { { value: 'true', namespace: namespace.full_path } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name, namespace)
+ expect(subject).to be_success
+ end
+
+ context 'when namespace does not exist' do
+ let(:params) { { value: 'true', namespace: 'unknown-namespace' } }
+
+ it 'returns an error' do
+ expect(Feature).not_to receive(:disable)
+ expect(subject).to be_error
+ expect(subject.reason).to eq(:actor_not_found)
+ end
+ end
+ end
+
+ context 'when enabling for a group namespace' do
+ let(:params) { { value: 'true', namespace: group.full_path } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name, group)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when enabling for a user actor and a feature group' do
+ let(:params) { { value: 'true', user: user.username, feature_group: 'perf_team' } }
+ let(:feature_group) { Feature.group('perf_team') }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable).with(feature_name, user)
+ expect(Feature).to receive(:enable).with(feature_name, feature_group)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when enabling given a percentage of time' do
+ let(:params) { { value: '50' } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable_percentage_of_time).with(feature_name, 50)
+ expect(subject).to be_success
+ end
+
+ context 'when value is a float' do
+ let(:params) { { value: '0.01' } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable_percentage_of_time).with(feature_name, 0.01)
+ expect(subject).to be_success
+ end
+ end
+ end
+
+ context 'when enabling given a percentage of actors' do
+ let(:params) { { value: '50', key: 'percentage_of_actors' } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable_percentage_of_actors).with(feature_name, 50)
+ expect(subject).to be_success
+ end
+
+ context 'when value is a float' do
+ let(:params) { { value: '0.01', key: 'percentage_of_actors' } }
+
+ it 'enables the feature flag' do
+ expect(Feature).to receive(:enable_percentage_of_actors).with(feature_name, 0.01)
+ expect(subject).to be_success
+ end
+ end
+ end
+ end
+
+ context 'when disabling the feature flag' do
+ before do
+ Feature.enable(feature_name)
+ end
+
+ let(:params) { { value: 'false' } }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name)
+ expect(subject).to be_success
+
+ feature_flag = subject.payload[:feature_flag]
+ expect(feature_flag.name).to eq(feature_name)
+ end
+
+ it 'logs the event' do
+ expect(Feature.logger).to receive(:info).once
+
+ subject
+ end
+
+ context 'when disabling for a user actor' do
+ let(:params) { { value: 'false', user: user.username } }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name, user)
+ expect(subject).to be_success
+ end
+
+ context 'when user does not exist' do
+ let(:params) { { value: 'false', user: 'unknown-user' } }
+
+ it 'returns an error' do
+ expect(Feature).not_to receive(:disable)
+ expect(subject).to be_error
+ expect(subject.reason).to eq(:actor_not_found)
+ end
+ end
+ end
+
+ context 'when disabling for a feature group' do
+ let(:params) { { value: 'false', feature_group: 'perf_team' } }
+ let(:feature_group) { Feature.group('perf_team') }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name, feature_group)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when disabling for a project' do
+ let(:params) { { value: 'false', project: project.full_path } }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name, project)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when disabling for a group' do
+ let(:params) { { value: 'false', group: group.full_path } }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name, group)
+ expect(subject).to be_success
+ end
+
+ context 'when group does not exist' do
+ let(:params) { { value: 'false', group: 'unknown-group' } }
+
+ it 'returns an error' do
+ expect(Feature).not_to receive(:disable)
+ expect(subject).to be_error
+ expect(subject.reason).to eq(:actor_not_found)
+ end
+ end
+ end
+
+ context 'when disabling for a user namespace' do
+ let(:namespace) { user.namespace }
+ let(:params) { { value: 'false', namespace: namespace.full_path } }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name, namespace)
+ expect(subject).to be_success
+ end
+
+ context 'when namespace does not exist' do
+ let(:params) { { value: 'false', namespace: 'unknown-namespace' } }
+
+ it 'returns an error' do
+ expect(Feature).not_to receive(:disable)
+ expect(subject).to be_error
+ expect(subject.reason).to eq(:actor_not_found)
+ end
+ end
+ end
+
+ context 'when disabling for a group namespace' do
+ let(:params) { { value: 'false', namespace: group.full_path } }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name, group)
+ expect(subject).to be_success
+ end
+ end
+
+ context 'when disabling for a user actor and a feature group' do
+ let(:params) { { value: 'false', user: user.username, feature_group: 'perf_team' } }
+ let(:feature_group) { Feature.group('perf_team') }
+
+ it 'disables the feature flag' do
+ expect(Feature).to receive(:disable).with(feature_name, user)
+ expect(Feature).to receive(:disable).with(feature_name, feature_group)
+ expect(subject).to be_success
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/alert_management/create_alert_issue_service_spec.rb b/spec/services/alert_management/create_alert_issue_service_spec.rb
index 083e5b8c6f1..7255a722d26 100644
--- a/spec/services/alert_management/create_alert_issue_service_spec.rb
+++ b/spec/services/alert_management/create_alert_issue_service_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
it 'checks permissions' do
execute
- expect(user).to have_received(:can?).with(:create_issue, project)
+ expect(user).to have_received(:can?).with(:create_issue, project).exactly(2).times
end
context 'with alert severity' do
@@ -161,7 +161,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
it 'has an unsuccessful status' do
expect(execute).to be_error
- expect(execute.message).to eq("Title can't be blank")
+ expect(execute.errors).to contain_exactly("Title can't be blank")
end
end
@@ -170,7 +170,7 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
it 'responds with error' do
expect(execute).to be_error
- expect(execute.message).to eq('Hosts hosts array is over 255 chars')
+ expect(execute.errors).to contain_exactly('Hosts hosts array is over 255 chars')
end
end
diff --git a/spec/services/award_emojis/copy_service_spec.rb b/spec/services/award_emojis/copy_service_spec.rb
index e85c548968e..abb9c65e25d 100644
--- a/spec/services/award_emojis/copy_service_spec.rb
+++ b/spec/services/award_emojis/copy_service_spec.rb
@@ -4,10 +4,12 @@ require 'spec_helper'
RSpec.describe AwardEmojis::CopyService do
let_it_be(:from_awardable) do
- create(:issue, award_emoji: [
- build(:award_emoji, name: 'thumbsup'),
- build(:award_emoji, name: 'thumbsdown')
- ])
+ create(
+ :issue,
+ award_emoji: [
+ build(:award_emoji, name: 'thumbsup'),
+ build(:award_emoji, name: 'thumbsdown')
+ ])
end
describe '#initialize' do
diff --git a/spec/services/boards/issues/create_service_spec.rb b/spec/services/boards/issues/create_service_spec.rb
index 9a6b48c13bf..c4f1eb093dc 100644
--- a/spec/services/boards/issues/create_service_spec.rb
+++ b/spec/services/boards/issues/create_service_spec.rb
@@ -29,9 +29,10 @@ RSpec.describe Boards::Issues::CreateService do
end
it 'adds the label of the list to the issue' do
- issue = service.execute
+ result = service.execute
- expect(issue.labels).to eq [label]
+ expect(result).to be_success
+ expect(result[:issue].labels).to contain_exactly(label)
end
end
end
diff --git a/spec/services/boards/lists/generate_service_spec.rb b/spec/services/boards/lists/generate_service_spec.rb
deleted file mode 100644
index 9597c8e0f54..00000000000
--- a/spec/services/boards/lists/generate_service_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Boards::Lists::GenerateService do
- describe '#execute' do
- let(:project) { create(:project) }
- let(:board) { create(:board, project: project) }
- let(:user) { create(:user) }
-
- subject(:service) { described_class.new(project, user) }
-
- before do
- project.add_developer(user)
- end
-
- context 'when board lists is empty' do
- it 'creates the default lists' do
- expect { service.execute(board) }.to change(board.lists, :count).by(2)
- end
- end
-
- context 'when board lists is not empty' do
- it 'does not creates the default lists' do
- create(:list, board: board)
-
- expect { service.execute(board) }.not_to change(board.lists, :count)
- end
- end
-
- context 'when project labels does not contains any list label' do
- it 'creates labels' do
- expect { service.execute(board) }.to change(project.labels, :count).by(2)
- end
- end
-
- context 'when project labels contains some of list label' do
- it 'creates the missing labels' do
- create(:label, project: project, name: 'Doing')
-
- expect { service.execute(board) }.to change(project.labels, :count).by(1)
- end
- end
- end
-end
diff --git a/spec/services/boards/lists/list_service_spec.rb b/spec/services/boards/lists/list_service_spec.rb
index 0c8a8dc7329..2d41de42581 100644
--- a/spec/services/boards/lists/list_service_spec.rb
+++ b/spec/services/boards/lists/list_service_spec.rb
@@ -3,13 +3,40 @@
require 'spec_helper'
RSpec.describe Boards::Lists::ListService do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ RSpec.shared_examples 'FOSS lists only' do
+ context 'when board contains a non FOSS list' do
+ # This scenario may happen when there used to be an EE license and user downgraded
+ let!(:backlog_list) { create_backlog_list(board) }
+ let_it_be(:milestone) { create(:milestone, group: group) }
+ let_it_be(:assignee_list) do
+ list = build(:list, board: board, user_id: user.id, list_type: List.list_types[:assignee], position: 0)
+ list.save!(validate: false)
+ list
+ end
+
+ let_it_be(:milestone_list) do
+ list = build(:list, board: board, milestone_id: milestone.id, list_type: List.list_types[:milestone], position: 1) # rubocop:disable Layout/LineLength
+ list.save!(validate: false)
+ list
+ end
+
+ it "returns only FOSS board's lists" do
+ # just making sure these non FOSS lists actually exist on the board
+ expect(board.lists.with_types([List.list_types[:assignee], List.list_types[:milestone]]).count).to eq 2
+ # check that the FOSS lists are not returned from the service
+ expect(service.execute(board)).to match_array [backlog_list, list, board.lists.closed.first]
+ end
+ end
+ end
describe '#execute' do
let(:service) { described_class.new(parent, user) }
context 'when board parent is a project' do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, group: group) }
let_it_be_with_reload(:board) { create(:board, project: project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:list) { create(:list, board: board, label: label) }
@@ -18,10 +45,10 @@ RSpec.describe Boards::Lists::ListService do
let(:parent) { project }
it_behaves_like 'lists list service'
+ it_behaves_like 'FOSS lists only'
end
context 'when board parent is a group' do
- let_it_be(:group) { create(:group) }
let_it_be_with_reload(:board) { create(:board, group: group) }
let_it_be(:label) { create(:group_label, group: group) }
let_it_be(:list) { create(:list, board: board, label: label) }
@@ -30,6 +57,7 @@ RSpec.describe Boards::Lists::ListService do
let(:parent) { group }
it_behaves_like 'lists list service'
+ it_behaves_like 'FOSS lists only'
end
def create_backlog_list(board)
diff --git a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
index d7b00ba04ab..0de962328c5 100644
--- a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
+++ b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
@@ -75,7 +75,9 @@ RSpec.describe BulkImports::CreatePipelineTrackersService do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:info).with({
message: 'Pipeline skipped as source instance version not compatible with pipeline',
- entity_id: entity.id,
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ importer: 'gitlab_migration',
pipeline_name: 'PipelineClass4',
minimum_source_version: '15.1.0',
maximum_source_version: nil,
@@ -84,7 +86,9 @@ RSpec.describe BulkImports::CreatePipelineTrackersService do
expect(logger).to receive(:info).with({
message: 'Pipeline skipped as source instance version not compatible with pipeline',
- entity_id: entity.id,
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ importer: 'gitlab_migration',
pipeline_name: 'PipelineClass5',
minimum_source_version: '16.0.0',
maximum_source_version: nil,
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index 4b655dd5d6d..bf174f5d5a2 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -50,6 +50,11 @@ RSpec.describe BulkImports::CreateService do
expect(last_bulk_import.user).to eq(user)
expect(last_bulk_import.source_version).to eq(source_version.to_s)
expect(last_bulk_import.user).to eq(user)
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'bulk_import_group'
+ )
end
it 'creates bulk import entities' do
diff --git a/spec/services/bulk_imports/repository_bundle_export_service_spec.rb b/spec/services/bulk_imports/repository_bundle_export_service_spec.rb
index a7d98a7474a..f0d63de1ab9 100644
--- a/spec/services/bulk_imports/repository_bundle_export_service_spec.rb
+++ b/spec/services/bulk_imports/repository_bundle_export_service_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe BulkImports::RepositoryBundleExportService do
context 'when repository exists' do
it 'bundles repository to disk' do
allow(repository).to receive(:exists?).and_return(true)
+ allow(repository).to receive(:empty?).and_return(false)
expect(repository).to receive(:bundle_to_disk).with(File.join(export_path, "#{export_filename}.bundle"))
service.execute
@@ -31,6 +32,15 @@ RSpec.describe BulkImports::RepositoryBundleExportService do
service.execute
end
end
+
+ context 'when repository is empty' do
+ it 'does not bundle repository to disk' do
+ allow(repository).to receive(:empty?).and_return(true)
+ expect(repository).not_to receive(:bundle_to_disk)
+
+ service.execute
+ end
+ end
end
include_examples 'repository export' do
diff --git a/spec/services/bulk_imports/uploads_export_service_spec.rb b/spec/services/bulk_imports/uploads_export_service_spec.rb
index 39bcacfdc5e..ad6e005485c 100644
--- a/spec/services/bulk_imports/uploads_export_service_spec.rb
+++ b/spec/services/bulk_imports/uploads_export_service_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
RSpec.describe BulkImports::UploadsExportService do
- let_it_be(:project) { create(:project, avatar: fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) }
- let_it_be(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) }
let_it_be(:export_path) { Dir.mktmpdir }
+ let_it_be(:project) { create(:project, avatar: fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) }
+
+ let!(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) }
+ let(:exported_filepath) { File.join(export_path, upload.secret, upload.retrieve_uploader.filename) }
subject(:service) { described_class.new(project, export_path) }
@@ -15,10 +17,60 @@ RSpec.describe BulkImports::UploadsExportService do
describe '#execute' do
it 'exports project uploads and avatar' do
- subject.execute
+ service.execute
+
+ expect(File).to exist(File.join(export_path, 'avatar', 'rails_sample.png'))
+ expect(File).to exist(exported_filepath)
+ end
+
+ context 'when upload has underlying file missing' do
+ context 'with an upload missing its file' do
+ it 'does not cause errors' do
+ File.delete(upload.absolute_path)
+
+ expect { service.execute }.not_to raise_error
+
+ expect(File).not_to exist(exported_filepath)
+ end
+ end
+
+ context 'when upload is in object storage' do
+ before do
+ stub_uploads_object_storage(FileUploader)
+ end
+
+ shared_examples 'export with invalid upload' do
+ it 'ignores problematic upload and logs exception' do
+ allow(service).to receive(:download_or_copy_upload).and_raise(exception)
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:log_exception)
+ .with(
+ instance_of(exception), {
+ portable_id: project.id,
+ portable_class: 'Project',
+ upload_id: upload.id
+ }
+ )
+
+ service.execute
+
+ expect(File).not_to exist(exported_filepath)
+ end
+ end
+
+ context 'when filename is too long' do
+ let(:exception) { Errno::ENAMETOOLONG }
+
+ include_examples 'export with invalid upload'
+ end
+
+ context 'when network exception occurs' do
+ let(:exception) { Net::OpenTimeout }
- expect(File.exist?(File.join(export_path, 'avatar', 'rails_sample.png'))).to eq(true)
- expect(File.exist?(File.join(export_path, upload.secret, upload.retrieve_uploader.filename))).to eq(true)
+ include_examples 'export with invalid upload'
+ end
+ end
end
end
end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index 7c5bd1db565..24a868b524d 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe BulkUpdateIntegrationService do
let(:excluded_attributes) do
%w[
- id project_id group_id inherit_from_id instance template
- created_at updated_at encrypted_properties encrypted_properties_iv
+ id project_id group_id inherit_from_id instance template
+ created_at updated_at encrypted_properties encrypted_properties_iv
]
end
diff --git a/spec/services/ci/compare_test_reports_service_spec.rb b/spec/services/ci/compare_test_reports_service_spec.rb
index 01d58b2095f..6d3df0f5383 100644
--- a/spec/services/ci/compare_test_reports_service_spec.rb
+++ b/spec/services/ci/compare_test_reports_service_spec.rb
@@ -72,10 +72,11 @@ RSpec.describe Ci::CompareTestReportsService do
it 'loads recent failures on limited test cases to avoid building up a huge DB query', :aggregate_failures do
expect(comparison[:data]).to match_schema('entities/test_reports_comparer')
- expect(recent_failures_per_test_case).to eq([
- { 'count' => 1, 'base_branch' => 'master' },
- { 'count' => 1, 'base_branch' => 'master' }
- ])
+ expect(recent_failures_per_test_case).to eq(
+ [
+ { 'count' => 1, 'base_branch' => 'master' },
+ { 'count' => 1, 'base_branch' => 'master' }
+ ])
expect(new_failures.count).to eq(2)
end
end
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 67d8530525a..3764663fd74 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -126,51 +126,5 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it_behaves_like 'not including the file'
end
end
-
- context 'with ci_increase_includes_to_250 enabled on root project' do
- let_it_be(:included_project) do
- create(:project, :repository).tap { |p| p.add_developer(user) }
- end
-
- before do
- stub_const('::Gitlab::Ci::Config::External::Context::MAX_INCLUDES', 0)
- stub_const('::Gitlab::Ci::Config::External::Context::TRIAL_MAX_INCLUDES', 3)
-
- stub_feature_flags(ci_increase_includes_to_250: false)
- stub_feature_flags(ci_increase_includes_to_250: project)
-
- allow(Project)
- .to receive(:find_by_full_path)
- .with(included_project.full_path)
- .and_return(included_project)
-
- allow(included_project.repository)
- .to receive(:blob_data_at).with(included_project.commit.id, '.gitlab-ci.yml')
- .and_return(local_config)
-
- allow(included_project.repository)
- .to receive(:blob_data_at).with(included_project.commit.id, file_location)
- .and_return(File.read(Rails.root.join(file_location)))
- end
-
- let(:config) do
- <<~EOY
- include:
- - project: #{included_project.full_path}
- file: .gitlab-ci.yml
- EOY
- end
-
- let(:local_config) do
- <<~EOY
- include: #{file_location}
-
- job:
- script: exit 0
- EOY
- end
-
- it_behaves_like 'including the file'
- end
end
end
diff --git a/spec/services/ci/create_pipeline_service/limit_active_jobs_spec.rb b/spec/services/ci/create_pipeline_service/limit_active_jobs_spec.rb
new file mode 100644
index 00000000000..003d109a27c
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/limit_active_jobs_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.first_owner }
+ let_it_be(:existing_pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:service) { described_class.new(project, user, ref: 'refs/heads/master') }
+
+ subject(:pipeline) { service.execute(:push).payload }
+
+ before do
+ create_list(:ci_build, 8, pipeline: existing_pipeline)
+ create_list(:ci_bridge, 1, pipeline: existing_pipeline)
+
+ stub_ci_pipeline_yaml_file(<<~YAML)
+ job1:
+ script: echo
+ job3:
+ trigger:
+ project: org/my-project
+ job4:
+ script: echo
+ only: [tags]
+ YAML
+ end
+
+ context 'when project has exceeded the active jobs limit' do
+ before do
+ project.namespace.actual_limits.update!(ci_active_jobs: 10)
+ end
+
+ it 'fails the pipeline before populating it' do
+ expect(pipeline).to be_failed
+ expect(pipeline).to be_job_activity_limit_exceeded
+
+ expect(pipeline.errors.full_messages)
+ .to include("Project exceeded the allowed number of jobs in active pipelines. Retry later.")
+ expect(pipeline.statuses).to be_empty
+ end
+ end
+
+ context 'when project has not exceeded the active jobs limit' do
+ before do
+ project.namespace.actual_limits.update!(ci_active_jobs: 20)
+ end
+
+ it 'creates the pipeline successfully' do
+ expect(pipeline).to be_created
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
index 2be23802757..3045f8e92b1 100644
--- a/spec/services/ci/create_pipeline_service/logger_spec.rb
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
{
'count' => a_kind_of(Numeric),
'avg' => a_kind_of(Numeric),
+ 'sum' => a_kind_of(Numeric),
'max' => a_kind_of(Numeric),
'min' => a_kind_of(Numeric)
}
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index fc57ca66d3a..c737b8cc329 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -540,19 +540,10 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:compare_to) { 'invalid-branch' }
it 'returns an error' do
- expect(pipeline.errors.full_messages).to eq([
- 'Failed to parse rule for job1: rules:changes:compare_to is not a valid ref'
- ])
- end
-
- context 'when the FF ci_rules_changes_compare is not enabled' do
- before do
- stub_feature_flags(ci_rules_changes_compare: false)
- end
-
- it 'ignores compare_to and changes is always true' do
- expect(build_names).to contain_exactly('job1', 'job2')
- end
+ expect(pipeline.errors.full_messages).to eq(
+ [
+ 'Failed to parse rule for job1: rules:changes:compare_to is not a valid ref'
+ ])
end
end
@@ -563,16 +554,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'creates job1 and job2' do
expect(build_names).to contain_exactly('job1', 'job2')
end
-
- context 'when the FF ci_rules_changes_compare is not enabled' do
- before do
- stub_feature_flags(ci_rules_changes_compare: false)
- end
-
- it 'ignores compare_to and changes is always true' do
- expect(build_names).to contain_exactly('job1', 'job2')
- end
- end
end
context 'when the rule does not match' do
@@ -581,16 +562,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create job1' do
expect(build_names).to contain_exactly('job2')
end
-
- context 'when the FF ci_rules_changes_compare is not enabled' do
- before do
- stub_feature_flags(ci_rules_changes_compare: false)
- end
-
- it 'ignores compare_to and changes is always true' do
- expect(build_names).to contain_exactly('job1', 'job2')
- end
- end
end
end
end
@@ -616,17 +587,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(pipeline).to be_created_successfully
expect(build_names).to contain_exactly('job1')
end
-
- context 'when the FF ci_rules_changes_compare is not enabled' do
- before do
- stub_feature_flags(ci_rules_changes_compare: false)
- end
-
- it 'ignores compare_to and changes is always true' do
- expect(pipeline).to be_created_successfully
- expect(build_names).to contain_exactly('job1')
- end
- end
end
context 'when the rule does not match' do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index c2e80316d26..458692ba1c0 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -293,7 +293,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
pipeline_on_previous_commit
.builds
.joins(:metadata)
- .pluck(:name, 'ci_builds_metadata.interruptible')
+ .pluck(:name, "#{Ci::BuildMetadata.quoted_table_name}.interruptible")
expect(interruptible_status).to contain_exactly(
['build_1_1', true],
@@ -423,7 +423,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
expect(response.message).to eq('Missing CI config file')
expect(response.payload).not_to be_persisted
expect(Ci::Pipeline.count).to eq(0)
- expect(Namespaces::OnboardingPipelineCreatedWorker).not_to receive(:perform_async)
+ expect(Onboarding::PipelineCreatedWorker).not_to receive(:perform_async)
end
shared_examples 'a failed pipeline' do
@@ -1547,7 +1547,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
end
it 'schedules a namespace onboarding create action worker' do
- expect(Namespaces::OnboardingPipelineCreatedWorker)
+ expect(Onboarding::PipelineCreatedWorker)
.to receive(:perform_async).with(project.namespace_id)
pipeline
diff --git a/spec/services/ci/find_exposed_artifacts_service_spec.rb b/spec/services/ci/find_exposed_artifacts_service_spec.rb
index 32d96471f16..6e11c153a75 100644
--- a/spec/services/ci/find_exposed_artifacts_service_spec.rb
+++ b/spec/services/ci/find_exposed_artifacts_service_spec.rb
@@ -157,20 +157,21 @@ RSpec.describe Ci::FindExposedArtifactsService do
subject { described_class.new(project, user).for_pipeline(pipeline, limit: 2) }
it 'returns first 2 results' do
- expect(subject).to eq([
- {
- text: 'artifact 1',
- url: file_project_job_artifacts_path(project, job1, 'ci_artifacts.txt'),
- job_name: job1.name,
- job_path: project_job_path(project, job1)
- },
- {
- text: 'artifact 2',
- url: browse_project_job_artifacts_path(project, job2),
- job_name: job2.name,
- job_path: project_job_path(project, job2)
- }
- ])
+ expect(subject).to eq(
+ [
+ {
+ text: 'artifact 1',
+ url: file_project_job_artifacts_path(project, job1, 'ci_artifacts.txt'),
+ job_name: job1.name,
+ job_path: project_job_path(project, job1)
+ },
+ {
+ text: 'artifact 2',
+ url: browse_project_job_artifacts_path(project, job2),
+ job_name: job2.name,
+ job_path: project_job_path(project, job2)
+ }
+ ])
end
end
@@ -199,20 +200,21 @@ RSpec.describe Ci::FindExposedArtifactsService do
subject { described_class.new(project, user).for_pipeline(pipeline, limit: 2) }
it 'returns the correct path for cross-project MRs' do
- expect(subject).to eq([
- {
- text: 'file artifact',
- url: file_project_job_artifacts_path(foreign_project, job_show, 'ci_artifacts.txt'),
- job_name: job_show.name,
- job_path: project_job_path(foreign_project, job_show)
- },
- {
- text: 'directory artifact',
- url: browse_project_job_artifacts_path(foreign_project, job_browse),
- job_name: job_browse.name,
- job_path: project_job_path(foreign_project, job_browse)
- }
- ])
+ expect(subject).to eq(
+ [
+ {
+ text: 'file artifact',
+ url: file_project_job_artifacts_path(foreign_project, job_show, 'ci_artifacts.txt'),
+ job_name: job_show.name,
+ job_path: project_job_path(foreign_project, job_show)
+ },
+ {
+ text: 'directory artifact',
+ url: browse_project_job_artifacts_path(foreign_project, job_browse),
+ job_name: job_browse.name,
+ job_path: project_job_path(foreign_project, job_browse)
+ }
+ ])
end
end
end
diff --git a/spec/services/ci/generate_kubeconfig_service_spec.rb b/spec/services/ci/generate_kubeconfig_service_spec.rb
index e3088ca6ea7..bfde39780dd 100644
--- a/spec/services/ci/generate_kubeconfig_service_spec.rb
+++ b/spec/services/ci/generate_kubeconfig_service_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe Ci::GenerateKubeconfigService do
let(:pipeline) { build.pipeline }
let(:agent1) { create(:cluster_agent, project: project) }
let(:agent2) { create(:cluster_agent) }
+ let(:authorization1) { create(:agent_project_authorization, agent: agent1) }
+ let(:authorization2) { create(:agent_project_authorization, agent: agent2) }
let(:template) { instance_double(Gitlab::Kubernetes::Kubeconfig::Template) }
@@ -16,7 +18,7 @@ RSpec.describe Ci::GenerateKubeconfigService do
before do
expect(Gitlab::Kubernetes::Kubeconfig::Template).to receive(:new).and_return(template)
- expect(pipeline).to receive(:authorized_cluster_agents).and_return([agent1, agent2])
+ expect(pipeline).to receive(:cluster_agent_authorizations).and_return([authorization1, authorization2])
end
it 'adds a cluster, and a user and context for each available agent' do
@@ -36,11 +38,13 @@ RSpec.describe Ci::GenerateKubeconfigService do
expect(template).to receive(:add_context).with(
name: "#{project.full_path}:#{agent1.name}",
+ namespace: 'production',
cluster: 'gitlab',
user: "agent:#{agent1.id}"
)
expect(template).to receive(:add_context).with(
name: "#{agent2.project.full_path}:#{agent2.name}",
+ namespace: 'production',
cluster: 'gitlab',
user: "agent:#{agent2.id}"
)
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index a2259f9813b..030ba84951e 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -182,7 +182,8 @@ RSpec.describe Ci::JobArtifacts::CreateService do
end
context 'with job partitioning' do
- let(:job) { create(:ci_build, project: project, partition_id: 123) }
+ let(:pipeline) { create(:ci_pipeline, project: project, partition_id: 123) }
+ let(:job) { create(:ci_build, pipeline: pipeline) }
it 'sets partition_id on artifacts' do
expect { subject }.to change { Ci::JobArtifact.count }
diff --git a/spec/services/ci/job_artifacts/delete_service_spec.rb b/spec/services/ci/job_artifacts/delete_service_spec.rb
index 62a755eb44a..78e8be48255 100644
--- a/spec/services/ci/job_artifacts/delete_service_spec.rb
+++ b/spec/services/ci/job_artifacts/delete_service_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Ci::JobArtifacts::DeleteService do
result = service.execute
expect(result).to be_success
+ expect(result[:destroyed_artifacts_count]).to be(2)
end
it 'deletes erasable artifacts' do
@@ -24,7 +25,7 @@ RSpec.describe Ci::JobArtifacts::DeleteService do
expect { service.execute }.not_to change { build.has_trace? }.from(true)
end
- context 'when project is undergoing statistics refresh' do
+ context 'when project is undergoing stats refresh' do
before do
allow(build.project).to receive(:refreshing_build_artifacts_size?).and_return(true)
end
@@ -36,6 +37,30 @@ RSpec.describe Ci::JobArtifacts::DeleteService do
service.execute
end
+
+ it 'returns an error response with the correct message and reason' do
+ result = service.execute
+
+ expect(result).to be_error
+ expect(result[:message]).to be('Action temporarily disabled. ' \
+ 'The project this job belongs to is undergoing stats refresh.')
+ expect(result[:reason]).to be(:project_stats_refresh)
+ end
+ end
+
+ context 'when an error response is received from DestroyBatchService' do
+ before do
+ allow_next_instance_of(Ci::JobArtifacts::DestroyBatchService) do |service|
+ allow(service).to receive(:execute).and_return({ status: :error, message: 'something went wrong' })
+ end
+ end
+
+ it 'returns an error response with the correct message' do
+ result = service.execute
+
+ expect(result).to be_error
+ expect(result[:message]).to be('something went wrong')
+ end
end
end
end
diff --git a/spec/services/ci/job_token_scope/add_project_service_spec.rb b/spec/services/ci/job_token_scope/add_project_service_spec.rb
index bb6df4268dd..bf7df3a5595 100644
--- a/spec/services/ci/job_token_scope/add_project_service_spec.rb
+++ b/spec/services/ci/job_token_scope/add_project_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::JobTokenScope::AddProjectService do
let(:service) { described_class.new(project, current_user) }
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:target_project) { create(:project) }
let_it_be(:current_user) { create(:user) }
diff --git a/spec/services/ci/job_token_scope/remove_project_service_spec.rb b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
index 155e60ac48e..c3f9081cbd8 100644
--- a/spec/services/ci/job_token_scope/remove_project_service_spec.rb
+++ b/spec/services/ci/job_token_scope/remove_project_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::JobTokenScope::RemoveProjectService do
let(:service) { described_class.new(project, current_user) }
- let_it_be(:project) { create(:project, ci_job_token_scope_enabled: true).tap(&:save!) }
+ let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:target_project) { create(:project) }
let_it_be(:current_user) { create(:user) }
diff --git a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
index 6d4dcf28108..c4558bddc85 100644
--- a/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/coverage_report_service_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe Ci::PipelineArtifacts::CoverageReportService do
end
it 'logs relevant information' do
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
expect(Gitlab::AppLogger).to receive(:info).with({
project_id: project.id,
pipeline_id: pipeline.id,
@@ -52,28 +53,12 @@ RSpec.describe Ci::PipelineArtifacts::CoverageReportService do
it_behaves_like 'creating or updating a pipeline coverage report'
- context 'when ci_update_unlocked_pipeline_artifacts feature flag is enabled' do
- it "artifact has pipeline's locked status" do
- subject
-
- artifact = Ci::PipelineArtifact.first
-
- expect(artifact.locked).to eq(pipeline.locked)
- end
- end
+ it "artifact has pipeline's locked status" do
+ subject
- context 'when ci_update_unlocked_pipeline_artifacts is disabled' do
- before do
- stub_feature_flags(ci_update_unlocked_pipeline_artifacts: false)
- end
-
- it 'artifact has unknown locked status' do
- subject
+ artifact = Ci::PipelineArtifact.first
- artifact = Ci::PipelineArtifact.first
-
- expect(artifact.locked).to eq('unknown')
- end
+ expect(artifact.locked).to eq(pipeline.locked)
end
end
diff --git a/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
index 75233248113..5d854b61f14 100644
--- a/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
@@ -51,28 +51,12 @@ RSpec.describe ::Ci::PipelineArtifacts::CreateCodeQualityMrDiffReportService do
end
end
- context 'when ci_update_unlocked_pipeline_artifacts feature flag is enabled' do
- it "artifact has pipeline's locked status" do
- subject
-
- artifact = Ci::PipelineArtifact.first
-
- expect(artifact.locked).to eq(head_pipeline.locked)
- end
- end
-
- context 'when ci_update_unlocked_pipeline_artifacts is disabled' do
- before do
- stub_feature_flags(ci_update_unlocked_pipeline_artifacts: false)
- end
-
- it 'artifact has unknown locked status' do
- subject
+ it "artifact has pipeline's locked status" do
+ subject
- artifact = Ci::PipelineArtifact.first
+ artifact = Ci::PipelineArtifact.first
- expect(artifact.locked).to eq('unknown')
- end
+ expect(artifact.locked).to eq(head_pipeline.locked)
end
it 'does not persist the same artifact twice' do
diff --git a/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
index eb664043567..47e8766c215 100644
--- a/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
+RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_shared_state do
let(:service) { described_class.new }
describe '.execute' do
@@ -85,6 +85,36 @@ RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
is_expected.to eq(0)
end
end
+
+ context 'with unlocked pipeline artifacts' do
+ let_it_be(:not_expired_artifact) { create(:ci_pipeline_artifact, :artifact_unlocked, expire_at: 2.days.from_now) }
+
+ before do
+ create_list(:ci_pipeline_artifact, 2, :artifact_unlocked, expire_at: 1.week.ago)
+ allow(service).to receive(:legacy_destroy_pipeline_artifacts)
+ end
+
+ it 'destroys all expired artifacts' do
+ expect { subject }.to change { Ci::PipelineArtifact.count }.by(-2)
+ expect(not_expired_artifact.reload).to be_present
+ end
+
+ context 'when the loop limit is reached' do
+ before do
+ stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 1)
+ stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
+ end
+
+ it 'destroys one artifact' do
+ expect { subject }.to change { Ci::PipelineArtifact.count }.by(-1)
+ expect(not_expired_artifact.reload).to be_present
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(1)
+ end
+ end
+ end
end
describe '.destroy_artifacts_batch' do
diff --git a/spec/services/ci/runners/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index 6d7b39de21e..2d1b109072f 100644
--- a/spec/services/ci/runners/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
let(:runner) { execute.payload[:runner] }
before do
- stub_feature_flags(runner_registration_control: false)
stub_application_setting(runners_registration_token: registration_token)
stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
end
@@ -166,25 +165,9 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
stub_application_setting(valid_runner_registrars: ['group'])
end
- context 'when feature flag is enabled' do
- before do
- stub_feature_flags(runner_registration_control: true)
- end
-
- it 'returns 403 error' do
- expect(execute).to be_error
- expect(execute.http_status).to eq :forbidden
- end
- end
-
- context 'when feature flag is disabled' do
- it 'registers the runner' do
- expect(execute).to be_success
-
- expect(runner).to be_an_instance_of(::Ci::Runner)
- expect(runner.errors).to be_empty
- expect(runner.active).to be true
- end
+ it 'returns 403 error' do
+ expect(execute).to be_error
+ expect(execute.http_status).to eq :forbidden
end
end
end
@@ -244,24 +227,8 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
stub_application_setting(valid_runner_registrars: ['project'])
end
- context 'when feature flag is enabled' do
- before do
- stub_feature_flags(runner_registration_control: true)
- end
-
- it 'returns error response' do
- is_expected.to be_error
- end
- end
-
- context 'when feature flag is disabled' do
- it 'registers the runner' do
- expect(execute).to be_success
-
- expect(runner).to be_an_instance_of(::Ci::Runner)
- expect(runner.errors).to be_empty
- expect(runner.active).to be true
- end
+ it 'returns error response' do
+ is_expected.to be_error
end
end
end
diff --git a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
index 0d2e237c87b..1f44612947b 100644
--- a/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
+++ b/spec/services/ci/runners/set_runner_associated_projects_service_spec.rb
@@ -47,7 +47,11 @@ RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute' do
it 'reassigns associated projects and returns success response' do
expect(execute).to be_success
- expect(runner.reload.projects.ids).to eq([owner_project.id] + project_ids)
+
+ runner.reload
+
+ expect(runner.owner_project).to eq(owner_project)
+ expect(runner.projects.ids).to match_array([owner_project.id] + project_ids)
end
end
@@ -56,7 +60,11 @@ RSpec.describe ::Ci::Runners::SetRunnerAssociatedProjectsService, '#execute' do
it 'reassigns associated projects and returns success response' do
expect(execute).to be_success
- expect(runner.reload.projects.ids).to eq([owner_project.id] + project_ids)
+
+ runner.reload
+
+ expect(runner.owner_project).to eq(owner_project)
+ expect(runner.projects.ids).to match_array([owner_project.id] + project_ids)
end
end
end
diff --git a/spec/services/ci/unlock_artifacts_service_spec.rb b/spec/services/ci/unlock_artifacts_service_spec.rb
index 776019f03f8..f21afc7fe9e 100644
--- a/spec/services/ci/unlock_artifacts_service_spec.rb
+++ b/spec/services/ci/unlock_artifacts_service_spec.rb
@@ -5,15 +5,11 @@ require 'spec_helper'
RSpec.describe Ci::UnlockArtifactsService do
using RSpec::Parameterized::TableSyntax
- where(:tag, :ci_update_unlocked_job_artifacts, :ci_update_unlocked_pipeline_artifacts) do
- false | false | false
- false | true | false
- true | false | false
- true | true | false
- false | false | true
- false | true | true
- true | false | true
- true | true | true
+ where(:tag, :ci_update_unlocked_job_artifacts) do
+ false | false
+ false | true
+ true | false
+ true | true
end
with_them do
@@ -35,8 +31,7 @@ RSpec.describe Ci::UnlockArtifactsService do
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
- stub_feature_flags(ci_update_unlocked_job_artifacts: ci_update_unlocked_job_artifacts,
- ci_update_unlocked_pipeline_artifacts: ci_update_unlocked_pipeline_artifacts)
+ stub_feature_flags(ci_update_unlocked_job_artifacts: ci_update_unlocked_job_artifacts)
end
describe '#execute' do
@@ -80,7 +75,7 @@ RSpec.describe Ci::UnlockArtifactsService do
end
it 'unlocks pipeline artifact records' do
- if ci_update_unlocked_job_artifacts && ci_update_unlocked_pipeline_artifacts
+ if ci_update_unlocked_job_artifacts
expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
else
expect { execute }.not_to change { ::Ci::PipelineArtifact.artifact_unlocked.count }
@@ -122,7 +117,7 @@ RSpec.describe Ci::UnlockArtifactsService do
end
it 'unlocks pipeline artifact records' do
- if ci_update_unlocked_job_artifacts && ci_update_unlocked_pipeline_artifacts
+ if ci_update_unlocked_job_artifacts
expect { execute }.to change { ::Ci::PipelineArtifact.artifact_unlocked.count }.from(0).to(1)
else
expect { execute }.not_to change { ::Ci::PipelineArtifact.artifact_unlocked.count }
diff --git a/spec/services/clusters/applications/destroy_service_spec.rb b/spec/services/clusters/applications/destroy_service_spec.rb
deleted file mode 100644
index 7306256e68e..00000000000
--- a/spec/services/clusters/applications/destroy_service_spec.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::DestroyService, '#execute' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:user) { create(:user) }
- let(:params) { { application: 'prometheus' } }
- let(:service) { described_class.new(cluster, user, params) }
- let(:test_request) { double }
- let(:worker_class) { Clusters::Applications::UninstallWorker }
-
- subject { service.execute(test_request) }
-
- before do
- allow(worker_class).to receive(:perform_async)
- end
-
- context 'application is not installed' do
- it 'raises Clusters::Applications::BaseService::InvalidApplicationError' do
- expect(worker_class).not_to receive(:perform_async)
-
- expect { subject }
- .to raise_exception { Clusters::Applications::BaseService::InvalidApplicationError }
- .and not_change { Clusters::Applications::Prometheus.count }
- .and not_change { Clusters::Applications::Prometheus.with_status(:scheduled).count }
- end
- end
-
- context 'application is installed' do
- context 'application is schedulable' do
- let!(:application) do
- create(:clusters_applications_prometheus, :installed, cluster: cluster)
- end
-
- it 'makes application scheduled!' do
- subject
-
- expect(application.reload).to be_scheduled
- end
-
- it 'schedules UninstallWorker' do
- expect(worker_class).to receive(:perform_async).with(application.name, application.id)
-
- subject
- end
- end
-
- context 'application is not schedulable' do
- let!(:application) do
- create(:clusters_applications_prometheus, :updating, cluster: cluster)
- end
-
- it 'raises StateMachines::InvalidTransition' do
- expect(worker_class).not_to receive(:perform_async)
-
- expect { subject }
- .to raise_exception { StateMachines::InvalidTransition }
- .and not_change { Clusters::Applications::Prometheus.with_status(:scheduled).count }
- end
- end
- end
-end
diff --git a/spec/services/clusters/applications/uninstall_service_spec.rb b/spec/services/clusters/applications/uninstall_service_spec.rb
deleted file mode 100644
index bfe38ba670d..00000000000
--- a/spec/services/clusters/applications/uninstall_service_spec.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Applications::UninstallService, '#execute' do
- let(:application) { create(:clusters_applications_prometheus, :scheduled) }
- let(:service) { described_class.new(application) }
- let(:helm_client) { instance_double(Gitlab::Kubernetes::Helm::API) }
- let(:worker_class) { Clusters::Applications::WaitForUninstallAppWorker }
-
- before do
- allow(service).to receive(:helm_api).and_return(helm_client)
- end
-
- context 'when there are no errors' do
- before do
- expect(helm_client).to receive(:uninstall).with(kind_of(Gitlab::Kubernetes::Helm::V3::DeleteCommand))
- allow(worker_class).to receive(:perform_in).and_return(nil)
- end
-
- it 'make the application to be uninstalling' do
- expect(application.cluster).not_to be_nil
- service.execute
-
- expect(application).to be_uninstalling
- end
-
- it 'schedule async installation status check' do
- expect(worker_class).to receive(:perform_in).once
-
- service.execute
- end
- end
-
- context 'when k8s cluster communication fails' do
- let(:error) { Kubeclient::HttpError.new(500, 'system failure', nil) }
-
- before do
- expect(helm_client).to receive(:uninstall).with(kind_of(Gitlab::Kubernetes::Helm::V3::DeleteCommand)).and_raise(error)
- end
-
- include_examples 'logs kubernetes errors' do
- let(:error_name) { 'Kubeclient::HttpError' }
- let(:error_message) { 'system failure' }
- let(:error_code) { 500 }
- end
-
- it 'make the application errored' do
- service.execute
-
- expect(application).to be_uninstall_errored
- expect(application.status_reason).to match('Kubernetes error: 500')
- end
- end
-
- context 'a non kubernetes error happens' do
- let(:application) { create(:clusters_applications_prometheus, :scheduled) }
- let(:error) { StandardError.new('something bad happened') }
-
- before do
- expect(helm_client).to receive(:uninstall).with(kind_of(Gitlab::Kubernetes::Helm::V3::DeleteCommand)).and_raise(error)
- end
-
- include_examples 'logs kubernetes errors' do
- let(:error_name) { 'StandardError' }
- let(:error_message) { 'something bad happened' }
- let(:error_code) { nil }
- end
-
- it 'make the application errored' do
- service.execute
-
- expect(application).to be_uninstall_errored
- expect(application.status_reason).to eq('Failed to uninstall.')
- end
- end
-end
diff --git a/spec/services/design_management/move_designs_service_spec.rb b/spec/services/design_management/move_designs_service_spec.rb
index c8abce77325..519378a8dd4 100644
--- a/spec/services/design_management/move_designs_service_spec.rb
+++ b/spec/services/design_management/move_designs_service_spec.rb
@@ -88,23 +88,24 @@ RSpec.describe DesignManagement::MoveDesignsService do
expect(subject).to be_success
- expect(issue.designs.ordered).to eq([
- # Existing designs which already had a relative_position set.
- # These should stay at the beginning, in the same order.
- other_design1,
- other_design2,
-
- # The designs we're passing into the service.
- # These should be placed between the existing designs, in the correct order.
- previous_design,
- current_design,
- next_design,
-
- # Existing designs which didn't have a relative_position set.
- # These should be placed at the end, in the order of their IDs.
- other_design3,
- other_design4
- ])
+ expect(issue.designs.ordered).to eq(
+ [
+ # Existing designs which already had a relative_position set.
+ # These should stay at the beginning, in the same order.
+ other_design1,
+ other_design2,
+
+ # The designs we're passing into the service.
+ # These should be placed between the existing designs, in the correct order.
+ previous_design,
+ current_design,
+ next_design,
+
+ # Existing designs which didn't have a relative_position set.
+ # These should be placed at the end, in the order of their IDs.
+ other_design3,
+ other_design4
+ ])
end
end
end
diff --git a/spec/services/git/tag_hooks_service_spec.rb b/spec/services/git/tag_hooks_service_spec.rb
index 2d50c64d63c..01a0d2e8600 100644
--- a/spec/services/git/tag_hooks_service_spec.rb
+++ b/spec/services/git/tag_hooks_service_spec.rb
@@ -104,12 +104,12 @@ RSpec.describe Git::TagHooksService, :service do
id: commit.id,
message: commit.safe_message,
url: [
- Gitlab.config.gitlab.url,
- project.namespace.to_param,
- project.to_param,
- '-',
- 'commit',
- commit.id
+ Gitlab.config.gitlab.url,
+ project.namespace.to_param,
+ project.to_param,
+ '-',
+ 'commit',
+ commit.id
].join('/')
)
end
diff --git a/spec/services/google_cloud/enable_cloudsql_service_spec.rb b/spec/services/google_cloud/enable_cloudsql_service_spec.rb
index f267f6d3bc2..aa6d2402d7c 100644
--- a/spec/services/google_cloud/enable_cloudsql_service_spec.rb
+++ b/spec/services/google_cloud/enable_cloudsql_service_spec.rb
@@ -4,15 +4,29 @@ require 'spec_helper'
RSpec.describe GoogleCloud::EnableCloudsqlService do
let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:params) do
+ {
+ google_oauth2_token: 'mock-token',
+ gcp_project_id: 'mock-gcp-project-id',
+ environment_name: 'main'
+ }
+ end
- subject(:result) { described_class.new(project).execute }
+ subject(:result) { described_class.new(project, user, params).execute }
context 'when a project does not have any GCP_PROJECT_IDs configured' do
- it 'returns error' do
- message = 'No GCP projects found. Configure a service account or GCP_PROJECT_ID CI variable.'
+ it 'creates GCP_PROJECT_ID project var' do
+ expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ expect(instance).to receive(:enable_cloud_sql_admin).with('mock-gcp-project-id')
+ expect(instance).to receive(:enable_compute).with('mock-gcp-project-id')
+ expect(instance).to receive(:enable_service_networking).with('mock-gcp-project-id')
+ end
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq(message)
+ expect(result[:status]).to eq(:success)
+ expect(project.variables.count).to eq(1)
+ expect(project.variables.first.key).to eq('GCP_PROJECT_ID')
+ expect(project.variables.first.value).to eq('mock-gcp-project-id')
end
end
@@ -30,6 +44,9 @@ RSpec.describe GoogleCloud::EnableCloudsqlService do
it 'enables cloudsql, compute and service networking Google APIs', :aggregate_failures do
expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ expect(instance).to receive(:enable_cloud_sql_admin).with('mock-gcp-project-id')
+ expect(instance).to receive(:enable_compute).with('mock-gcp-project-id')
+ expect(instance).to receive(:enable_service_networking).with('mock-gcp-project-id')
expect(instance).to receive(:enable_cloud_sql_admin).with('prj-prod')
expect(instance).to receive(:enable_compute).with('prj-prod')
expect(instance).to receive(:enable_service_networking).with('prj-prod')
@@ -44,6 +61,9 @@ RSpec.describe GoogleCloud::EnableCloudsqlService do
context 'when Google APIs raise an error' do
it 'returns error result' do
allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ allow(instance).to receive(:enable_cloud_sql_admin).with('mock-gcp-project-id')
+ allow(instance).to receive(:enable_compute).with('mock-gcp-project-id')
+ allow(instance).to receive(:enable_service_networking).with('mock-gcp-project-id')
allow(instance).to receive(:enable_cloud_sql_admin).with('prj-prod')
allow(instance).to receive(:enable_compute).with('prj-prod')
allow(instance).to receive(:enable_service_networking).with('prj-prod')
diff --git a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb b/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
index e0a622bfa4a..0a0f05ab4be 100644
--- a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
+++ b/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
@@ -8,17 +8,19 @@ RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
let(:list_databases_empty) { Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: []) }
let(:list_users_empty) { Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: []) }
let(:list_databases) do
- Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: [
- Google::Apis::SqladminV1beta4::Database.new(name: 'postgres'),
- Google::Apis::SqladminV1beta4::Database.new(name: 'main_db')
- ])
+ Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(
+ items: [
+ Google::Apis::SqladminV1beta4::Database.new(name: 'postgres'),
+ Google::Apis::SqladminV1beta4::Database.new(name: 'main_db')
+ ])
end
let(:list_users) do
- Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: [
- Google::Apis::SqladminV1beta4::User.new(name: 'postgres'),
- Google::Apis::SqladminV1beta4::User.new(name: 'main_user')
- ])
+ Google::Apis::SqladminV1beta4::ListUsersResponse.new(
+ items: [
+ Google::Apis::SqladminV1beta4::User.new(name: 'postgres'),
+ Google::Apis::SqladminV1beta4::User.new(name: 'main_user')
+ ])
end
context 'when unauthorized user triggers worker' do
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index 9288793cc7a..36e868fa5f1 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -117,12 +117,6 @@ RSpec.describe Groups::DestroyService do
Sidekiq::Testing.fake! { destroy_group(group, user, true) }
end
- after do
- # Clean up stale directories
- TestEnv.rm_storage_dir(project.repository_storage, group.path)
- TestEnv.rm_storage_dir(project.repository_storage, remove_path)
- end
-
it 'verifies original paths and projects still exist' do
expect(TestEnv.storage_dir_exists?(project.repository_storage, group.path)).to be_truthy
expect(TestEnv.storage_dir_exists?(project.repository_storage, remove_path)).to be_falsey
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index a4dfec4723a..66b50704939 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -86,6 +86,16 @@ RSpec.describe Groups::ImportExport::ImportService do
service.execute
end
+
+ it 'tracks the event' do
+ service.execute
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_group_from_file'
+ )
+ end
end
context 'with a ndjson file' do
@@ -105,12 +115,11 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when importing a ndjson export' do
let(:user) { create(:user) }
let(:group) { create(:group) }
- let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
- subject { service.execute }
+ subject(:service) { described_class.new(group: group, user: user) }
before do
ImportExportUpload.create!(group: group, import_file: import_file)
@@ -128,11 +137,21 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'imports group structure successfully' do
- expect(subject).to be_truthy
+ expect(service.execute).to be_truthy
+ end
+
+ it 'tracks the event' do
+ service.execute
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_group_from_file'
+ )
end
it 'removes import file' do
- subject
+ service.execute
expect(group.import_export_upload.import_file.file).to be_nil
end
@@ -141,7 +160,7 @@ RSpec.describe Groups::ImportExport::ImportService do
shared = Gitlab::ImportExport::Shared.new(group)
allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- subject
+ service.execute
expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
expect(Dir.exist?(shared.base_path)).to eq(false)
@@ -154,7 +173,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
).once
- subject
+ service.execute
end
end
@@ -166,7 +185,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
)
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
it 'tracks the error' do
@@ -177,7 +196,7 @@ RSpec.describe Groups::ImportExport::ImportService do
expect(param.message).to include 'does not have required permissions for'
end
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@@ -191,7 +210,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
).once
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@@ -203,7 +222,7 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'successfully imports the group' do
- expect(subject).to be_truthy
+ expect(service.execute).to be_truthy
end
it 'logs the import success' do
@@ -215,7 +234,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
)
- subject
+ service.execute
end
end
end
@@ -223,12 +242,11 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when importing a json export' do
let(:user) { create(:user) }
let(:group) { create(:group) }
- let(:service) { described_class.new(group: group, user: user) }
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
- subject { service.execute }
+ subject(:service) { described_class.new(group: group, user: user) }
before do
ImportExportUpload.create!(group: group, import_file: import_file)
@@ -246,11 +264,21 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'imports group structure successfully' do
- expect(subject).to be_truthy
+ expect(service.execute).to be_truthy
+ end
+
+ it 'tracks the event' do
+ service.execute
+
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_group_from_file'
+ )
end
it 'removes import file' do
- subject
+ service.execute
expect(group.import_export_upload.import_file.file).to be_nil
end
@@ -259,7 +287,7 @@ RSpec.describe Groups::ImportExport::ImportService do
shared = Gitlab::ImportExport::Shared.new(group)
allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- subject
+ service.execute
expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
expect(Dir.exist?(shared.base_path)).to eq(false)
@@ -272,7 +300,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
).once
- subject
+ service.execute
end
end
@@ -284,7 +312,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
)
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
it 'tracks the error' do
@@ -295,7 +323,7 @@ RSpec.describe Groups::ImportExport::ImportService do
expect(param.message).to include 'does not have required permissions for'
end
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@@ -309,7 +337,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: a_string_including('Errors occurred')
).once
- expect { subject }.to raise_error(Gitlab::ImportExport::Error)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
@@ -321,7 +349,7 @@ RSpec.describe Groups::ImportExport::ImportService do
end
it 'successfully imports the group' do
- expect(subject).to be_truthy
+ expect(service.execute).to be_truthy
end
it 'logs the import success' do
@@ -333,7 +361,7 @@ RSpec.describe Groups::ImportExport::ImportService do
message: 'Group Import/Export: Import succeeded'
)
- subject
+ service.execute
end
end
end
diff --git a/spec/services/import/github/cancel_project_import_service_spec.rb b/spec/services/import/github/cancel_project_import_service_spec.rb
new file mode 100644
index 00000000000..77b8771ee65
--- /dev/null
+++ b/spec/services/import/github/cancel_project_import_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::Github::CancelProjectImportService do
+ subject(:import_cancel) { described_class.new(project, project.owner) }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://fake.url') }
+
+ describe '.execute' do
+ context 'when user is an owner' do
+ context 'when import is in progress' do
+ it 'update import state to be canceled' do
+ expect(import_cancel.execute).to eq({ status: :success, project: project })
+ end
+ end
+
+ context 'when import is finished' do
+ let(:expected_result) do
+ {
+ status: :error,
+ http_status: :bad_request,
+ message: 'The import cannot be canceled because it is finished'
+ }
+ end
+
+ before do
+ project.import_state.finish!
+ end
+
+ it 'returns error' do
+ expect(import_cancel.execute).to eq(expected_result)
+ end
+ end
+ end
+
+ context 'when user is not allowed to read project' do
+ it 'returns 404' do
+ expect(described_class.new(project, user).execute)
+ .to eq({ status: :error, http_status: :not_found, message: 'Not Found' })
+ end
+ end
+
+ context 'when user is not allowed to cancel project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns 403' do
+ expect(described_class.new(project, user).execute)
+ .to eq({ status: :error, http_status: :forbidden, message: 'Unauthorized access' })
+ end
+ end
+ end
+end
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 67a2c237e43..38d84009f08 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -6,7 +6,16 @@ RSpec.describe Import::GithubService do
let_it_be(:user) { create(:user) }
let_it_be(:token) { 'complex-token' }
let_it_be(:access_params) { { github_access_token: 'github-complex-token' } }
- let_it_be(:params) { { repo_id: 123, new_name: 'new_repo', target_namespace: 'root' } }
+ let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
+ let(:optional_stages) { nil }
+ let(:params) do
+ {
+ repo_id: 123,
+ new_name: 'new_repo',
+ target_namespace: 'root',
+ optional_stages: optional_stages
+ }
+ end
subject(:github_importer) { described_class.new(client, user, params) }
@@ -16,6 +25,12 @@ RSpec.describe Import::GithubService do
shared_examples 'handles errors' do |klass|
let(:client) { klass.new(token) }
+ let(:project_double) { instance_double(Project, persisted?: true) }
+
+ before do
+ allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
+ allow(settings).to receive(:write).with(optional_stages)
+ end
context 'do not raise an exception on input error' do
let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') }
@@ -62,13 +77,14 @@ RSpec.describe Import::GithubService do
expect(client).to receive(:repository).and_return(repository_double)
allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
- allow(creator).to receive(:execute).and_return(double(persisted?: true))
+ allow(creator).to receive(:execute).and_return(project_double)
end
end
context 'when there is no repository size limit defined' do
it 'skips the check and succeeds' do
expect(subject.execute(access_params, :github)).to include(status: :success)
+ expect(settings).to have_received(:write).with(nil)
end
end
@@ -81,6 +97,7 @@ RSpec.describe Import::GithubService do
it 'succeeds when the repository is smaller than the limit' do
expect(subject.execute(access_params, :github)).to include(status: :success)
+ expect(settings).to have_received(:write).with(nil)
end
it 'returns error when the repository is larger than the limit' do
@@ -100,6 +117,7 @@ RSpec.describe Import::GithubService do
context 'when application size limit is defined' do
it 'succeeds when the repository is smaller than the limit' do
expect(subject.execute(access_params, :github)).to include(status: :success)
+ expect(settings).to have_received(:write).with(nil)
end
it 'returns error when the repository is larger than the limit' do
@@ -109,6 +127,22 @@ RSpec.describe Import::GithubService do
end
end
end
+
+ context 'when optional stages params present' do
+ let(:optional_stages) do
+ {
+ single_endpoint_issue_events_import: true,
+ single_endpoint_notes_import: 'false',
+ attachments_import: false
+ }
+ end
+
+ it 'saves optional stages choice to import_data' do
+ subject.execute(access_params, :github)
+
+ expect(settings).to have_received(:write).with(optional_stages)
+ end
+ end
end
context 'when import source is disabled' do
@@ -170,7 +204,7 @@ RSpec.describe Import::GithubService do
include_examples 'handles errors', Gitlab::GithubImport::Client
end
- context 'when remove_legacy_github_client feature flag is enabled' do
+ context 'when remove_legacy_github_client feature flag is disabled' do
before do
stub_feature_flags(remove_legacy_github_client: false)
end
diff --git a/spec/services/import/gitlab_projects/create_project_service_spec.rb b/spec/services/import/gitlab_projects/create_project_service_spec.rb
index 0da897448b8..59c384bad3c 100644
--- a/spec/services/import/gitlab_projects/create_project_service_spec.rb
+++ b/spec/services/import/gitlab_projects/create_project_service_spec.rb
@@ -139,10 +139,11 @@ RSpec.describe ::Import::GitlabProjects::CreateProjectService, :aggregate_failur
expect(response.http_status).to eq(:bad_request)
expect(response.message)
.to eq(%{Project namespace path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'})
- expect(response.payload).to eq(other_errors: [
- %{Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'},
- %{Path must not start or end with a special character and must not contain consecutive special characters.}
- ])
+ expect(response.payload).to eq(
+ other_errors: [
+ %{Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'},
+ %{Path must not start or end with a special character and must not contain consecutive special characters.}
+ ])
end
end
end
diff --git a/spec/services/incident_management/incidents/create_service_spec.rb b/spec/services/incident_management/incidents/create_service_spec.rb
index ac44bc4608c..851b21e1227 100644
--- a/spec/services/incident_management/incidents/create_service_spec.rb
+++ b/spec/services/incident_management/incidents/create_service_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe IncidentManagement::Incidents::CreateService do
it 'responds with errors' do
expect(create_incident).to be_error
- expect(create_incident.message).to eq("Title can't be blank")
+ expect(create_incident.errors).to contain_exactly("Title can't be blank")
end
it 'result payload contains an Issue object' do
@@ -98,7 +98,7 @@ RSpec.describe IncidentManagement::Incidents::CreateService do
it 'responds with errors' do
expect(create_incident).to be_error
- expect(create_incident.message).to eq('Hosts hosts array is over 255 chars')
+ expect(create_incident.errors).to contain_exactly('Hosts hosts array is over 255 chars')
end
end
end
diff --git a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
index 761cc5c92ea..e8208c410d5 100644
--- a/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
+++ b/spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateService do
+RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateService, factory_default: :keep do
+ let_it_be(:project) { create_default(:project) }
let_it_be(:escalation_status) { create(:incident_management_issuable_escalation_status, :triggered) }
let_it_be(:user_with_permissions) { create(:user) }
@@ -10,7 +11,7 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateServ
let(:issue) { escalation_status.issue }
let(:status) { :acknowledged }
let(:params) { { status: status } }
- let(:service) { IncidentManagement::IssuableEscalationStatuses::PrepareUpdateService.new(issue, current_user, params) }
+ let(:service) { described_class.new(issue, current_user, params) }
subject(:result) { service.execute }
@@ -71,9 +72,17 @@ RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateServ
end
end
- context 'when called without params' do
+ context 'when called nil params' do
let(:params) { nil }
+ it 'raises an exception' do
+ expect { result }.to raise_error NoMethodError
+ end
+ end
+
+ context 'when called without params' do
+ let(:params) { {} }
+
it_behaves_like 'successful response', {}
end
diff --git a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
index fb536df5d17..572b1a20166 100644
--- a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService do
it 'responds with error' do
expect(execute).to be_error
- expect(execute.message).to eq("Title can't be blank")
+ expect(execute.errors).to contain_exactly("Title can't be blank")
end
end
end
diff --git a/spec/services/incident_management/timeline_events/create_service_spec.rb b/spec/services/incident_management/timeline_events/create_service_spec.rb
index b999403e168..a7f448c825f 100644
--- a/spec/services/incident_management/timeline_events/create_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/create_service_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
context 'when error occurs during creation' do
let(:args) { {} }
- it_behaves_like 'error response', "Occurred at can't be blank, Note can't be blank, and Note html can't be blank"
+ it_behaves_like 'error response', "Occurred at can't be blank and Timeline text can't be blank"
end
context 'with default action' do
@@ -84,50 +84,6 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
expect(result.action).to eq(IncidentManagement::TimelineEvents::DEFAULT_ACTION)
end
- end
-
- context 'with non_default action' do
- it_behaves_like 'success response'
-
- it 'matches the action from arguments', :aggregate_failures do
- result = execute.payload[:timeline_event]
-
- expect(result.action).to eq(args[:action])
- end
- end
-
- context 'with editable param' do
- let(:args) do
- {
- note: 'note',
- occurred_at: Time.current,
- action: 'new comment',
- promoted_from_note: comment,
- editable: editable
- }
- end
-
- context 'when editable is true' do
- let(:editable) { true }
-
- it_behaves_like 'success response'
- end
-
- context 'when editable is false' do
- let(:editable) { false }
-
- it_behaves_like 'success response'
- end
- end
-
- it 'successfully creates a database record', :aggregate_failures do
- expect { execute }.to change { ::IncidentManagement::TimelineEvent.count }.by(1)
- end
-
- context 'when incident_timeline feature flag is enabled' do
- before do
- stub_feature_flags(incident_timeline: project)
- end
it 'creates a system note' do
expect { execute }.to change { incident.notes.reload.count }.by(1)
@@ -168,14 +124,42 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
end
end
- context 'when incident_timeline feature flag is disabled' do
- before do
- stub_feature_flags(incident_timeline: false)
+ context 'with non_default action' do
+ it_behaves_like 'success response'
+
+ it 'matches the action from arguments', :aggregate_failures do
+ result = execute.payload[:timeline_event]
+
+ expect(result.action).to eq(args[:action])
end
+ end
- it 'does not create a system note' do
- expect { execute }.not_to change { incident.notes.reload.count }
+ context 'with editable param' do
+ let(:args) do
+ {
+ note: 'note',
+ occurred_at: Time.current,
+ action: 'new comment',
+ promoted_from_note: comment,
+ editable: editable
+ }
+ end
+
+ context 'when editable is true' do
+ let(:editable) { true }
+
+ it_behaves_like 'success response'
end
+
+ context 'when editable is false' do
+ let(:editable) { false }
+
+ it_behaves_like 'success response'
+ end
+ end
+
+ it 'successfully creates a database record', :aggregate_failures do
+ expect { execute }.to change { ::IncidentManagement::TimelineEvent.count }.by(1)
end
end
diff --git a/spec/services/incident_management/timeline_events/destroy_service_spec.rb b/spec/services/incident_management/timeline_events/destroy_service_spec.rb
index 09026f87116..e1b258960ae 100644
--- a/spec/services/incident_management/timeline_events/destroy_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/destroy_service_spec.rb
@@ -48,10 +48,10 @@ RSpec.describe IncidentManagement::TimelineEvents::DestroyService do
timeline_event.errors.add(:note, 'cannot be removed')
end
- it_behaves_like 'error response', 'Note cannot be removed'
+ it_behaves_like 'error response', 'Timeline text cannot be removed'
end
- context 'success response' do
+ context 'with success response' do
it 'successfully returns the timeline event', :aggregate_failures do
expect(execute).to be_success
@@ -60,27 +60,11 @@ RSpec.describe IncidentManagement::TimelineEvents::DestroyService do
expect(result.id).to eq(timeline_event.id)
end
- it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_deleted
- end
-
- context 'when incident_timeline feature flag is enabled' do
- before do
- stub_feature_flags(incident_timeline: project)
- end
-
it 'creates a system note' do
expect { execute }.to change { incident.notes.reload.count }.by(1)
end
- end
-
- context 'when incident_timeline feature flag is disabled' do
- before do
- stub_feature_flags(incident_timeline: false)
- end
- it 'does not create a system note' do
- expect { execute }.not_to change { incident.notes.reload.count }
- end
+ it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_deleted
end
end
end
diff --git a/spec/services/incident_management/timeline_events/update_service_spec.rb b/spec/services/incident_management/timeline_events/update_service_spec.rb
index f612c72e2a8..5d8518cf2ef 100644
--- a/spec/services/incident_management/timeline_events/update_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/update_service_spec.rb
@@ -12,10 +12,6 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
let(:params) { { note: 'Updated note', occurred_at: occurred_at } }
let(:current_user) { user }
- before do
- stub_feature_flags(incident_timeline: project)
- end
-
describe '#execute' do
shared_examples 'successful response' do
it 'responds with success', :aggregate_failures do
@@ -70,16 +66,6 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
it_behaves_like 'passing the correct was_changed value', :occurred_at_and_note
- context 'when incident_timeline feature flag is disabled' do
- before do
- stub_feature_flags(incident_timeline: false)
- end
-
- it 'does not add a system note' do
- expect { execute }.not_to change { incident.notes }
- end
- end
-
context 'when note is nil' do
let(:params) { { occurred_at: occurred_at } }
@@ -98,7 +84,7 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
context 'when note is blank' do
let(:params) { { note: '', occurred_at: occurred_at } }
- it_behaves_like 'error response', "Note can't be blank"
+ it_behaves_like 'error response', "Timeline text can't be blank"
end
context 'when occurred_at is nil' do
diff --git a/spec/services/issuable/process_assignees_spec.rb b/spec/services/issuable/process_assignees_spec.rb
index 45d57a1772a..9e909b68172 100644
--- a/spec/services/issuable/process_assignees_spec.rb
+++ b/spec/services/issuable/process_assignees_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Issuable::ProcessAssignees do
extra_assignee_ids: %w(2 5 12))
result = process.execute
- expect(result.sort).to eq(%w(5 7 9).sort)
+ expect(result).to contain_exactly(5, 7, 9)
end
it 'combines other ids when assignee_ids is nil' do
@@ -23,7 +23,7 @@ RSpec.describe Issuable::ProcessAssignees do
extra_assignee_ids: %w(2 5 12))
result = process.execute
- expect(result.sort).to eq(%w(1 2 3 5 11 12).sort)
+ expect(result).to contain_exactly(1, 2, 3, 5, 11, 12)
end
it 'combines other ids when both add_assignee_ids and remove_assignee_ids are not empty' do
@@ -34,7 +34,7 @@ RSpec.describe Issuable::ProcessAssignees do
extra_assignee_ids: %w(2 5 12))
result = process.execute
- expect(result.sort).to eq(%w(1 2 3 5 6 12).sort)
+ expect(result).to contain_exactly(1, 2, 3, 5, 6, 12)
end
it 'combines other ids when remove_assignee_ids is not empty' do
@@ -45,7 +45,7 @@ RSpec.describe Issuable::ProcessAssignees do
extra_assignee_ids: %w(2 5 12))
result = process.execute
- expect(result.sort).to eq(%w(1 2 3 5 12).sort)
+ expect(result).to contain_exactly(1, 2, 3, 5, 12)
end
it 'combines other ids when add_assignee_ids is not empty' do
@@ -56,7 +56,7 @@ RSpec.describe Issuable::ProcessAssignees do
extra_assignee_ids: %w(2 5 12))
result = process.execute
- expect(result.sort).to eq(%w(1 2 4 3 5 6 11 12).sort)
+ expect(result).to contain_exactly(1, 2, 4, 3, 5, 6, 11, 12)
end
it 'combines ids when existing_assignee_ids and extra_assignee_ids are omitted' do
@@ -65,7 +65,18 @@ RSpec.describe Issuable::ProcessAssignees do
remove_assignee_ids: %w(4 7 11))
result = process.execute
- expect(result.sort).to eq(%w(2 6).sort)
+ expect(result.sort).to eq([2, 6].sort)
+ end
+
+ it 'handles mixed string and integer arrays' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
+ add_assignee_ids: [2, 4, 6],
+ remove_assignee_ids: %w(4 7 11),
+ existing_assignee_ids: [1, 3, 11],
+ extra_assignee_ids: %w(2 5 12))
+ result = process.execute
+
+ expect(result).to contain_exactly(1, 2, 3, 5, 6, 12)
end
end
end
diff --git a/spec/services/issues/clone_service_spec.rb b/spec/services/issues/clone_service_spec.rb
index 435488b7f66..67f32b85350 100644
--- a/spec/services/issues/clone_service_spec.rb
+++ b/spec/services/issues/clone_service_spec.rb
@@ -36,6 +36,21 @@ RSpec.describe Issues::CloneService do
context 'issue movable' do
include_context 'user can clone issue'
+ context 'when issue creation fails' do
+ before do
+ allow_next_instance_of(Issues::CreateService) do |create_service|
+ allow(create_service).to receive(:execute).and_return(ServiceResponse.error(message: 'some error'))
+ end
+ end
+
+ it 'raises a clone error' do
+ expect { clone_service.execute(old_issue, new_project) }.to raise_error(
+ Issues::CloneService::CloneError,
+ 'some error'
+ )
+ end
+ end
+
context 'generic issue' do
let!(:new_issue) { clone_service.execute(old_issue, new_project, with_notes: with_notes) }
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 3d52dc07c4f..5fe4c693451 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Issues::CreateService do
include AfterNextHelpers
let_it_be(:group) { create(:group, :crm_enabled) }
- let_it_be_with_reload(:project) { create(:project, group: group) }
+ let_it_be_with_reload(:project) { create(:project, :public, group: group) }
let_it_be(:user) { create(:user) }
let(:spam_params) { double }
@@ -23,12 +23,27 @@ RSpec.describe Issues::CreateService do
let_it_be(:assignee) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
- let(:issue) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute }
+ let(:result) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute }
+ let(:issue) { result[:issue] }
before do
stub_spam_services
end
+ context 'when params are invalid' do
+ let(:opts) { { title: '' } }
+
+ before_all do
+ project.add_guest(assignee)
+ end
+
+ it 'returns an error service response' do
+ expect(result).to be_error
+ expect(result.errors).to include("Title can't be blank")
+ expect(issue).not_to be_persisted
+ end
+ end
+
context 'when params are valid' do
let_it_be(:labels) { create_pair(:label, project: project) }
@@ -60,6 +75,30 @@ RSpec.describe Issues::CreateService do
end
end
+ describe 'authorization' do
+ let_it_be(:project) { create(:project, :private, group: group).tap { |project| project.add_guest(user) } }
+
+ let(:opts) { { title: 'private issue', description: 'please fix' } }
+
+ context 'when the user is authorized' do
+ it 'allows the user to create an issue' do
+ expect(result).to be_success
+ expect(issue).to be_persisted
+ end
+ end
+
+ context 'when the user is not authorized' do
+ let(:user) { create(:user) }
+
+ it 'does not allow the user to create an issue' do
+ expect(result).to be_error
+ expect(result.errors).to contain_exactly('Operation not allowed')
+ expect(result.http_status).to eq(403)
+ expect(issue).to be_nil
+ end
+ end
+ end
+
it 'works if base work item types were not created yet' do
WorkItems::Type.delete_all
@@ -71,6 +110,7 @@ RSpec.describe Issues::CreateService do
it 'creates the issue with the given params' do
expect(Issuable::CommonSystemNotesService).to receive_message_chain(:new, :execute)
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue).to be_a(::Issue)
expect(issue.title).to eq('Awesome issue')
@@ -89,12 +129,13 @@ RSpec.describe Issues::CreateService do
end
context 'when a build_service is provided' do
- let(:issue) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params, build_service: build_service).execute }
+ let(:result) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params, build_service: build_service).execute }
let(:issue_from_builder) { WorkItem.new(project: project, title: 'Issue from builder') }
let(:build_service) { double(:build_service, execute: issue_from_builder) }
it 'uses the provided service to build the issue' do
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue).to be_a(WorkItem)
end
@@ -119,6 +160,7 @@ RSpec.describe Issues::CreateService do
end
it 'sets the correct relative position' do
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.relative_position).to be_present
expect(issue.relative_position).to be_between(issue_before.relative_position, issue_after.relative_position)
@@ -196,8 +238,10 @@ RSpec.describe Issues::CreateService do
let_it_be(:non_member) { create(:user) }
it 'filters out params that cannot be set without the :set_issue_metadata permission' do
- issue = described_class.new(project: project, current_user: non_member, params: opts, spam_params: spam_params).execute
+ result = described_class.new(project: project, current_user: non_member, params: opts, spam_params: spam_params).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.title).to eq('Awesome issue')
expect(issue.description).to eq('please fix')
@@ -208,8 +252,10 @@ RSpec.describe Issues::CreateService do
end
it 'can create confidential issues' do
- issue = described_class.new(project: project, current_user: non_member, params: { confidential: true }, spam_params: spam_params).execute
+ result = described_class.new(project: project, current_user: non_member, params: opts.merge(confidential: true), spam_params: spam_params).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue.confidential).to be_truthy
end
end
@@ -298,7 +344,7 @@ RSpec.describe Issues::CreateService do
let(:opts) do
{ title: 'Title',
description: 'Description',
- assignees: [assignee] }
+ assignee_ids: [assignee.id] }
end
it 'invalidates open issues counter for assignees when issue is assigned' do
@@ -389,7 +435,7 @@ RSpec.describe Issues::CreateService do
end
it 'schedules a namespace onboarding create action worker' do
- expect(Namespaces::OnboardingIssueCreatedWorker).to receive(:perform_async).with(project.namespace.id)
+ expect(Onboarding::IssueCreatedWorker).to receive(:perform_async).with(project.namespace.id)
issue
end
@@ -404,16 +450,20 @@ RSpec.describe Issues::CreateService do
it 'removes assignee when user id is invalid' do
opts = { title: 'Title', description: 'Description', assignee_ids: [-1] }
- issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue.assignees).to be_empty
end
it 'removes assignee when user id is 0' do
opts = { title: 'Title', description: 'Description', assignee_ids: [0] }
- issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue.assignees).to be_empty
end
@@ -421,8 +471,10 @@ RSpec.describe Issues::CreateService do
project.add_maintainer(assignee)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue.assignees).to eq([assignee])
end
@@ -439,8 +491,10 @@ RSpec.describe Issues::CreateService do
project.update!(visibility_level: level)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- issue = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ result = described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue.assignees).to be_empty
end
end
@@ -449,7 +503,7 @@ RSpec.describe Issues::CreateService do
end
it_behaves_like 'issuable record that supports quick actions' do
- let(:issuable) { described_class.new(project: project, current_user: user, params: params, spam_params: spam_params).execute }
+ let(:issuable) { described_class.new(project: project, current_user: user, params: params, spam_params: spam_params).execute[:issue] }
end
context 'Quick actions' do
@@ -472,6 +526,7 @@ RSpec.describe Issues::CreateService do
end
it 'assigns, sets milestone, and sets contact to issuable from command' do
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.assignees).to eq([assignee])
expect(issue.milestone).to eq(milestone)
@@ -493,6 +548,8 @@ RSpec.describe Issues::CreateService do
context 'with permission' do
it 'assigns contact to issue' do
group.add_reporter(user)
+
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.issue_customer_relations_contacts.last.contact).to eq(contact)
end
@@ -501,6 +558,8 @@ RSpec.describe Issues::CreateService do
context 'without permission' do
it 'does not assign contact to issue' do
group.add_guest(user)
+
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.issue_customer_relations_contacts).to be_empty
end
@@ -535,6 +594,7 @@ RSpec.describe Issues::CreateService do
end
it 'can apply labels' do
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.labels).to eq([label])
end
@@ -569,25 +629,32 @@ RSpec.describe Issues::CreateService do
end
it 'sets default title and description values if not provided' do
- issue = described_class.new(
+ result = described_class.new(
project: project, current_user: user,
params: opts,
spam_params: spam_params
).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.title).to eq("Follow-up from \"#{merge_request.title}\"")
expect(issue.description).to include("The following discussion from #{merge_request.to_reference} should be addressed")
end
it 'takes params from the request over the default values' do
- issue = described_class.new(project: project, current_user: user,
- params: opts.merge(
- description: 'Custom issue description',
- title: 'My new issue'
- ),
- spam_params: spam_params).execute
+ result = described_class.new(
+ project: project,
+ current_user: user,
+ params: opts.merge(
+ description: 'Custom issue description',
+ title: 'My new issue'
+ ),
+ spam_params: spam_params
+ ).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.description).to eq('Custom issue description')
expect(issue.title).to eq('My new issue')
@@ -613,25 +680,32 @@ RSpec.describe Issues::CreateService do
end
it 'sets default title and description values if not provided' do
- issue = described_class.new(
+ result = described_class.new(
project: project, current_user: user,
params: opts,
spam_params: spam_params
).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.title).to eq("Follow-up from \"#{merge_request.title}\"")
expect(issue.description).to include("The following discussion from #{merge_request.to_reference} should be addressed")
end
it 'takes params from the request over the default values' do
- issue = described_class.new(project: project, current_user: user,
- params: opts.merge(
- description: 'Custom issue description',
- title: 'My new issue'
- ),
- spam_params: spam_params).execute
+ result = described_class.new(
+ project: project,
+ current_user: user,
+ params: opts.merge(
+ description: 'Custom issue description',
+ title: 'My new issue'
+ ),
+ spam_params: spam_params
+ ).execute
+ issue = result[:issue]
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.description).to eq('Custom issue description')
expect(issue.title).to eq('My new issue')
@@ -648,6 +722,7 @@ RSpec.describe Issues::CreateService do
it 'ignores related issue if not accessible' do
expect { issue }.not_to change { IssueLink.count }
+ expect(result).to be_success
expect(issue).to be_persisted
end
@@ -658,6 +733,7 @@ RSpec.describe Issues::CreateService do
it 'adds a link to the issue' do
expect { issue }.to change { IssueLink.count }.by(1)
+ expect(result).to be_success
expect(issue).to be_persisted
expect(issue.related_issues(user)).to eq([related_issue])
end
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 863df810d01..23180f75eb3 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -35,6 +35,23 @@ RSpec.describe Issues::MoveService do
let!(:new_issue) { move_service.execute(old_issue, new_project) }
end
+ context 'when issue creation fails' do
+ include_context 'user can move issue'
+
+ before do
+ allow_next_instance_of(Issues::CreateService) do |create_service|
+ allow(create_service).to receive(:execute).and_return(ServiceResponse.error(message: 'some error'))
+ end
+ end
+
+ it 'raises a move error' do
+ expect { move_service.execute(old_issue, new_project) }.to raise_error(
+ Issues::MoveService::MoveError,
+ 'some error'
+ )
+ end
+ end
+
context 'issue movable' do
include_context 'user can move issue'
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 634a4206d48..20b1a1f58bb 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -512,6 +512,20 @@ RSpec.describe Issues::UpdateService, :mailer do
expect(note.note).to eq('changed the description')
end
+
+ it 'triggers GraphQL description updated subscription' do
+ expect(GraphqlTriggers).to receive(:issuable_description_updated).with(issue).and_call_original
+
+ update_issue(description: 'Changed description')
+ end
+ end
+
+ context 'when decription is not changed' do
+ it 'does not trigger GraphQL description updated subscription' do
+ expect(GraphqlTriggers).not_to receive(:issuable_description_updated)
+
+ update_issue(title: 'Changed title')
+ end
end
context 'when issue turns confidential' do
@@ -838,6 +852,24 @@ RSpec.describe Issues::UpdateService, :mailer do
service.execute(issue)
end
+ # At the moment of writting old associations are not necessary for update_task
+ # and doing this will prevent fetching associations from the DB and comparing old and new labels
+ it 'does not pass old_associations to the after_update method' do
+ params = {
+ update_task: {
+ index: 1,
+ checked: false,
+ line_source: '- [x] Task 1',
+ line_number: 1
+ }
+ }
+ service = described_class.new(project: project, current_user: user, params: params)
+
+ expect(service).to receive(:after_update).with(issue, {})
+
+ service.execute(issue)
+ end
+
it 'creates system note about task status change' do
note1 = find_note('marked the checklist item **Task 1** as completed')
note2 = find_note('marked the checklist item **Task 2** as completed')
diff --git a/spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb b/spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb
new file mode 100644
index 00000000000..f5359e5b643
--- /dev/null
+++ b/spec/services/jira_connect/create_asymmetric_jwt_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraConnect::CreateAsymmetricJwtService do
+ describe '#execute' do
+ let_it_be(:jira_connect_installation) { create(:jira_connect_installation) }
+
+ let(:service) { described_class.new(jira_connect_installation) }
+
+ subject(:jwt_token) { service.execute }
+
+ it 'raises an error' do
+ expect { jwt_token }.to raise_error(ArgumentError, 'jira_connect_installation is not a proxy installation')
+ end
+
+ context 'with proxy installation' do
+ let_it_be(:jira_connect_installation) { create(:jira_connect_installation, instance_url: 'https://gitlab.test') }
+
+ let(:public_key_id) { Atlassian::Jwt.decode(jwt_token, nil, false, algorithm: 'RS256').last['kid'] }
+ let(:public_key_cdn) { 'https://gitlab.com/-/jira_connect/public_keys/' }
+ let(:jwt_verification_claims) do
+ {
+ aud: 'https://gitlab.test/-/jira_connect',
+ iss: jira_connect_installation.client_key,
+ qsh: Atlassian::Jwt.create_query_string_hash('https://gitlab.test/-/jira_connect/events/installed', 'POST', 'https://gitlab.test/-/jira_connect')
+ }
+ end
+
+ subject(:jwt_token) { service.execute }
+
+ it 'stores the public key' do
+ expect { JiraConnect::PublicKey.find(public_key_id) }.not_to raise_error
+ end
+
+ it 'is produces a valid JWT' do
+ public_key = OpenSSL::PKey.read(JiraConnect::PublicKey.find(public_key_id).key)
+ options = jwt_verification_claims.except(:qsh).merge({ verify_aud: true, verify_iss: true, algorithm: 'RS256' })
+
+ decoded_token = Atlassian::Jwt.decode(jwt_token, public_key, true, options).first
+
+ expect(decoded_token).to eq(jwt_verification_claims.stringify_keys)
+ end
+ end
+ end
+end
diff --git a/spec/services/jira_connect/sync_service_spec.rb b/spec/services/jira_connect/sync_service_spec.rb
index 7242b1f41f9..32580a7735f 100644
--- a/spec/services/jira_connect/sync_service_spec.rb
+++ b/spec/services/jira_connect/sync_service_spec.rb
@@ -46,10 +46,11 @@ RSpec.describe JiraConnect::SyncService do
context 'when a request returns an error' do
it 'logs the response as an error' do
- expect_next(client).to store_info([
- { 'errorMessages' => ['some error message'] },
- { 'errorMessages' => ['x'] }
- ])
+ expect_next(client).to store_info(
+ [
+ { 'errorMessages' => ['some error message'] },
+ { 'errorMessages' => ['x'] }
+ ])
expect_log(:error, { 'errorMessages' => ['some error message'] })
expect_log(:error, { 'errorMessages' => ['x'] })
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 25696ca209e..756e1cf403c 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -110,12 +110,61 @@ RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_
expect(execute_service[:status]).to eq(:success)
end
end
+
+ context 'when only one user fails validations' do
+ let_it_be(:source) { create(:project, group: create(:group)) }
+ let(:user_id) { [member.id, user_invited_by_id.id] }
+
+ before do
+ # validations will fail because we try to invite them to the project as a guest
+ source.group.add_developer(member)
+ end
+
+ it 'triggers the members added event' do
+ expect(Gitlab::EventStore)
+ .to receive(:publish)
+ .with(an_instance_of(Members::MembersAddedEvent))
+ .and_call_original
+
+ expect(execute_service[:status]).to eq(:error)
+ expect(execute_service[:message])
+ .to include 'Access level should be greater than or equal to Developer inherited membership from group'
+ expect(source.users).not_to include(member)
+ expect(source.users).to include(user_invited_by_id)
+ end
+ end
+
+ context 'when all users fail validations' do
+ let_it_be(:source) { create(:project, group: create(:group)) }
+ let(:user_id) { [member.id, user_invited_by_id.id] }
+
+ before do
+ # validations will fail because we try to invite them to the project as a guest
+ source.group.add_developer(member)
+ source.group.add_developer(user_invited_by_id)
+ end
+
+ it 'does not trigger the members added event' do
+ expect(Gitlab::EventStore)
+ .not_to receive(:publish)
+ .with(an_instance_of(Members::MembersAddedEvent))
+
+ expect(execute_service[:status]).to eq(:error)
+ expect(execute_service[:message])
+ .to include 'Access level should be greater than or equal to Developer inherited membership from group'
+ expect(source.users).not_to include(member, user_invited_by_id)
+ end
+ end
end
context 'when passing no user ids' do
let(:user_id) { '' }
it 'does not add a member' do
+ expect(Gitlab::EventStore)
+ .not_to receive(:publish)
+ .with(an_instance_of(Members::MembersAddedEvent))
+
expect(execute_service[:status]).to eq(:error)
expect(execute_service[:message]).to be_present
expect(source.users).not_to include member
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index 9f0daba3327..8559c02be57 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -95,6 +95,37 @@ RSpec.describe Members::DestroyService do
end
end
+ context 'With ExclusiveLeaseHelpers' do
+ let(:service_object) { described_class.new(current_user) }
+ let!(:member) { group_project.add_developer(member_user) }
+
+ subject(:destroy_member) { service_object.execute(member, **opts) }
+
+ before do
+ group_project.add_maintainer(current_user)
+
+ allow(service_object).to receive(:in_lock) do |_, &block|
+ block.call if lock_obtained
+ end
+ end
+
+ context 'when lock is obtained' do
+ let(:lock_obtained) { true }
+
+ it 'destroys the membership' do
+ expect { destroy_member }.to change { group_project.members.count }.by(-1)
+ end
+ end
+
+ context 'when the lock can not be obtained' do
+ let(:lock_obtained) { false }
+
+ it 'does not destroy the membership' do
+ expect { destroy_member }.not_to change { group_project.members.count }
+ end
+ end
+ end
+
context 'with a member with access' do
before do
group_project.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PRIVATE)
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 8f448184b45..b3c4ed4c544 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe MergeRequests::CloseService do
let(:merge_request) { create(:merge_request, assignees: [user2], author: create(:user)) }
let(:project) { merge_request.project }
let!(:todo) { create(:todo, :assigned, user: user, project: project, target: merge_request, author: user2) }
+ let(:service) { described_class.new(project: project, current_user: user) }
before do
project.add_maintainer(user)
@@ -16,18 +17,20 @@ RSpec.describe MergeRequests::CloseService do
project.add_guest(guest)
end
+ def execute
+ service.execute(merge_request)
+ end
+
describe '#execute' do
it_behaves_like 'cache counters invalidator'
it_behaves_like 'merge request reviewers cache counters invalidator'
context 'valid params' do
- let(:service) { described_class.new(project: project, current_user: user) }
-
before do
allow(service).to receive(:execute_hooks)
perform_enqueued_jobs do
- @merge_request = service.execute(merge_request)
+ @merge_request = execute
end
end
@@ -73,7 +76,7 @@ RSpec.describe MergeRequests::CloseService do
expect(metrics_service).to receive(:close)
- described_class.new(project: project, current_user: user).execute(merge_request)
+ execute
end
it 'calls the merge request activity counter' do
@@ -81,13 +84,11 @@ RSpec.describe MergeRequests::CloseService do
.to receive(:track_close_mr_action)
.with(user: user)
- described_class.new(project: project, current_user: user).execute(merge_request)
+ execute
end
it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
- service = described_class.new(project: project, current_user: user)
-
- expect { service.execute(merge_request) }
+ expect { execute }
.to change { project.open_merge_requests_count }.from(1).to(0)
end
@@ -96,25 +97,39 @@ RSpec.describe MergeRequests::CloseService do
expect(service).to receive(:execute_for_merge_request_pipeline).with(merge_request)
end
- described_class.new(project: project, current_user: user).execute(merge_request)
+ execute
end
it 'schedules CleanupRefsService' do
expect(MergeRequests::CleanupRefsService).to receive(:schedule).with(merge_request)
- described_class.new(project: project, current_user: user).execute(merge_request)
+ execute
+ end
+
+ it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
+
+ execute
end
context 'current user is not authorized to close merge request' do
+ let(:user) { guest }
+
before do
perform_enqueued_jobs do
- @merge_request = described_class.new(project: project, current_user: guest).execute(merge_request)
+ @merge_request = execute
end
end
it 'does not close the merge request' do
expect(@merge_request).to be_open
end
+
+ it 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
+
+ execute
+ end
end
end
end
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 749b30bff5f..0eefbed252b 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -34,19 +34,19 @@ RSpec.describe MergeRequests::CreateFromIssueService do
expect(result[:message]).to eq('Invalid issue iid')
end
- it 'creates a branch based on issue title', :sidekiq_might_not_need_inline do
+ it 'creates a branch based on issue title' do
service.execute
expect(target_project.repository.branch_exists?(issue.to_branch_name)).to be_truthy
end
- it 'creates a branch using passed name', :sidekiq_might_not_need_inline do
+ it 'creates a branch using passed name' do
service_with_custom_source_branch.execute
expect(target_project.repository.branch_exists?(custom_source_branch)).to be_truthy
end
- it 'creates the new_merge_request system note', :sidekiq_might_not_need_inline do
+ it 'creates the new_merge_request system note' do
expect(SystemNoteService).to receive(:new_merge_request).with(issue, project, user, instance_of(MergeRequest))
service.execute
@@ -60,7 +60,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
service.execute
end
- it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created', :sidekiq_might_not_need_inline do
+ it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created' do
expect_next_instance_of(MergeRequest) do |instance|
expect(instance).to receive(:valid?).at_least(:once).and_return(false)
end
@@ -81,36 +81,36 @@ RSpec.describe MergeRequests::CreateFromIssueService do
service.execute
end
- it 'creates a merge request', :sidekiq_might_not_need_inline do
+ it 'creates a merge request' do
expect { service.execute }.to change(target_project.merge_requests, :count).by(1)
end
- it 'sets the merge request author to current user and assigns them', :sidekiq_might_not_need_inline do
+ it 'sets the merge request author to current user and assigns them' do
result = service.execute
expect(result[:merge_request].author).to eq(user)
expect(result[:merge_request].assignees).to eq([user])
end
- it 'sets the merge request source branch to the new issue branch', :sidekiq_might_not_need_inline do
+ it 'sets the merge request source branch to the new issue branch' do
result = service.execute
expect(result[:merge_request].source_branch).to eq(issue.to_branch_name)
end
- it 'sets the merge request source branch to the passed branch name', :sidekiq_might_not_need_inline do
+ it 'sets the merge request source branch to the passed branch name' do
result = service_with_custom_source_branch.execute
expect(result[:merge_request].source_branch).to eq(custom_source_branch)
end
- it 'sets the merge request target branch to the project default branch', :sidekiq_might_not_need_inline do
+ it 'sets the merge request target branch to the project default branch' do
result = service.execute
expect(result[:merge_request].target_branch).to eq(target_project.default_branch)
end
- it 'executes quick actions if the build service sets them in the description', :sidekiq_might_not_need_inline do
+ it 'executes quick actions if the build service sets them in the description' do
allow(service).to receive(:merge_request).and_wrap_original do |m, *args|
m.call(*args).tap do |merge_request|
merge_request.description = "/assign #{user.to_reference}"
@@ -122,7 +122,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
expect(result[:merge_request].assignees).to eq([user])
end
- context 'when ref branch is set', :sidekiq_might_not_need_inline do
+ context 'when ref branch is set' do
subject { described_class.new(project: project, current_user: user, mr_params: { ref: 'feature', **service_params }).execute }
it 'sets the merge request source branch to the new issue branch' do
@@ -213,7 +213,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
it_behaves_like 'a service that creates a merge request from an issue'
- it 'sets the merge request title to: "Draft: $issue-branch-name', :sidekiq_might_not_need_inline do
+ it 'sets the merge request title to: "Draft: $issue-branch-name"' do
result = service.execute
expect(result[:merge_request].title).to eq("Draft: #{issue.to_branch_name.titleize.humanize}")
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 4102cdc101e..0bc8258af42 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
description: 'please fix',
source_branch: 'feature',
target_branch: 'master',
- assignees: [user2]
+ assignee_ids: [user2.id]
}
end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index aa5d6dcd1fb..5027acbba0a 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -108,7 +108,8 @@ RSpec.describe MergeRequests::FfMergeService do
service.execute(merge_request)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(message: a_string_matching(error_message)))
end
it 'logs and saves error if there is an PreReceiveError exception' do
@@ -122,7 +123,8 @@ RSpec.describe MergeRequests::FfMergeService do
service.execute(merge_request)
expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(message: a_string_matching(error_message)))
end
it 'does not update squash_commit_sha if squash merge is not successful' do
diff --git a/spec/services/merge_requests/link_lfs_objects_service_spec.rb b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
index 2fb6bbaf02f..96cb72baac2 100644
--- a/spec/services/merge_requests/link_lfs_objects_service_spec.rb
+++ b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
@@ -52,10 +52,11 @@ RSpec.describe MergeRequests::LinkLfsObjectsService, :sidekiq_inline do
it 'calls Projects::LfsPointers::LfsLinkService#execute with OIDs of LFS objects in merge request' do
expect_next_instance_of(Projects::LfsPointers::LfsLinkService) do |service|
- expect(service).to receive(:execute).with(%w[
- 8b12507783d5becacbf2ebe5b01a60024d8728a8f86dcc818bce699e8b3320bc
- 94a72c074cfe574742c9e99e863322f73feff82981d065ff65a0308f44f19f62
- ])
+ expect(service).to receive(:execute).with(
+ %w[
+ 8b12507783d5becacbf2ebe5b01a60024d8728a8f86dcc818bce699e8b3320bc
+ 94a72c074cfe574742c9e99e863322f73feff82981d065ff65a0308f44f19f62
+ ])
end
execute
diff --git a/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb b/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
index 4d7bd3d8800..8437876c3cf 100644
--- a/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
+++ b/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
@@ -15,20 +15,26 @@ RSpec.describe MergeRequests::MarkReviewerReviewedService do
end
describe '#execute' do
- describe 'invalid permissions' do
- let(:service) { described_class.new(project: project, current_user: create(:user)) }
-
+ shared_examples_for 'failed service execution' do
it 'returns an error' do
expect(result[:status]).to eq :error
end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { result }
+ end
+ end
+
+ describe 'invalid permissions' do
+ let(:service) { described_class.new(project: project, current_user: create(:user)) }
+
+ it_behaves_like 'failed service execution'
end
describe 'reviewer does not exist' do
let(:service) { described_class.new(project: project, current_user: create(:user)) }
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
+ it_behaves_like 'failed service execution'
end
describe 'reviewer exists' do
@@ -40,6 +46,10 @@ RSpec.describe MergeRequests::MarkReviewerReviewedService do
expect(result[:status]).to eq :success
expect(reviewer.state).to eq 'reviewed'
end
+
+ it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { result }
+ end
end
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index a2d73d8c9b1..d3bf203d6bb 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -95,6 +95,42 @@ RSpec.describe MergeRequests::MergeService do
end
end
+ context 'running the service once' do
+ let(:ref) { merge_request.to_reference(full: true) }
+ let(:jid) { SecureRandom.hex }
+
+ let(:messages) do
+ [
+ /#{ref} - Git merge started on JID #{jid}/,
+ /#{ref} - Git merge finished on JID #{jid}/,
+ /#{ref} - Post merge started on JID #{jid}/,
+ /#{ref} - Post merge finished on JID #{jid}/,
+ /#{ref} - Merge process finished on JID #{jid}/
+ ]
+ end
+
+ before do
+ merge_request.update!(merge_jid: jid)
+ ::Gitlab::ApplicationContext.push(caller_id: 'MergeWorker')
+ end
+
+ it 'logs status messages' do
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
+
+ messages.each do |message|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ 'meta.caller_id' => 'MergeWorker',
+ message: message,
+ merge_request_info: ref
+ )
+ ).and_call_original
+ end
+
+ service.execute(merge_request)
+ end
+ end
+
context 'running the service multiple time' do
it 'is idempotent' do
2.times { service.execute(merge_request) }
@@ -315,7 +351,9 @@ RSpec.describe MergeRequests::MergeService do
service.execute(merge_request)
expect(merge_request.merge_error).to eq(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
end
@@ -328,7 +366,9 @@ RSpec.describe MergeRequests::MergeService do
service.execute(merge_request)
expect(merge_request.merge_error).to eq(described_class::GENERIC_ERROR_MESSAGE)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
it 'logs and saves error if user is not authorized' do
@@ -354,7 +394,9 @@ RSpec.describe MergeRequests::MergeService do
service.execute(merge_request)
expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
it 'logs and saves error if commit is not created' do
@@ -366,7 +408,9 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request).to be_open
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.merge_error).to include(described_class::GENERIC_ERROR_MESSAGE)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(described_class::GENERIC_ERROR_MESSAGE))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(described_class::GENERIC_ERROR_MESSAGE)))
end
context 'when squashing is required' do
@@ -385,7 +429,9 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
end
@@ -406,7 +452,9 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
it 'logs and saves error if there is an PreReceiveError exception' do
@@ -422,7 +470,9 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include('Something went wrong during merge pre-receive hook')
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
context 'when fast-forward merge is not allowed' do
@@ -444,7 +494,9 @@ RSpec.describe MergeRequests::MergeService do
expect(merge_request.merge_commit_sha).to be_nil
expect(merge_request.squash_commit_sha).to be_nil
expect(merge_request.merge_error).to include(error_message)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
end
end
@@ -461,7 +513,9 @@ RSpec.describe MergeRequests::MergeService do
it 'logs and saves error' do
service.execute(merge_request)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
end
@@ -473,7 +527,9 @@ RSpec.describe MergeRequests::MergeService do
it 'logs and saves error' do
service.execute(merge_request)
- expect(Gitlab::AppLogger).to have_received(:error).with(a_string_matching(error_message))
+ expect(Gitlab::AppLogger).to have_received(:error)
+ .with(hash_including(merge_request_info: merge_request.to_reference(full: true),
+ message: a_string_matching(error_message)))
end
context 'when passing `skip_discussions_check: true` as `options` parameter' do
diff --git a/spec/services/merge_requests/mergeability/logger_spec.rb b/spec/services/merge_requests/mergeability/logger_spec.rb
index a4d544884b9..3e2a1e9f9fd 100644
--- a/spec/services/merge_requests/mergeability/logger_spec.rb
+++ b/spec/services/merge_requests/mergeability/logger_spec.rb
@@ -94,25 +94,6 @@ RSpec.describe MergeRequests::Mergeability::Logger, :request_store do
end
end
- context 'when disabled' do
- before do
- stub_feature_flags(mergeability_checks_logger: false)
- end
-
- it "returns the block's value" do
- expect(logger.instrument(mergeability_name: :expensive_operation) { 123 }).to eq(123)
- end
-
- it 'does not call the logger' do
- expect(Gitlab::AppJsonLogger).not_to receive(:new)
-
- expect(logger.instrument(mergeability_name: :expensive_operation) { Project.count + MergeRequest.count })
- .to eq(2)
-
- logger.commit
- end
- end
-
it 'raises an error when block is not provided' do
expect { logger.instrument(mergeability_name: :expensive_operation) }
.to raise_error(ArgumentError, 'block not given')
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 391377ad801..251bf6f0d9d 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -730,6 +730,15 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'with a deleted branch'
it_behaves_like 'with the project default branch'
+
+ context 'when passing in usernames' do
+ # makes sure that usernames starting with numbers aren't treated as IDs
+ let(:user2) { create(:user, username: '123user', developer_projects: [project]) }
+ let(:user3) { create(:user, username: '999user', developer_projects: [project]) }
+ let(:assigned) { { user2.username => 1, user3.username => 1 } }
+
+ it_behaves_like 'with an existing branch that has a merge request open in foss'
+ end
end
describe '`unassign` push option' do
@@ -743,6 +752,13 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'with a deleted branch'
it_behaves_like 'with the project default branch'
+
+ context 'when passing in usernames' do
+ let(:assigned) { { user2.username => 1, user3.username => 1 } }
+ let(:unassigned) { { user1.username => 1, user3.username => 1 } }
+
+ it_behaves_like 'with an existing branch that has a merge request open in foss'
+ end
end
describe 'multiple pushed branches' do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 09d06b8b2ab..5174ceaaa82 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe MergeRequests::RefreshService do
include ProjectForksHelper
- include ProjectHelpers
+ include UserHelpers
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
@@ -189,7 +189,7 @@ RSpec.describe MergeRequests::RefreshService do
subject { service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/master') }
- it 'updates the head_pipeline_id for @merge_request', :sidekiq_might_not_need_inline do
+ it 'updates the head_pipeline_id for @merge_request', :sidekiq_inline do
expect { subject }.to change { @merge_request.reload.head_pipeline_id }.from(nil).to(pipeline.id)
end
@@ -306,7 +306,7 @@ RSpec.describe MergeRequests::RefreshService do
subject
end
- it 'sets the latest detached merge request pipeline as a head pipeline', :sidekiq_might_not_need_inline do
+ it 'sets the latest detached merge request pipeline as a head pipeline' do
@merge_request.reload
expect(@merge_request.actual_head_pipeline).to be_merge_request_event
end
@@ -424,7 +424,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- context 'push to origin repo target branch', :sidekiq_might_not_need_inline do
+ context 'push to origin repo target branch' do
context 'when all MRs to the target branch had diffs' do
before do
service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
@@ -474,7 +474,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- context 'manual merge of source branch', :sidekiq_might_not_need_inline do
+ context 'manual merge of source branch' do
before do
# Merge master -> feature branch
@project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, 'Test message')
@@ -496,7 +496,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- context 'push to fork repo source branch', :sidekiq_might_not_need_inline do
+ context 'push to fork repo source branch' do
let(:refresh_service) { service.new(project: @fork_project, current_user: @user) }
def refresh
@@ -561,7 +561,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- context 'push to fork repo target branch', :sidekiq_might_not_need_inline do
+ context 'push to fork repo target branch' do
describe 'changes to merge requests' do
before do
service.new(project: @fork_project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
@@ -587,7 +587,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- context 'forked projects with the same source branch name as target branch', :sidekiq_might_not_need_inline do
+ context 'forked projects with the same source branch name as target branch' do
let!(:first_commit) do
@fork_project.repository.create_file(@user, 'test1.txt', 'Test data',
message: 'Test commit',
@@ -671,7 +671,7 @@ RSpec.describe MergeRequests::RefreshService do
context 'push new branch that exists in a merge request' do
let(:refresh_service) { service.new(project: @fork_project, current_user: @user) }
- it 'refreshes the merge request', :sidekiq_might_not_need_inline do
+ it 'refreshes the merge request' do
expect(refresh_service).to receive(:execute_hooks)
.with(@fork_merge_request, 'update', old_rev: Gitlab::Git::BLANK_SHA)
allow_any_instance_of(Repository).to receive(:merge_base).and_return(@oldrev)
@@ -799,23 +799,24 @@ RSpec.describe MergeRequests::RefreshService do
it 'does not mark as draft based on commits that do not belong to an MR' do
allow(refresh_service).to receive(:find_new_commits)
- refresh_service.instance_variable_set("@commits", [
- double(
- id: 'aaaaaaa',
- sha: 'aaaaaaa',
- short_id: 'aaaaaaa',
- title: 'Fix issue',
- draft?: false
- ),
- double(
- id: 'bbbbbbb',
- sha: 'bbbbbbbb',
- short_id: 'bbbbbbb',
- title: 'fixup! Fix issue',
- draft?: true,
- to_reference: 'bbbbbbb'
- )
- ])
+ refresh_service.instance_variable_set("@commits",
+ [
+ double(
+ id: 'aaaaaaa',
+ sha: 'aaaaaaa',
+ short_id: 'aaaaaaa',
+ title: 'Fix issue',
+ draft?: false
+ ),
+ double(
+ id: 'bbbbbbb',
+ sha: 'bbbbbbbb',
+ short_id: 'bbbbbbb',
+ title: 'fixup! Fix issue',
+ draft?: true,
+ to_reference: 'bbbbbbb'
+ )
+ ])
refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
reload_mrs
diff --git a/spec/services/merge_requests/request_review_service_spec.rb b/spec/services/merge_requests/request_review_service_spec.rb
index 8bc31df605c..1d3f92b083f 100644
--- a/spec/services/merge_requests/request_review_service_spec.rb
+++ b/spec/services/merge_requests/request_review_service_spec.rb
@@ -25,20 +25,26 @@ RSpec.describe MergeRequests::RequestReviewService do
end
describe '#execute' do
- describe 'invalid permissions' do
- let(:service) { described_class.new(project: project, current_user: create(:user)) }
-
+ shared_examples_for 'failed service execution' do
it 'returns an error' do
expect(result[:status]).to eq :error
end
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { result }
+ end
+ end
+
+ describe 'invalid permissions' do
+ let(:service) { described_class.new(project: project, current_user: create(:user)) }
+
+ it_behaves_like 'failed service execution'
end
describe 'reviewer does not exist' do
let(:result) { service.execute(merge_request, create(:user)) }
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
+ it_behaves_like 'failed service execution'
end
describe 'reviewer exists' do
@@ -64,6 +70,10 @@ RSpec.describe MergeRequests::RequestReviewService do
service.execute(merge_request, user)
end
+
+ it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { result }
+ end
end
end
end
diff --git a/spec/services/merge_requests/update_assignees_service_spec.rb b/spec/services/merge_requests/update_assignees_service_spec.rb
index 3a0b17c2768..2d80d75a262 100644
--- a/spec/services/merge_requests/update_assignees_service_spec.rb
+++ b/spec/services/merge_requests/update_assignees_service_spec.rb
@@ -36,6 +36,20 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
service.execute(merge_request)
end
+ shared_examples 'it updates and enqueues the job' do
+ it 'correctly updates the MR and enqueues the job' do
+ expect_next(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
+ expect(service)
+ .to receive(:async_execute).with(merge_request, [user3], execute_hooks: true)
+ end
+
+ expect { update_merge_request }
+ .to change { merge_request.reload.assignees }.from([user3]).to(new_users)
+ .and change(merge_request, :updated_at)
+ .and change(merge_request, :updated_by).to(user)
+ end
+ end
+
shared_examples 'removing all assignees' do
it 'removes all assignees' do
expect(update_merge_request).to have_attributes(assignees: be_empty, errors: be_none)
@@ -73,16 +87,8 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
it_behaves_like 'removing all assignees'
end
- it 'updates the MR, and queues the more expensive work for later' do
- expect_next(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
- expect(service)
- .to receive(:async_execute).with(merge_request, [user3], execute_hooks: true)
- end
-
- expect { update_merge_request }
- .to change { merge_request.reload.assignees }.from([user3]).to([user2])
- .and change(merge_request, :updated_at)
- .and change(merge_request, :updated_by).to(user)
+ it_behaves_like 'it updates and enqueues the job' do
+ let(:new_users) { [user2] }
end
it 'does not update the assignees if they do not have access' do
diff --git a/spec/services/merge_requests/update_reviewers_service_spec.rb b/spec/services/merge_requests/update_reviewers_service_spec.rb
index 8920141adbb..9f935e1cecf 100644
--- a/spec/services/merge_requests/update_reviewers_service_spec.rb
+++ b/spec/services/merge_requests/update_reviewers_service_spec.rb
@@ -128,6 +128,10 @@ RSpec.describe MergeRequests::UpdateReviewersService do
set_reviewers
end
+ it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { set_reviewers }
+ end
+
it 'calls MergeRequest::ResolveTodosService#async_execute' do
expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
expect(service).to receive(:async_execute)
@@ -149,6 +153,14 @@ RSpec.describe MergeRequests::UpdateReviewersService do
set_reviewers
end
+ context 'when reviewers did not change' do
+ let(:opts) { { reviewer_ids: merge_request.reviewer_ids } }
+
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { set_reviewers }
+ end
+ end
+
it 'does not update the reviewers if they do not have access' do
opts[:reviewer_ids] = [create(:user).id]
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 8ebabd64d8a..1d67574b06d 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -425,16 +425,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
create(:merge_request, :simple, source_project: project, reviewer_ids: [user2.id])
end
- context 'when merge_request_reviewer feature is enabled' do
- before do
- stub_feature_flags(merge_request_reviewer: true)
- end
-
- let(:opts) { { reviewer_ids: [IssuableFinder::Params::NONE] } }
+ let(:opts) { { reviewer_ids: [IssuableFinder::Params::NONE] } }
- it 'removes reviewers' do
- expect(update_merge_request(opts).reviewers).to eq []
- end
+ it 'removes reviewers' do
+ expect(update_merge_request(opts).reviewers).to eq []
end
end
end
@@ -625,6 +619,20 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(Todo.count).to eq(2)
end
+
+ it 'triggers GraphQL description updated subscription' do
+ expect(GraphqlTriggers).to receive(:issuable_description_updated).with(merge_request).and_call_original
+
+ update_merge_request(description: 'updated description')
+ end
+ end
+
+ context 'when description is not changed' do
+ it 'does not trigger GraphQL description updated subscription' do
+ expect(GraphqlTriggers).not_to receive(:issuable_description_updated)
+
+ update_merge_request(title: 'updated title')
+ end
end
context 'when is reassigned' do
@@ -685,6 +693,16 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(user2.review_requested_open_merge_requests_count).to eq(1)
expect(user3.review_requested_open_merge_requests_count).to eq(0)
end
+
+ it_behaves_like 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { update_merge_request({ reviewer_ids: [user2.id] }) }
+ end
+ end
+
+ context 'when reviewers did not change' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
+ let(:action) { update_merge_request({ reviewer_ids: merge_request.reviewer_ids }) }
+ end
end
context 'when the milestone is removed' do
@@ -827,6 +845,12 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
should_not_email(non_subscriber)
end
+ it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
+
+ update_merge_request(title: 'New title')
+ end
+
context 'when removing through wip_event param' do
it 'removes Draft from the title' do
expect { update_merge_request({ wip_event: "ready" }) }
@@ -853,6 +877,12 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
should_not_email(non_subscriber)
end
+ it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)
+
+ update_merge_request(title: 'Draft: New title')
+ end
+
context 'when adding through wip_event param' do
it 'adds Draft to the title' do
expect { update_merge_request({ wip_event: "draft" }) }
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
new file mode 100644
index 00000000000..8002b2ebc86
--- /dev/null
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -0,0 +1,199 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
+ let_it_be(:candidate) { create(:ml_candidates, user: user, experiment: experiment) }
+
+ let(:repository) { described_class.new(project, user) }
+
+ describe '#by_iid' do
+ let(:iid) { candidate.iid }
+
+ subject { repository.by_iid(iid) }
+
+ it { is_expected.to eq(candidate) }
+
+ context 'when iid does not exist' do
+ let(:iid) { non_existing_record_iid.to_s }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when iid belongs to a different project' do
+ let(:repository) { described_class.new(create(:project), user) }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#create!' do
+ subject { repository.create!(experiment, 1234) }
+
+ it 'creates the candidate' do
+ expect(subject.start_time).to eq(1234)
+ expect(subject.iid).not_to be_nil
+ expect(subject.end_time).to be_nil
+ end
+ end
+
+ describe '#update' do
+ let(:end_time) { 123456 }
+ let(:status) { 'running' }
+
+ subject { repository.update(candidate, status, end_time) }
+
+ it { is_expected.to be_truthy }
+
+ context 'when end_time is missing' do
+ let(:end_time) { nil }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when status is wrong' do
+ let(:status) { 's' }
+
+ it 'fails assigning the value' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when status is missing' do
+ let(:status) { nil }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#add_metric!' do
+ let(:props) { { name: 'abc', value: 1234, tracked: 12345678, step: 0 } }
+ let(:metrics_before) { candidate.metrics.size }
+
+ before do
+ metrics_before
+ end
+
+ subject { repository.add_metric!(candidate, props[:name], props[:value], props[:tracked], props[:step]) }
+
+ it 'adds a new metric' do
+ expect { subject }.to change { candidate.metrics.size }.by(1)
+ end
+
+ context 'when name missing' do
+ let(:props) { { value: 1234, tracked: 12345678, step: 0 } }
+
+ it 'does not add metric' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
+ describe '#add_param!' do
+ let(:props) { { name: 'abc', value: 'def' } }
+
+ subject { repository.add_param!(candidate, props[:name], props[:value]) }
+
+ it 'adds a new param' do
+ expect { subject }.to change { candidate.params.size }.by(1)
+ end
+
+ context 'when name missing' do
+ let(:props) { { value: 1234 } }
+
+ it 'throws RecordInvalid' do
+ expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+
+ context 'when param was already added' do
+ it 'throws RecordInvalid' do
+ repository.add_param!(candidate, 'new', props[:value])
+
+ expect { repository.add_param!(candidate, 'new', props[:value]) }.to raise_error(ActiveRecord::RecordInvalid)
+ end
+ end
+ end
+
+ describe "#add_params" do
+ let(:params) do
+ [{ key: 'model_class', value: 'LogisticRegression' }, { 'key': 'pythonEnv', value: '3.10' }]
+ end
+
+ subject { repository.add_params(candidate, params) }
+
+ it 'adds the parameters' do
+ expect { subject }.to change { candidate.reload.params.size }.by(2)
+ end
+
+ context 'if parameter misses key' do
+ let(:params) do
+ [{ value: 'LogisticRegression' }]
+ end
+
+ it 'raises an error and does not add' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+
+ context 'if parameter misses value' do
+ let(:params) do
+ [{ key: 'pythonEnv2' }]
+ end
+
+ it 'raises an error and does not add' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+
+ context 'if parameter repeated' do
+ let(:params) do
+ [
+ { 'key': 'pythonEnv0', value: '2.7' },
+ { 'key': 'pythonEnv1', value: '3.9' },
+ { 'key': 'pythonEnv1', value: '3.10' }
+ ]
+ end
+
+ before do
+ repository.add_param!(candidate, 'pythonEnv0', '0')
+ end
+
+ it 'does not throw and adds only the first of each kind' do
+ expect { subject }.to change { candidate.reload.params.size }.by(1)
+ end
+ end
+ end
+
+ describe "#add_metrics" do
+ let(:metrics) do
+ [
+ { key: 'mae', value: 2.5, timestamp: 1552550804 },
+ { key: 'rmse', value: 2.7, timestamp: 1552550804 }
+ ]
+ end
+
+ subject { repository.add_metrics(candidate, metrics) }
+
+ it 'adds the metrics' do
+ expect { subject }.to change { candidate.reload.metrics.size }.by(2)
+ end
+
+ context 'when metrics have repeated keys' do
+ let(:metrics) do
+ [
+ { key: 'mae', value: 2.5, timestamp: 1552550804 },
+ { key: 'rmse', value: 2.7, timestamp: 1552550804 },
+ { key: 'mae', value: 2.7, timestamp: 1552550805 }
+ ]
+ end
+
+ it 'adds all of them' do
+ expect { subject }.to change { candidate.reload.metrics.size }.by(3)
+ end
+ end
+ end
+end
diff --git a/spec/services/ml/experiment_tracking/experiment_repository_spec.rb b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
new file mode 100644
index 00000000000..80e1fa025d1
--- /dev/null
+++ b/spec/services/ml/experiment_tracking/experiment_repository_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ml::ExperimentTracking::ExperimentRepository do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:experiment) { create(:ml_experiments, user: user, project: project) }
+ let_it_be(:experiment2) { create(:ml_experiments, user: user, project: project) }
+ let_it_be(:experiment3) { create(:ml_experiments, user: user, project: project) }
+ let_it_be(:experiment4) { create(:ml_experiments, user: user) }
+
+ let(:repository) { described_class.new(project, user) }
+
+ describe '#by_iid_or_name' do
+ let(:iid) { experiment.iid }
+ let(:name) { nil }
+
+ subject { repository.by_iid_or_name(iid: iid, name: name) }
+
+ context 'when iid passed' do
+ it('fetches the experiment') { is_expected.to eq(experiment) }
+
+ context 'and name passed' do
+ let(:name) { experiment2.name }
+
+ it('ignores the name') { is_expected.to eq(experiment) }
+ end
+
+ context 'and does not exist' do
+ let(:iid) { non_existing_record_iid }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+
+ context 'when iid is not passed', 'and name is passed' do
+ let(:iid) { nil }
+
+ context 'when name exists' do
+ let(:name) { experiment2.name }
+
+ it('fetches the experiment') { is_expected.to eq(experiment2) }
+ end
+
+ context 'when name does not exist' do
+ let(:name) { non_existing_record_iid }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+ end
+
+ describe '#all' do
+ it 'fetches experiments for project' do
+ expect(repository.all).to match_array([experiment, experiment2, experiment3])
+ end
+ end
+
+ describe '#create!' do
+ let(:name) { 'hello' }
+
+ subject { repository.create!(name) }
+
+ it 'creates the experiment' do
+ expect { subject }.to change { repository.all.size }.by(1)
+ end
+
+ context 'when name exists' do
+ let(:name) { experiment.name }
+
+ it 'throws error' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+
+ context 'when name is missing' do
+ let(:name) { nil }
+
+ it 'throws error' do
+ expect { subject }.to raise_error(ActiveRecord::ActiveRecordError)
+ end
+ end
+ end
+end
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
index ed385f1cd7f..10926c5ef57 100644
--- a/spec/services/namespaces/package_settings/update_service_spec.rb
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -33,8 +33,29 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService do
shared_examples 'updating the namespace package setting' do
it_behaves_like 'updating the namespace package setting attributes',
- from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT', generic_duplicates_allowed: true, generic_duplicate_exception_regex: 'foo' },
- to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE', generic_duplicates_allowed: false, generic_duplicate_exception_regex: 'bar' }
+ from: {
+ maven_duplicates_allowed: true,
+ maven_duplicate_exception_regex: 'SNAPSHOT',
+ generic_duplicates_allowed: true,
+ generic_duplicate_exception_regex: 'foo',
+ maven_package_requests_forwarding: true,
+ lock_maven_package_requests_forwarding: false,
+ npm_package_requests_forwarding: nil,
+ lock_npm_package_requests_forwarding: false,
+ pypi_package_requests_forwarding: nil,
+ lock_pypi_package_requests_forwarding: false
+ }, to: {
+ maven_duplicates_allowed: false,
+ maven_duplicate_exception_regex: 'RELEASE',
+ generic_duplicates_allowed: false,
+ generic_duplicate_exception_regex: 'bar',
+ maven_package_requests_forwarding: true,
+ lock_maven_package_requests_forwarding: true,
+ npm_package_requests_forwarding: true,
+ lock_npm_package_requests_forwarding: true,
+ pypi_package_requests_forwarding: true,
+ lock_pypi_package_requests_forwarding: true
+ }
it_behaves_like 'returning a success'
@@ -63,10 +84,18 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService do
context 'with existing namespace package setting' do
let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
let_it_be(:params) do
- { maven_duplicates_allowed: false,
+ {
+ maven_duplicates_allowed: false,
maven_duplicate_exception_regex: 'RELEASE',
generic_duplicates_allowed: false,
- generic_duplicate_exception_regex: 'bar' }
+ generic_duplicate_exception_regex: 'bar',
+ maven_package_requests_forwarding: true,
+ lock_maven_package_requests_forwarding: true,
+ npm_package_requests_forwarding: true,
+ lock_npm_package_requests_forwarding: true,
+ pypi_package_requests_forwarding: true,
+ lock_pypi_package_requests_forwarding: true
+ }
end
where(:user_role, :shared_examples_name) do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 935dcef1011..8fbf023cda0 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -337,6 +337,27 @@ RSpec.describe NotificationService, :mailer do
end
end
end
+
+ describe '#access_token_revoked' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user) }
+
+ subject(:notification_service) { notification.access_token_revoked(user, pat.name) }
+
+ it 'sends email to the token owner' do
+ expect { notification_service }.to have_enqueued_email(user, pat.name, mail: "access_token_revoked_email")
+ end
+
+ context 'when user is not allowed to receive notifications' do
+ before do
+ user.block!
+ end
+
+ it 'does not send email to the token owner' do
+ expect { notification_service }.not_to have_enqueued_email(user, pat.name, mail: "access_token_revoked_email")
+ end
+ end
+ end
end
describe 'SSH Keys' do
diff --git a/spec/services/onboarding/progress_service_spec.rb b/spec/services/onboarding/progress_service_spec.rb
index e9b8ea2e859..8f3f723613e 100644
--- a/spec/services/onboarding/progress_service_spec.rb
+++ b/spec/services/onboarding/progress_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Onboarding::ProgressService do
context 'when not onboarded' do
it 'does not schedule a worker' do
- expect(Namespaces::OnboardingProgressWorker).not_to receive(:perform_async)
+ expect(Onboarding::ProgressWorker).not_to receive(:perform_async)
execute_service
end
@@ -28,7 +28,7 @@ RSpec.describe Onboarding::ProgressService do
end
it 'does not schedule a worker' do
- expect(Namespaces::OnboardingProgressWorker).not_to receive(:perform_async)
+ expect(Onboarding::ProgressWorker).not_to receive(:perform_async)
execute_service
end
@@ -36,7 +36,7 @@ RSpec.describe Onboarding::ProgressService do
context 'when action is not yet completed' do
it 'schedules a worker' do
- expect(Namespaces::OnboardingProgressWorker).to receive(:perform_async)
+ expect(Onboarding::ProgressWorker).to receive(:perform_async)
execute_service
end
diff --git a/spec/services/packages/debian/create_package_file_service_spec.rb b/spec/services/packages/debian/create_package_file_service_spec.rb
index c8292b2d5c2..291f6df991c 100644
--- a/spec/services/packages/debian/create_package_file_service_spec.rb
+++ b/spec/services/packages/debian/create_package_file_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Packages::Debian::CreatePackageFileService do
include WorkhorseHelpers
let_it_be(:package) { create(:debian_incoming, without_package_files: true) }
+ let_it_be(:current_user) { create(:user) }
describe '#execute' do
let(:file_name) { 'libsample0_1.2.3~alpha2_amd64.deb' }
@@ -20,12 +21,13 @@ RSpec.describe Packages::Debian::CreatePackageFileService do
}.with_indifferent_access
end
- let(:service) { described_class.new(package, params) }
+ let(:service) { described_class.new(package: package, current_user: current_user, params: params) }
subject(:package_file) { service.execute }
shared_examples 'a valid deb' do
it 'creates a new package file', :aggregate_failures do
+ expect(::Packages::Debian::ProcessChangesWorker).not_to receive(:perform_async)
expect(package_file).to be_valid
expect(package_file.file.read).to start_with('!<arch>')
expect(package_file.size).to eq(1124)
@@ -40,6 +42,24 @@ RSpec.describe Packages::Debian::CreatePackageFileService do
end
end
+ shared_examples 'a valid changes' do
+ it 'creates a new package file', :aggregate_failures do
+ expect(::Packages::Debian::ProcessChangesWorker).to receive(:perform_async)
+
+ expect(package_file).to be_valid
+ expect(package_file.file.read).to start_with('Format: 1.8')
+ expect(package_file.size).to eq(2143)
+ expect(package_file.file_name).to eq(file_name)
+ expect(package_file.file_sha1).to eq('54321')
+ expect(package_file.file_sha256).to eq('543212345')
+ expect(package_file.file_md5).to eq('12345')
+ expect(package_file.debian_file_metadatum).to be_valid
+ expect(package_file.debian_file_metadatum.file_type).to eq('unknown')
+ expect(package_file.debian_file_metadatum.architecture).to be_nil
+ expect(package_file.debian_file_metadatum.fields).to be_nil
+ end
+ end
+
context 'with temp file' do
let!(:file) do
upload_path = ::Packages::PackageFileUploader.workhorse_local_upload_path
@@ -52,6 +72,21 @@ RSpec.describe Packages::Debian::CreatePackageFileService do
end
it_behaves_like 'a valid deb'
+
+ context 'with a .changes file' do
+ let(:file_name) { 'sample_1.2.3~alpha2_amd64.changes' }
+ let(:fixture_path) { "spec/fixtures/packages/debian/#{file_name}" }
+
+ it_behaves_like 'a valid changes'
+ end
+
+ context 'when current_user is missing' do
+ let(:current_user) { nil }
+
+ it 'raises an error' do
+ expect { package_file }.to raise_error(ArgumentError, 'Invalid user')
+ end
+ end
end
context 'with remote file' do
@@ -77,37 +112,37 @@ RSpec.describe Packages::Debian::CreatePackageFileService do
it_behaves_like 'a valid deb'
end
- context 'package is missing' do
+ context 'when package is missing' do
let(:package) { nil }
let(:params) { {} }
it 'raises an error' do
- expect { subject.execute }.to raise_error(ArgumentError, 'Invalid package')
+ expect { package_file }.to raise_error(ArgumentError, 'Invalid package')
end
end
- context 'params is empty' do
+ context 'when params is empty' do
let(:params) { {} }
it 'raises an error' do
- expect { subject.execute }.to raise_error(ActiveRecord::RecordInvalid)
+ expect { package_file }.to raise_error(ActiveRecord::RecordInvalid)
end
end
- context 'file is missing' do
+ context 'when file is missing' do
let(:file_name) { 'libsample0_1.2.3~alpha2_amd64.deb' }
let(:file) { nil }
it 'raises an error' do
- expect { subject.execute }.to raise_error(ActiveRecord::RecordInvalid)
+ expect { package_file }.to raise_error(ActiveRecord::RecordInvalid)
end
end
- context 'FIPS mode enabled', :fips_mode do
+ context 'when FIPS mode enabled', :fips_mode do
let(:file) { nil }
it 'raises an error' do
- expect { subject.execute }.to raise_error(::Packages::FIPS::DisabledError)
+ expect { package_file }.to raise_error(::Packages::FIPS::DisabledError)
end
end
end
diff --git a/spec/services/packages/mark_packages_for_destruction_service_spec.rb b/spec/services/packages/mark_packages_for_destruction_service_spec.rb
new file mode 100644
index 00000000000..5c043b89de8
--- /dev/null
+++ b/spec/services/packages/mark_packages_for_destruction_service_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::MarkPackagesForDestructionService, :sidekiq_inline do
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:packages) { create_list(:npm_package, 3, project: project) }
+
+ let(:user) { project.owner }
+
+ # The service only accepts ActiveRecord relationships and not arrays.
+ let(:service) { described_class.new(packages: ::Packages::Package.id_in(package_ids), current_user: user) }
+ let(:package_ids) { packages.map(&:id) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'when the user is authorized' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when it is successful' do
+ it 'marks the packages as pending destruction' do
+ expect(::Packages::Maven::Metadata::SyncService).not_to receive(:new)
+
+ expect { subject }.to change { ::Packages::Package.pending_destruction.count }.from(0).to(3)
+ .and change { Packages::PackageFile.pending_destruction.count }.from(0).to(3)
+ packages.each { |pkg| expect(pkg.reload).to be_pending_destruction }
+
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_success
+ expect(subject.message).to eq('Packages were successfully marked as pending destruction')
+ end
+
+ context 'with maven packages' do
+ let_it_be_with_reload(:packages) { create_list(:maven_package, 3, project: project) }
+
+ it 'marks the packages as pending destruction' do
+ expect(::Packages::Maven::Metadata::SyncService).to receive(:new).once.and_call_original
+
+ expect { subject }.to change { ::Packages::Package.pending_destruction.count }.from(0).to(3)
+ .and change { Packages::PackageFile.pending_destruction.count }.from(0).to(9)
+ packages.each { |pkg| expect(pkg.reload).to be_pending_destruction }
+
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_success
+ expect(subject.message).to eq('Packages were successfully marked as pending destruction')
+ end
+
+ context 'without version' do
+ before do
+ ::Packages::Package.id_in(package_ids).update_all(version: nil)
+ end
+
+ it 'marks the packages as pending destruction' do
+ expect(::Packages::Maven::Metadata::SyncService).not_to receive(:new)
+
+ expect { subject }.to change { ::Packages::Package.pending_destruction.count }.from(0).to(3)
+ .and change { Packages::PackageFile.pending_destruction.count }.from(0).to(9)
+ packages.each { |pkg| expect(pkg.reload).to be_pending_destruction }
+
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_success
+ expect(subject.message).to eq('Packages were successfully marked as pending destruction')
+ end
+ end
+ end
+ end
+
+ context 'when it is not successful' do
+ before do
+ allow(service).to receive(:can_destroy_packages?).and_raise(StandardError, 'test')
+ end
+
+ it 'returns an error ServiceResponse' do
+ expect(::Packages::Maven::Metadata::SyncService).not_to receive(:new)
+
+ expect { subject }.to not_change { ::Packages::Package.pending_destruction.count }
+ .and not_change { ::Packages::PackageFile.pending_destruction.count }
+
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq("Failed to mark the packages as pending destruction")
+ expect(subject.status).to eq(:error)
+ end
+ end
+ end
+
+ context 'when the user is not authorized' do
+ let(:user) { nil }
+
+ it 'returns an error ServiceResponse' do
+ expect(::Packages::Maven::Metadata::SyncService).not_to receive(:new)
+
+ expect { subject }.to not_change { ::Packages::Package.pending_destruction.count }
+ .and not_change { ::Packages::PackageFile.pending_destruction.count }
+
+ expect(subject).to be_a(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq("You don't have the permission to perform this action")
+ expect(subject.status).to eq(:error)
+ expect(subject.reason).to eq(:unauthorized)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/rpm/parse_package_service_spec.rb b/spec/services/packages/rpm/parse_package_service_spec.rb
new file mode 100644
index 00000000000..f330587bfa0
--- /dev/null
+++ b/spec/services/packages/rpm/parse_package_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::ParsePackageService do
+ let(:package_file) { File.open('spec/fixtures/packages/rpm/hello-0.0.1-1.fc29.x86_64.rpm') }
+
+ describe 'dynamic private methods' do
+ described_class::BUILD_ATTRIBUTES_METHOD_NAMES.each do |attribute|
+ it 'defines dynamic build attribute method' do
+ expect(described_class).to be_private_method_defined("build_#{attribute}")
+ end
+ end
+ end
+
+ describe '#execute' do
+ subject { described_class.new(package_file).execute }
+
+ shared_examples 'valid package parsing' do
+ it 'returns hash' do
+ expect(subject).to be_a(Hash)
+ end
+
+ it 'has all static attribute keys' do
+ expect(subject.keys).to include(*described_class::STATIC_ATTRIBUTES)
+ end
+
+ it 'includes epoch attribute' do
+ expect(subject[:epoch]).not_to be_blank
+ end
+
+ it 'has all built attributes with array values' do
+ result = subject
+ described_class::BUILD_ATTRIBUTES_METHOD_NAMES.each do |attribute|
+ expect(result).to have_key(attribute)
+ expect(result[attribute]).to be_a(Array)
+ end
+ end
+ end
+
+ context 'when wrong format file received' do
+ let(:package_file) { File.open('spec/fixtures/rails_sample.jpg') }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when valid file uploaded' do
+ context 'when .rpm file uploaded' do
+ it_behaves_like 'valid package parsing'
+ end
+
+ context 'when .src.rpm file uploaded' do
+ let(:package_file) { File.open('spec/fixtures/packages/rpm/hello-0.0.1-1.fc29.src.rpm') }
+
+ it_behaves_like 'valid package parsing'
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/rpm/repository_metadata/base_builder_spec.rb b/spec/services/packages/rpm/repository_metadata/base_builder_spec.rb
index 0fb58cc27d5..524c224177b 100644
--- a/spec/services/packages/rpm/repository_metadata/base_builder_spec.rb
+++ b/spec/services/packages/rpm/repository_metadata/base_builder_spec.rb
@@ -3,7 +3,10 @@ require 'spec_helper'
RSpec.describe Packages::Rpm::RepositoryMetadata::BaseBuilder do
describe '#execute' do
- subject { described_class.new.execute }
+ subject { described_class.new(xml: xml, data: data).execute }
+
+ let(:xml) { nil }
+ let(:data) { {} }
before do
stub_const("#{described_class}::ROOT_TAG", 'test')
@@ -18,5 +21,13 @@ RSpec.describe Packages::Rpm::RepositoryMetadata::BaseBuilder do
expect(result.children.first.attributes['foo1'].value).to eq('bar1')
expect(result.children.first.attributes['foo2'].value).to eq('bar2')
end
+
+ context 'when call with parameters' do
+ let(:xml) { 'test' }
+
+ it 'raises NotImplementedError' do
+ expect { subject }.to raise_error NotImplementedError
+ end
+ end
end
end
diff --git a/spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb b/spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb
index f5294d6f7f7..147d5862a71 100644
--- a/spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb
+++ b/spec/services/packages/rpm/repository_metadata/build_primary_xml_spec.rb
@@ -3,18 +3,32 @@ require 'spec_helper'
RSpec.describe Packages::Rpm::RepositoryMetadata::BuildPrimaryXml do
describe '#execute' do
- subject { described_class.new.execute }
+ subject { described_class.new(xml: xml, data: data).execute }
- context "when generate empty xml" do
- let(:expected_xml) do
- <<~XML
- <?xml version="1.0" encoding="UTF-8"?>
- <metadata xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm" packages="0"/>
- XML
- end
+ let(:empty_xml) do
+ <<~XML
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata xmlns="http://linux.duke.edu/metadata/common" xmlns:rpm="http://linux.duke.edu/metadata/rpm" packages="0"/>
+ XML
+ end
+
+ it_behaves_like 'handling rpm xml file'
+
+ context 'when updating existing xml' do
+ include_context 'with rpm package data'
+
+ let(:xml) { empty_xml }
+ let(:data) { xml_update_params }
+ let(:required_text_only_attributes) { %i[description summary arch name] }
+
+ it 'adds node with required_text_only_attributes' do
+ result = Nokogiri::XML::Document.parse(subject).remove_namespaces!
- it 'generate expected xml' do
- expect(subject).to eq(expected_xml)
+ required_text_only_attributes.each do |attribute|
+ expect(
+ result.at("//#{described_class::ROOT_TAG}/package/#{attribute}").text
+ ).to eq(data[attribute])
+ end
end
end
end
diff --git a/spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb b/spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb
index 29b0f73e3c1..0843a983b7e 100644
--- a/spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb
+++ b/spec/services/packages/rpm/repository_metadata/build_repomd_xml_spec.rb
@@ -62,5 +62,25 @@ RSpec.describe Packages::Rpm::RepositoryMetadata::BuildRepomdXml do
end
end
end
+
+ context 'when data values has unexpected keys' do
+ let(:data) do
+ {
+ filelists: described_class::ALLOWED_DATA_VALUE_KEYS.each_with_object({}) do |key, result|
+ result[:"#{key}-wrong"] = { value: 'value' }
+ end
+ }
+ end
+
+ it 'ignores wrong keys' do
+ result = Nokogiri::XML::Document.parse(subject).remove_namespaces!
+
+ data.each do |tag_name, tag_attributes|
+ tag_attributes.each_key do |key|
+ expect(result.at("//repomd/data[@type=\"#{tag_name}\"]/#{key}")).to be_nil
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/services/pages_domains/create_acme_order_service_spec.rb b/spec/services/pages_domains/create_acme_order_service_spec.rb
index b882c253613..35b2cc56973 100644
--- a/spec/services/pages_domains/create_acme_order_service_spec.rb
+++ b/spec/services/pages_domains/create_acme_order_service_spec.rb
@@ -38,21 +38,13 @@ RSpec.describe PagesDomains::CreateAcmeOrderService do
expect(challenge).to have_received(:request_validation).ordered
end
- it 'generates and saves private key: rsa' do
- stub_feature_flags(pages_lets_encrypt_ecdsa: false)
+ it 'generates and saves private key' do
service.execute
saved_order = PagesDomainAcmeOrder.last
expect { OpenSSL::PKey::RSA.new(saved_order.private_key) }.not_to raise_error
end
- it 'generates and saves private key: ec' do
- service.execute
-
- saved_order = PagesDomainAcmeOrder.last
- expect { OpenSSL::PKey::EC.new(saved_order.private_key) }.not_to raise_error
- end
-
it 'properly saves order attributes' do
service.execute
diff --git a/spec/services/pages_domains/create_service_spec.rb b/spec/services/pages_domains/create_service_spec.rb
new file mode 100644
index 00000000000..cac941fb134
--- /dev/null
+++ b/spec/services/pages_domains/create_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::PagesDomains::CreateService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :in_subgroup) }
+
+ let(:domain) { 'new.domain.com' }
+ let(:attributes) { { domain: domain } }
+
+ subject(:service) { described_class.new(project, user, attributes) }
+
+ context 'when the user does not have the required permissions' do
+ it 'does not create a pages domain and does not publish a PagesDomainCreatedEvent' do
+ expect(service.execute).to be_nil
+
+ expect { service.execute }
+ .to not_publish_event(PagesDomains::PagesDomainCreatedEvent)
+ .and not_change(project.pages_domains, :count)
+ end
+ end
+
+ context 'when the user has the required permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when it saves the domain successfully' do
+ it 'creates the domain and publishes a PagesDomainCreatedEvent' do
+ pages_domain = nil
+
+ expect { pages_domain = service.execute }
+ .to change(project.pages_domains, :count)
+ .and publish_event(PagesDomains::PagesDomainCreatedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace.id,
+ root_namespace_id: project.root_namespace.id,
+ domain: domain
+ )
+
+ expect(pages_domain).to be_persisted
+ end
+ end
+
+ context 'when it fails to save the domain' do
+ let(:domain) { nil }
+
+ it 'does not create a pages domain and does not publish a PagesDomainCreatedEvent' do
+ pages_domain = nil
+
+ expect { pages_domain = service.execute }
+ .to not_publish_event(PagesDomains::PagesDomainCreatedEvent)
+ .and not_change(project.pages_domains, :count)
+
+ expect(pages_domain).not_to be_persisted
+ end
+ end
+ end
+end
diff --git a/spec/services/pages_domains/delete_service_spec.rb b/spec/services/pages_domains/delete_service_spec.rb
new file mode 100644
index 00000000000..5f98fe3c7f7
--- /dev/null
+++ b/spec/services/pages_domains/delete_service_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::PagesDomains::DeleteService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pages_domain) { create(:pages_domain, :with_project) }
+
+ let(:params) do
+ attributes_for(:pages_domain, :with_trusted_chain).slice(:key, :certificate).tap do |params|
+ params[:user_provided_key] = params.delete(:key)
+ params[:user_provided_certificate] = params.delete(:certificate)
+ end
+ end
+
+ subject(:service) { described_class.new(pages_domain.project, user, params) }
+
+ context 'when the user does not have the required permissions' do
+ it 'does not delete the pages domain and does not publish a PagesDomainDeletedEvent' do
+ result_match = -> { expect(service.execute(pages_domain)).to be_nil }
+
+ expect(&result_match)
+ .to not_publish_event(PagesDomains::PagesDomainDeletedEvent)
+ end
+ end
+
+ context 'when the user has the required permissions' do
+ before do
+ pages_domain.project.add_maintainer(user)
+ end
+
+ context 'when it updates the domain successfully' do
+ it 'deletes the domain and publishes a PagesDomainDeletedEvent' do
+ result_match = -> { expect(service.execute(pages_domain)).not_to be_nil }
+
+ expect(&result_match)
+ .to publish_event(PagesDomains::PagesDomainDeletedEvent)
+ .with(
+ project_id: pages_domain.project.id,
+ namespace_id: pages_domain.project.namespace.id,
+ root_namespace_id: pages_domain.project.root_namespace.id,
+ domain: pages_domain.domain
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/pages_domains/update_service_spec.rb b/spec/services/pages_domains/update_service_spec.rb
new file mode 100644
index 00000000000..f6558f56422
--- /dev/null
+++ b/spec/services/pages_domains/update_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PagesDomains::UpdateService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pages_domain) { create(:pages_domain, :with_project) }
+
+ let(:params) do
+ attributes_for(:pages_domain, :with_trusted_chain).slice(:key, :certificate).tap do |params|
+ params[:user_provided_key] = params.delete(:key)
+ params[:user_provided_certificate] = params.delete(:certificate)
+ end
+ end
+
+ subject(:service) { described_class.new(pages_domain.project, user, params) }
+
+ context 'when the user does not have the required permissions' do
+ it 'does not update the pages domain and does not publish a PagesDomainUpdatedEvent' do
+ expect do
+ expect(service.execute(pages_domain)).to be_nil
+ end.to not_publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ end
+ end
+
+ context 'when the user has the required permissions' do
+ before do
+ pages_domain.project.add_maintainer(user)
+ end
+
+ context 'when it updates the domain successfully' do
+ it 'updates the domain' do
+ expect(service.execute(pages_domain)).to eq(true)
+ end
+
+ it 'publishes a PagesDomainUpdatedEvent' do
+ expect { service.execute(pages_domain) }
+ .to publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ .with(
+ project_id: pages_domain.project.id,
+ namespace_id: pages_domain.project.namespace.id,
+ root_namespace_id: pages_domain.project.root_namespace.id,
+ domain: pages_domain.domain
+ )
+ end
+ end
+
+ context 'when it fails to update the domain' do
+ let(:params) { { user_provided_certificate: 'blabla' } }
+
+ it 'does not update a pages domain' do
+ expect(service.execute(pages_domain)).to be(false)
+ end
+
+ it 'does not publish a PagesDomainUpdatedEvent' do
+ expect { service.execute(pages_domain) }
+ .not_to publish_event(PagesDomains::PagesDomainUpdatedEvent)
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/autocomplete_service_spec.rb b/spec/services/projects/autocomplete_service_spec.rb
index 54a21d2f22b..bc95a1f3c8b 100644
--- a/spec/services/projects/autocomplete_service_spec.rb
+++ b/spec/services/projects/autocomplete_service_spec.rb
@@ -154,23 +154,49 @@ RSpec.describe Projects::AutocompleteService do
let_it_be(:project) { create(:project, group: group) }
let_it_be(:contact_1) { create(:contact, group: group) }
let_it_be(:contact_2) { create(:contact, group: group) }
+ let_it_be(:contact_3) { create(:contact, :inactive, group: group) }
- subject { described_class.new(project, user).contacts.as_json }
+ let(:issue) { nil }
+
+ subject { described_class.new(project, user).contacts(issue).as_json }
before do
group.add_developer(user)
end
- it 'returns contact data correctly' do
+ it 'returns CRM contacts from group' do
expected_contacts = [
{ 'id' => contact_1.id, 'email' => contact_1.email,
- 'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name },
+ 'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name, 'state' => contact_1.state },
{ 'id' => contact_2.id, 'email' => contact_2.email,
- 'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name }
+ 'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name, 'state' => contact_2.state },
+ { 'id' => contact_3.id, 'email' => contact_3.email,
+ 'first_name' => contact_3.first_name, 'last_name' => contact_3.last_name, 'state' => contact_3.state }
]
expect(subject).to match_array(expected_contacts)
end
+
+    context 'when some contacts are already assigned to the issue' do
+ let(:issue) { create(:issue, project: project) }
+
+ before do
+ issue.customer_relations_contacts << [contact_2, contact_3]
+ end
+
+ it 'marks already assigned contacts as set' do
+ expected_contacts = [
+ { 'id' => contact_1.id, 'email' => contact_1.email,
+ 'first_name' => contact_1.first_name, 'last_name' => contact_1.last_name, 'state' => contact_1.state, 'set' => false },
+ { 'id' => contact_2.id, 'email' => contact_2.email,
+ 'first_name' => contact_2.first_name, 'last_name' => contact_2.last_name, 'state' => contact_2.state, 'set' => true },
+ { 'id' => contact_3.id, 'email' => contact_3.email,
+ 'first_name' => contact_3.first_name, 'last_name' => contact_3.last_name, 'state' => contact_3.state, 'set' => true }
+ ]
+
+ expect(subject).to match_array(expected_contacts)
+ end
+ end
end
describe '#labels_as_hash' do
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 2008de195ab..8311c4e4d9b 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -2,372 +2,134 @@
require 'spec_helper'
-RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_redis_cache do
- using RSpec::Parameterized::TableSyntax
+RSpec.describe Projects::ContainerRepository::CleanupTagsService do
+ let_it_be_with_reload(:container_repository) { create(:container_repository) }
+ let_it_be(:user) { container_repository.project.owner }
- include_context 'for a cleanup tags service'
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project, reload: true) { create(:project, :private) }
-
- let(:repository) { create(:container_repository, :root, project: project) }
- let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
- let(:tags) { %w[latest A Ba Bb C D E] }
+ let(:params) { {} }
+ let(:extra_params) { {} }
+ let(:service) { described_class.new(container_repository: container_repository, current_user: user, params: params.merge(extra_params)) }
before do
- project.add_maintainer(user) if user
-
stub_container_registry_config(enabled: true)
-
- stub_container_registry_tags(
- repository: repository.path,
- tags: tags
- )
-
- stub_tag_digest('latest', 'sha256:configA')
- stub_tag_digest('A', 'sha256:configA')
- stub_tag_digest('Ba', 'sha256:configB')
- stub_tag_digest('Bb', 'sha256:configB')
- stub_tag_digest('C', 'sha256:configC')
- stub_tag_digest('D', 'sha256:configD')
- stub_tag_digest('E', nil)
-
- stub_digest_config('sha256:configA', 1.hour.ago)
- stub_digest_config('sha256:configB', 5.days.ago)
- stub_digest_config('sha256:configC', 1.month.ago)
- stub_digest_config('sha256:configD', nil)
end
describe '#execute' do
subject { service.execute }
- it_behaves_like 'handling invalid params',
- service_response_extra: {
- before_truncate_size: 0,
- after_truncate_size: 0,
- before_delete_size: 0,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when regex matching everything is specified',
- delete_expectations: [%w(A Ba Bb C D E)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 6,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when delete regex matching specific tags is used',
- service_response_extra: {
- before_truncate_size: 2,
- after_truncate_size: 2,
- before_delete_size: 2,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex',
- service_response_extra: {
- before_truncate_size: 1,
- after_truncate_size: 1,
- before_delete_size: 1,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'with allow regex value',
- delete_expectations: [%w(A C D E)],
- service_response_extra: {
- before_truncate_size: 4,
- after_truncate_size: 4,
- before_delete_size: 4,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when keeping only N tags',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 4,
- after_truncate_size: 4,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when not keeping N tags',
- delete_expectations: [%w(A Ba Bb C)],
- service_response_extra: {
- before_truncate_size: 4,
- after_truncate_size: 4,
- before_delete_size: 4,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when removing keeping only 3',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when removing older than 1 day',
- delete_expectations: [%w(Ba Bb C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when combining all parameters',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- it_behaves_like 'when running a container_expiration_policy',
- delete_expectations: [%w(Bb Ba C)],
- service_response_extra: {
- before_truncate_size: 6,
- after_truncate_size: 6,
- before_delete_size: 3,
- cached_tags_count: 0
- },
- supports_caching: true
-
- context 'when running a container_expiration_policy with caching' do
- let(:user) { nil }
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true
- }
- end
-
- it 'expects caching to be used' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- expect_caching
-
- subject
- end
-
- context 'when setting set to false' do
- before do
- stub_application_setting(container_registry_expiration_policies_caching: false)
- end
-
- it 'does not use caching' do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- expect_no_caching
+ shared_examples 'returning error message' do |message|
+ it "returns error #{message}" do
+ expect(::Projects::ContainerRepository::Gitlab::CleanupTagsService).not_to receive(:new)
+ expect(::Projects::ContainerRepository::ThirdParty::CleanupTagsService).not_to receive(:new)
+ expect(service).not_to receive(:log_info)
- subject
- end
+ expect(subject).to eq(status: :error, message: message)
end
end
- context 'truncating the tags list' do
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1
- }
- end
-
- shared_examples 'returning the response' do |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
- it 'returns the response' do
- expect_no_caching
+ shared_examples 'handling invalid regular expressions' do
+ shared_examples 'handling invalid regex' do
+ it_behaves_like 'returning error message', 'invalid regex'
- result = subject
+ it 'calls error tracking service' do
+ expect(::Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
- service_response = expected_service_response(
- status: status,
- original_size: original_size,
- deleted: nil
- ).merge(
- before_truncate_size: before_truncate_size,
- after_truncate_size: after_truncate_size,
- before_delete_size: before_delete_size,
- cached_tags_count: 0
- )
-
- expect(result).to eq(service_response)
+ subject
end
end
- where(:max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
- 10 | :success | :success | false
- 10 | :error | :error | false
- 3 | :success | :error | true
- 3 | :error | :error | true
- 0 | :success | :success | false
- 0 | :error | :error | false
- end
+ context 'when name_regex_delete is invalid' do
+ let(:extra_params) { { 'name_regex_delete' => '*test*' } }
- with_them do
- before do
- stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
- allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
- expect(service).to receive(:execute).and_return(status: delete_tags_service_status)
- end
- end
-
- original_size = 7
- keep_n = 1
-
- it_behaves_like(
- 'returning the response',
- status: params[:expected_status],
- original_size: original_size,
- before_truncate_size: original_size - keep_n,
- after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
- before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1 # one tag is filtered out with older_than filter
- )
+ it_behaves_like 'handling invalid regex'
end
- end
- context 'caching', :freeze_time do
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day',
- 'container_expiration_policy' => true
- }
- end
+ context 'when name_regex is invalid' do
+ let(:extra_params) { { 'name_regex' => '*test*' } }
- let(:tags_and_created_ats) do
- {
- 'A' => 1.hour.ago,
- 'Ba' => 5.days.ago,
- 'Bb' => 5.days.ago,
- 'C' => 1.month.ago,
- 'D' => nil,
- 'E' => nil
- }
+ it_behaves_like 'handling invalid regex'
end
- let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
+ context 'when name_regex_keep is invalid' do
+ let(:extra_params) { { 'name_regex_keep' => '*test*' } }
- before do
- expect_delete(%w(Bb Ba C), container_expiration_policy: true)
- # We froze time so we need to set the created_at stubs again
- stub_digest_config('sha256:configA', 1.hour.ago)
- stub_digest_config('sha256:configB', 5.days.ago)
- stub_digest_config('sha256:configC', 1.month.ago)
+ it_behaves_like 'handling invalid regex'
end
+ end
- it 'caches the created_at values' do
- expect_mget(tags_and_created_ats.keys)
- expect_set(cacheable_tags)
-
- expect(subject).to include(cached_tags_count: 0)
+ shared_examples 'handling all types of container repositories' do
+ shared_examples 'calling service' do |service_class, extra_log_data: {}|
+ let(:service_double) { instance_double(service_class.to_s) }
+
+ it "uses cleanup tags service #{service_class}" do
+ expect(service_class).to receive(:new).with(container_repository: container_repository, current_user: user, params: params).and_return(service_double)
+ expect(service_double).to receive(:execute).and_return('return value')
+ expect(service).to receive(:log_info)
+ .with(
+ {
+ container_repository_id: container_repository.id,
+ container_repository_path: container_repository.path,
+ project_id: container_repository.project.id
+ }.merge(extra_log_data))
+ expect(subject).to eq('return value')
+ end
end
- context 'with cached values' do
+ context 'with a migrated repository' do
before do
- ::Gitlab::Redis::Cache.with do |redis|
- redis.set(cache_key('C'), rfc3339(1.month.ago))
- end
+ container_repository.update_column(:migration_state, :import_done)
end
- it 'uses them' do
- expect_mget(tags_and_created_ats.keys)
-
- # because C is already in cache, it should not be cached again
- expect_set(cacheable_tags.except('C'))
-
- # We will ping the container registry for all tags *except* for C because it's cached
- expect(ContainerRegistry::Blob).to receive(:new).with(repository, { "digest" => "sha256:configA" }).and_call_original
- expect(ContainerRegistry::Blob).to receive(:new).with(repository, { "digest" => "sha256:configB" }).twice.and_call_original
- expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, { "digest" => "sha256:configC" })
- expect(ContainerRegistry::Blob).to receive(:new).with(repository, { "digest" => "sha256:configD" }).and_call_original
-
- expect(subject).to include(cached_tags_count: 1)
- end
- end
+ context 'supporting the gitlab api' do
+ before do
+ allow(container_repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)
+ end
- def expect_mget(keys)
- Gitlab::Redis::Cache.with do |redis|
- expect(redis).to receive(:mget).with(keys.map(&method(:cache_key))).and_call_original
+ it_behaves_like 'calling service', ::Projects::ContainerRepository::Gitlab::CleanupTagsService, extra_log_data: { gitlab_cleanup_tags_service: true }
end
- end
-
- def expect_set(tags)
- selected_tags = tags.map do |tag_name, created_at|
- ex = 1.day.seconds - (Time.zone.now - created_at).seconds
- [tag_name, created_at, ex.to_i] if ex.positive?
- end.compact
-
- return if selected_tags.count.zero?
- Gitlab::Redis::Cache.with do |redis|
- expect(redis).to receive(:pipelined).and_call_original
-
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- selected_tags.each do |tag_name, created_at, ex|
- expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex)
- end
+ context 'not supporting the gitlab api' do
+ before do
+ allow(container_repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(false)
end
+
+ it_behaves_like 'calling service', ::Projects::ContainerRepository::ThirdParty::CleanupTagsService, extra_log_data: { third_party_cleanup_tags_service: true }
end
end
- def cache_key(tag_name)
- "container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
- end
+ context 'with a non migrated repository' do
+ before do
+ container_repository.update_column(:migration_state, :default)
+ container_repository.update!(created_at: ContainerRepository::MIGRATION_PHASE_1_ENDED_AT - 1.week)
+ end
- def rfc3339(date_time)
- # DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339
- # The caching will use DateTime rfc3339
- DateTime.rfc3339(date_time.rfc3339).rfc3339
+ it_behaves_like 'calling service', ::Projects::ContainerRepository::ThirdParty::CleanupTagsService, extra_log_data: { third_party_cleanup_tags_service: true }
end
end
- end
- private
+ context 'with valid user' do
+ it_behaves_like 'handling invalid regular expressions'
+ it_behaves_like 'handling all types of container repositories'
+ end
- def stub_tag_digest(tag, digest)
- allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:repository_tag_digest)
- .with(repository.path, tag) { digest }
+ context 'for container expiration policy' do
+ let(:user) { nil }
+ let(:params) { { 'container_expiration_policy' => true } }
- allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:repository_manifest)
- .with(repository.path, tag) do
- { 'config' => { 'digest' => digest } } if digest
+ it_behaves_like 'handling invalid regular expressions'
+ it_behaves_like 'handling all types of container repositories'
end
- end
- def stub_digest_config(digest, created_at)
- allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:blob)
- .with(repository.path, digest, nil) do
- { 'created' => created_at.to_datetime.rfc3339 }.to_json if created_at
+ context 'with not allowed user' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'returning error message', 'access denied'
end
- end
- def expect_caching
- ::Gitlab::Redis::Cache.with do |redis|
- expect(redis).to receive(:mget).and_call_original
- expect(redis).to receive(:pipelined).and_call_original
+ context 'with no user' do
+ let(:user) { nil }
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- expect(pipeline).to receive(:set).and_call_original
- end
+ it_behaves_like 'returning error message', 'access denied'
end
end
end
diff --git a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
index d2cdb667659..59827ea035e 100644
--- a/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/gitlab/cleanup_tags_service_spec.rb
@@ -46,8 +46,6 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
context 'with several tags pages' do
let(:tags_page_size) { 2 }
- it_behaves_like 'handling invalid params'
-
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]]
@@ -105,8 +103,6 @@ RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService do
context 'with a single tags page' do
let(:tags_page_size) { 1000 }
- it_behaves_like 'handling invalid params'
-
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A Ba Bb C D E]]
diff --git a/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
new file mode 100644
index 00000000000..2d034d577ac
--- /dev/null
+++ b/spec/services/projects/container_repository/third_party/cleanup_tags_service_spec.rb
@@ -0,0 +1,370 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :clean_gitlab_redis_cache do
+ using RSpec::Parameterized::TableSyntax
+
+ include_context 'for a cleanup tags service'
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :private) }
+
+ let(:repository) { create(:container_repository, :root, project: project) }
+ let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
+ let(:tags) { %w[latest A Ba Bb C D E] }
+
+ before do
+ project.add_maintainer(user) if user
+
+ stub_container_registry_config(enabled: true)
+
+ stub_container_registry_tags(
+ repository: repository.path,
+ tags: tags
+ )
+
+ stub_tag_digest('latest', 'sha256:configA')
+ stub_tag_digest('A', 'sha256:configA')
+ stub_tag_digest('Ba', 'sha256:configB')
+ stub_tag_digest('Bb', 'sha256:configB')
+ stub_tag_digest('C', 'sha256:configC')
+ stub_tag_digest('D', 'sha256:configD')
+ stub_tag_digest('E', nil)
+
+ stub_digest_config('sha256:configA', 1.hour.ago)
+ stub_digest_config('sha256:configB', 5.days.ago)
+ stub_digest_config('sha256:configC', 1.month.ago)
+ stub_digest_config('sha256:configD', nil)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ it_behaves_like 'when regex matching everything is specified',
+ delete_expectations: [%w[A Ba Bb C D E]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 6,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when delete regex matching specific tags is used',
+ service_response_extra: {
+ before_truncate_size: 2,
+ after_truncate_size: 2,
+ before_delete_size: 2,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex',
+ service_response_extra: {
+ before_truncate_size: 1,
+ after_truncate_size: 1,
+ before_delete_size: 1,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'with allow regex value',
+ delete_expectations: [%w[A C D E]],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 4,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when keeping only N tags',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when not keeping N tags',
+ delete_expectations: [%w[A Ba Bb C]],
+ service_response_extra: {
+ before_truncate_size: 4,
+ after_truncate_size: 4,
+ before_delete_size: 4,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when removing keeping only 3',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when removing older than 1 day',
+ delete_expectations: [%w[Ba Bb C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when combining all parameters',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ it_behaves_like 'when running a container_expiration_policy',
+ delete_expectations: [%w[Bb Ba C]],
+ service_response_extra: {
+ before_truncate_size: 6,
+ after_truncate_size: 6,
+ before_delete_size: 3,
+ cached_tags_count: 0
+ },
+ supports_caching: true
+
+ context 'when running a container_expiration_policy with caching' do
+ let(:user) { nil }
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
+ end
+
+ it 'expects caching to be used' do
+ expect_delete(%w[Bb Ba C], container_expiration_policy: true)
+ expect_caching
+
+ subject
+ end
+
+      context 'when the caching application setting is disabled' do
+ before do
+ stub_application_setting(container_registry_expiration_policies_caching: false)
+ end
+
+ it 'does not use caching' do
+ expect_delete(%w[Bb Ba C], container_expiration_policy: true)
+ expect_no_caching
+
+ subject
+ end
+ end
+ end
+
+ context 'when truncating the tags list' do
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1
+ }
+ end
+
+ shared_examples 'returning the response' do
+ |status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
+ it 'returns the response' do
+ expect_no_caching
+
+ result = subject
+
+ service_response = expected_service_response(
+ status: status,
+ original_size: original_size,
+ deleted: nil
+ ).merge(
+ before_truncate_size: before_truncate_size,
+ after_truncate_size: after_truncate_size,
+ before_delete_size: before_delete_size,
+ cached_tags_count: 0
+ )
+
+ expect(result).to eq(service_response)
+ end
+ end
+
+ where(:max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
+ 10 | :success | :success | false
+ 10 | :error | :error | false
+ 3 | :success | :error | true
+ 3 | :error | :error | true
+ 0 | :success | :success | false
+ 0 | :error | :error | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
+ allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
+ allow(service).to receive(:execute).and_return(status: delete_tags_service_status)
+ end
+ end
+
+ original_size = 7
+ keep_n = 1
+
+ it_behaves_like(
+ 'returning the response',
+ status: params[:expected_status],
+ original_size: original_size,
+ before_truncate_size: original_size - keep_n,
+ after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
+ # one tag is filtered out with older_than filter
+ before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1
+ )
+ end
+ end
+
+ context 'with caching', :freeze_time do
+ let(:params) do
+ {
+ 'name_regex_delete' => '.*',
+ 'keep_n' => 1,
+ 'older_than' => '1 day',
+ 'container_expiration_policy' => true
+ }
+ end
+
+ let(:tags_and_created_ats) do
+ {
+ 'A' => 1.hour.ago,
+ 'Ba' => 5.days.ago,
+ 'Bb' => 5.days.ago,
+ 'C' => 1.month.ago,
+ 'D' => nil,
+ 'E' => nil
+ }
+ end
+
+ let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
+
+ before do
+ expect_delete(%w[Bb Ba C], container_expiration_policy: true)
+ # We froze time so we need to set the created_at stubs again
+ stub_digest_config('sha256:configA', 1.hour.ago)
+ stub_digest_config('sha256:configB', 5.days.ago)
+ stub_digest_config('sha256:configC', 1.month.ago)
+ end
+
+ it 'caches the created_at values' do
+ expect_mget(tags_and_created_ats.keys)
+ expect_set(cacheable_tags)
+
+ expect(subject).to include(cached_tags_count: 0)
+ end
+
+ context 'with cached values' do
+ before do
+ ::Gitlab::Redis::Cache.with do |redis|
+ redis.set(cache_key('C'), rfc3339(1.month.ago))
+ end
+ end
+
+ it 'uses them' do
+ expect_mget(tags_and_created_ats.keys)
+
+ # because C is already in cache, it should not be cached again
+ expect_set(cacheable_tags.except('C'))
+
+ # We will ping the container registry for all tags *except* for C because it's cached
+ expect(ContainerRegistry::Blob)
+ .to receive(:new).with(repository, { "digest" => "sha256:configA" }).and_call_original
+ expect(ContainerRegistry::Blob)
+ .to receive(:new).with(repository, { "digest" => "sha256:configB" }).twice.and_call_original
+ expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, { "digest" => "sha256:configC" })
+ expect(ContainerRegistry::Blob)
+ .to receive(:new).with(repository, { "digest" => "sha256:configD" }).and_call_original
+
+ expect(subject).to include(cached_tags_count: 1)
+ end
+ end
+
+ def expect_mget(keys)
+ Gitlab::Redis::Cache.with do |redis|
+ parameters = keys.map { |k| cache_key(k) }
+ expect(redis).to receive(:mget).with(parameters).and_call_original
+ end
+ end
+
+ def expect_set(tags)
+ selected_tags = tags.map do |tag_name, created_at|
+ ex = 1.day.seconds - (Time.zone.now - created_at).seconds
+ [tag_name, created_at, ex.to_i] if ex.positive?
+ end.compact
+
+ return if selected_tags.count.zero?
+
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:pipelined).and_call_original
+
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ selected_tags.each do |tag_name, created_at, ex|
+ expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex)
+ end
+ end
+ end
+ end
+
+ def cache_key(tag_name)
+ "container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
+ end
+
+ def rfc3339(date_time)
+ # DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339
+ # The caching will use DateTime rfc3339
+ DateTime.rfc3339(date_time.rfc3339).rfc3339
+ end
+ end
+ end
+
+ private
+
+ def stub_tag_digest(tag, digest)
+ allow(repository.client)
+ .to receive(:repository_tag_digest)
+ .with(repository.path, tag) { digest }
+
+ allow(repository.client)
+ .to receive(:repository_manifest)
+ .with(repository.path, tag) do
+ { 'config' => { 'digest' => digest } } if digest
+ end
+ end
+
+ def stub_digest_config(digest, created_at)
+ allow(repository.client)
+ .to receive(:blob)
+ .with(repository.path, digest, nil) do
+ { 'created' => created_at.to_datetime.rfc3339 }.to_json if created_at
+ end
+ end
+
+ def expect_caching
+ ::Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:mget).and_call_original
+ expect(redis).to receive(:pipelined).and_call_original
+
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ expect(pipeline).to receive(:set).and_call_original
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 8269dbebccb..f7f02769f6a 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -146,20 +146,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect { destroy_project(project, user, {}) }.to change(MergeRequestDiff, :count).by(-1)
expect { another_project_mr.reload }.not_to raise_error
end
-
- context 'when extract_mr_diff_deletions feature flag is disabled' do
- before do
- stub_feature_flags(extract_mr_diff_deletions: false)
- end
-
- it 'also deletes merge request diffs' do
- merge_request_diffs = merge_request.merge_request_diffs
- expect(merge_request_diffs.size).to eq(1)
-
- expect { destroy_project(project, user, {}) }.to change(MergeRequestDiff, :count).by(-1)
- expect { another_project_mr.reload }.not_to raise_error
- end
- end
end
it_behaves_like 'deleting the project'
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index ab9f99f893d..6dc72948541 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -276,6 +276,15 @@ RSpec.describe Projects::ImportService do
expect(result[:status]).to eq :error
expect(result[:message]).to include('Only allowed ports are 80, 443')
end
+
+ it 'fails with file scheme' do
+ project.import_url = "file:///tmp/dir.git"
+
+ result = subject.execute
+
+ expect(result[:status]).to eq :error
+ expect(result[:message]).to include('Only allowed schemes are http, https')
+ end
end
it_behaves_like 'measurable service' do
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index 17d01a57221..ee8f7fb2ef2 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -37,10 +37,6 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
context 'when the move succeeds' do
it 'moves the repository to the new storage and unmarks the repository as read-only' do
- old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path_to_repo
- end
-
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
@@ -53,7 +49,6 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
expect(result).to be_success
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
- expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
expect(project.project_repository.shard_name).to eq('test_second_storage')
end
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 85d3e99109d..7d8951bf111 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -11,10 +11,27 @@ RSpec.describe Projects::UpdateService do
create(:project, creator: user, namespace: user.namespace)
end
+ shared_examples 'publishing Projects::ProjectAttributesChangedEvent' do |params:, attributes:|
+ it "publishes Projects::ProjectAttributesChangedEvent" do
+ expect { update_project(project, user, params) }
+ .to publish_event(Projects::ProjectAttributesChangedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id,
+ attributes: attributes
+ )
+ end
+ end
+
describe '#execute' do
let(:admin) { create(:admin) }
context 'when changing visibility level' do
+ it_behaves_like 'publishing Projects::ProjectAttributesChangedEvent',
+ params: { visibility_level: Gitlab::VisibilityLevel::INTERNAL },
+ attributes: %w[updated_at visibility_level]
+
context 'when visibility_level changes to INTERNAL' do
it 'updates the project to internal' do
expect(TodosDestroyer::ProjectPrivateWorker).not_to receive(:perform_in)
@@ -290,7 +307,7 @@ RSpec.describe Projects::UpdateService do
context 'when we update project but not enabling a wiki' do
it 'does not try to create an empty wiki' do
- TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
+ project.wiki.repository.raw.remove
result = update_project(project, user, { name: 'test1' })
@@ -311,7 +328,7 @@ RSpec.describe Projects::UpdateService do
context 'when enabling a wiki' do
it 'creates a wiki' do
project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED)
- TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
+ project.wiki.repository.raw.remove
result = update_project(project, user, project_feature_attributes: { wiki_access_level: ProjectFeature::ENABLED })
@@ -323,7 +340,7 @@ RSpec.describe Projects::UpdateService do
it 'logs an error and creates a metric when wiki can not be created' do
project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED)
- expect_any_instance_of(ProjectWiki).to receive(:wiki).and_raise(Wiki::CouldNotCreateWikiError)
+ expect_any_instance_of(ProjectWiki).to receive(:create_wiki_repository).and_raise(Wiki::CouldNotCreateWikiError)
expect_any_instance_of(described_class).to receive(:log_error).with("Could not create wiki for #{project.full_name}")
counter = double(:counter)
@@ -348,7 +365,37 @@ RSpec.describe Projects::UpdateService do
end
end
+ context 'when changes project features' do
+ # Using some sample features for testing.
+ # Not using all the features because some of them must be enabled/disabled together
+ %w[issues wiki forking].each do |feature_name|
+ let(:feature) { "#{feature_name}_access_level" }
+ let(:params) do
+ { project_feature_attributes: { feature => ProjectFeature::ENABLED } }
+ end
+
+ before do
+ project.project_feature.update!(feature => ProjectFeature::DISABLED)
+ end
+
+ it 'publishes Projects::ProjectFeaturesChangedEvent' do
+ expect { update_project(project, user, params) }
+ .to publish_event(Projects::ProjectFeaturesChangedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id,
+ features: ["updated_at", feature]
+ )
+ end
+ end
+ end
+
context 'when archiving a project' do
+ it_behaves_like 'publishing Projects::ProjectAttributesChangedEvent',
+ params: { archived: true },
+ attributes: %w[updated_at archived]
+
it 'publishes a ProjectTransferedEvent' do
expect { update_project(project, user, archived: true) }
.to publish_event(Projects::ProjectArchivedEvent)
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 3615747e191..47ebd55022f 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -67,10 +67,11 @@ RSpec.describe Repositories::ChangelogService do
allow(MergeRequestDiffCommit)
.to receive(:oldest_merge_request_id_per_commit)
.with(project.id, [commit2.id, commit1.id])
- .and_return([
- { sha: sha2, merge_request_id: mr1.id },
- { sha: sha3, merge_request_id: mr2.id }
- ])
+ .and_return(
+ [
+ { sha: sha2, merge_request_id: mr1.id },
+ { sha: sha3, merge_request_id: mr2.id }
+ ])
service = described_class
.new(project, creator, version: '1.0.0', from: sha1, to: sha3)
@@ -135,10 +136,11 @@ RSpec.describe Repositories::ChangelogService do
allow(MergeRequestDiffCommit)
.to receive(:oldest_merge_request_id_per_commit)
.with(project.id, [commit2.id, commit1.id])
- .and_return([
- { sha: sha2, merge_request_id: mr1.id },
- { sha: sha3, merge_request_id: mr2.id }
- ])
+ .and_return(
+ [
+ { sha: sha2, merge_request_id: mr1.id },
+ { sha: sha3, merge_request_id: mr2.id }
+ ])
service = described_class
.new(project, creator, version: '1.0.0', from: sha1, to: sha3)
diff --git a/spec/services/resource_events/merge_into_notes_service_spec.rb b/spec/services/resource_events/merge_into_notes_service_spec.rb
index abe00e72f20..ebfd942066f 100644
--- a/spec/services/resource_events/merge_into_notes_service_spec.rb
+++ b/spec/services/resource_events/merge_into_notes_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe ResourceEvents::MergeIntoNotesService do
notes = described_class.new(resource, user).execute([note1, note2])
- expected = [note1, event1, note2, event2].map(&:discussion_id)
+ expected = [note1, event1, note2, event2].map(&:reload).map(&:discussion_id)
expect(notes.map(&:discussion_id)).to eq expected
end
@@ -65,7 +65,7 @@ RSpec.describe ResourceEvents::MergeIntoNotesService do
last_fetched_at: 2.days.ago).execute
expect(notes.count).to eq 1
- expect(notes.first.discussion_id).to eq event.discussion_id
+ expect(notes.first.discussion_id).to eq event.reload.discussion_id
end
it "preloads the note author's status" do
diff --git a/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb b/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb
index 9c6b6a33b57..f368e107c60 100644
--- a/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb
+++ b/spec/services/resource_events/synthetic_milestone_notes_builder_service_spec.rb
@@ -19,10 +19,11 @@ RSpec.describe ResourceEvents::SyntheticMilestoneNotesBuilderService do
notes = described_class.new(issue, user).execute
expect(notes.map(&:created_at)).to eq(events.map(&:created_at))
- expect(notes.map(&:note)).to eq([
- "changed milestone to %#{milestone.iid}",
- 'removed milestone'
- ])
+ expect(notes.map(&:note)).to eq(
+ [
+ "changed milestone to %#{milestone.iid}",
+ 'removed milestone'
+ ])
end
it_behaves_like 'filters by paginated notes', :resource_milestone_event
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
index fdea3615fb1..9874189f73a 100644
--- a/spec/services/snippets/update_repository_storage_service_spec.rb
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Snippets::UpdateRepositoryStorageService do
- include Gitlab::ShellAdapter
-
subject { described_class.new(repository_storage_move) }
describe "#execute" do
@@ -32,10 +30,6 @@ RSpec.describe Snippets::UpdateRepositoryStorageService do
context 'when the move succeeds' do
it 'moves the repository to the new storage and unmarks the repository as read-only' do
- old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- snippet.repository.path_to_repo
- end
-
expect(snippet_repository_double).to receive(:replicate)
.with(snippet.repository.raw)
expect(snippet_repository_double).to receive(:checksum)
@@ -48,7 +42,6 @@ RSpec.describe Snippets::UpdateRepositoryStorageService do
expect(result).to be_success
expect(snippet).not_to be_repository_read_only
expect(snippet.repository_storage).to eq(destination)
- expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
expect(snippet.snippet_repository.shard_name).to eq(destination)
end
end
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index b32599d4af8..03e1811c8a5 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -388,24 +388,95 @@ RSpec.describe Users::DestroyService do
context 'batched nullify' do
let(:other_user) { create(:user) }
+ # rubocop:disable Layout/LineLength
+ def nullify_in_batches_regexp(table, column, user, batch_size: 100)
+ %r{^UPDATE "#{table}" SET "#{column}" = NULL WHERE "#{table}"."id" IN \(SELECT "#{table}"."id" FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id} LIMIT #{batch_size}\)}
+ end
+
+ def delete_in_batches_regexps(table, column, user, items, batch_size: 1000)
+ select_query = %r{^SELECT "#{table}".* FROM "#{table}" WHERE "#{table}"."#{column}" = #{user.id}.*ORDER BY "#{table}"."id" ASC LIMIT #{batch_size}}
+
+ [select_query] + items.map { |item| %r{^DELETE FROM "#{table}" WHERE "#{table}"."id" = #{item.id}} }
+ end
+ # rubocop:enable Layout/LineLength
+
it 'nullifies related associations in batches' do
expect(other_user).to receive(:nullify_dependent_associations_in_batches).and_call_original
described_class.new(user).execute(other_user, skip_authorization: true)
end
- it 'nullifies last_updated_issues, closed_issues, resource_label_events' do
+ it 'nullifies issues and resource associations', :aggregate_failures do
issue = create(:issue, closed_by: other_user, updated_by: other_user)
resource_label_event = create(:resource_label_event, user: other_user)
+ resource_state_event = create(:resource_state_event, user: other_user)
+ todos = create_list(:todo, 2, project: issue.project, user: other_user, author: other_user, target: issue)
+ event = create(:event, project: issue.project, author: other_user)
- described_class.new(user).execute(other_user, skip_authorization: true)
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ described_class.new(user).execute(other_user, skip_authorization: true)
+ end
issue.reload
resource_label_event.reload
+ resource_state_event.reload
expect(issue.closed_by).to be_nil
expect(issue.updated_by).to be_nil
expect(resource_label_event.user).to be_nil
+ expect(resource_state_event.user).to be_nil
+ expect(other_user.authored_todos).to be_empty
+ expect(other_user.todos).to be_empty
+ expect(other_user.authored_events).to be_empty
+
+ expected_queries = [
+ nullify_in_batches_regexp(:issues, :updated_by_id, other_user),
+ nullify_in_batches_regexp(:issues, :closed_by_id, other_user),
+ nullify_in_batches_regexp(:resource_label_events, :user_id, other_user),
+ nullify_in_batches_regexp(:resource_state_events, :user_id, other_user)
+ ]
+
+ expected_queries += delete_in_batches_regexps(:todos, :user_id, other_user, todos)
+ expected_queries += delete_in_batches_regexps(:todos, :author_id, other_user, todos)
+ expected_queries += delete_in_batches_regexps(:events, :author_id, other_user, [event])
+
+ expect(query_recorder.log).to include(*expected_queries)
+ end
+
+ it 'nullifies merge request associations', :aggregate_failures do
+ merge_request = create(:merge_request, source_project: project, target_project: project,
+ assignee: other_user, updated_by: other_user, merge_user: other_user)
+ merge_request.metrics.update!(merged_by: other_user, latest_closed_by: other_user)
+ merge_request.reviewers = [other_user]
+ merge_request.assignees = [other_user]
+
+ query_recorder = ActiveRecord::QueryRecorder.new do
+ described_class.new(user).execute(other_user, skip_authorization: true)
+ end
+
+ merge_request.reload
+
+ expect(merge_request.updated_by).to be_nil
+ expect(merge_request.assignee).to be_nil
+ expect(merge_request.assignee_id).to be_nil
+ expect(merge_request.metrics.merged_by).to be_nil
+ expect(merge_request.metrics.latest_closed_by).to be_nil
+ expect(merge_request.reviewers).to be_empty
+ expect(merge_request.assignees).to be_empty
+
+ expected_queries = [
+ nullify_in_batches_regexp(:merge_requests, :updated_by_id, other_user),
+ nullify_in_batches_regexp(:merge_requests, :assignee_id, other_user),
+ nullify_in_batches_regexp(:merge_request_metrics, :merged_by_id, other_user),
+ nullify_in_batches_regexp(:merge_request_metrics, :latest_closed_by_id, other_user)
+ ]
+
+ expected_queries += delete_in_batches_regexps(:merge_request_assignees, :user_id, other_user,
+ merge_request.assignees)
+ expected_queries += delete_in_batches_regexps(:merge_request_reviewers, :user_id, other_user,
+ merge_request.reviewers)
+
+ expect(query_recorder.log).to include(*expected_queries)
end
end
end
diff --git a/spec/services/users/dismiss_namespace_callout_service_spec.rb b/spec/services/users/dismiss_namespace_callout_service_spec.rb
deleted file mode 100644
index fbcdb66c9e8..00000000000
--- a/spec/services/users/dismiss_namespace_callout_service_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Users::DismissNamespaceCalloutService do
- describe '#execute' do
- let_it_be(:user) { create(:user) }
-
- let(:params) { { feature_name: feature_name, namespace_id: user.namespace.id } }
- let(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
-
- subject(:execute) do
- described_class.new(
- container: nil, current_user: user, params: params
- ).execute
- end
-
- it_behaves_like 'dismissing user callout', Users::NamespaceCallout
-
- it 'sets the namespace_id' do
- expect(execute.namespace_id).to eq(user.namespace.id)
- end
- end
-end
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index e6ccb2b16e7..e33886d2add 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -108,8 +108,8 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
describe '#update_authorizations' do
context 'when there are no rows to add and remove' do
it 'does not change authorizations' do
- expect(user).not_to receive(:remove_project_authorizations)
- expect(ProjectAuthorization).not_to receive(:insert_authorizations)
+ expect(ProjectAuthorization).not_to receive(:delete_all_in_batches_for_user)
+ expect(ProjectAuthorization).not_to receive(:insert_all_in_batches)
service.update_authorizations([], [])
end
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index fed3ae7a543..551c3dbcc82 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -75,7 +75,8 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
'Content-Type' => 'application/json',
'User-Agent' => "GitLab/#{Gitlab::VERSION}",
'X-Gitlab-Event' => 'Push Hook',
- 'X-Gitlab-Event-UUID' => uuid
+ 'X-Gitlab-Event-UUID' => uuid,
+ 'X-Gitlab-Instance' => Gitlab.config.gitlab.base_url
}
end
@@ -164,7 +165,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
end
end
- it 'POSTs the data as JSON' do
+ it 'POSTs the data as JSON and returns expected headers' do
stub_full_request(project_hook.url, method: :post)
service_instance.execute
@@ -174,6 +175,22 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
).once
end
+ context 'when webhooks_gitlab_instance_header flag is disabled' do
+ before do
+ stub_feature_flags(webhooks_gitlab_instance_header: false)
+ end
+
+ it 'excludes the X-Gitlab-Instance header' do
+ stub_full_request(project_hook.url, method: :post)
+
+ service_instance.execute
+
+ expect(WebMock).to have_requested(:post, stubbed_hostname(project_hook.url)).with(
+ headers: headers.except('X-Gitlab-Instance')
+ ).once
+ end
+ end
+
context 'when the data is a Gitlab::DataBuilder::Pipeline' do
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:data) { ::Gitlab::DataBuilder::Pipeline.new(pipeline) }
diff --git a/spec/services/web_hooks/log_execution_service_spec.rb b/spec/services/web_hooks/log_execution_service_spec.rb
index 1967a8368fb..1b8ff9f2a05 100644
--- a/spec/services/web_hooks/log_execution_service_spec.rb
+++ b/spec/services/web_hooks/log_execution_service_spec.rb
@@ -41,12 +41,21 @@ RSpec.describe WebHooks::LogExecutionService do
service.execute
end
+ it 'does not update the last failure when the feature flag is disabled' do
+ stub_feature_flags(web_hooks_disable_failed: false)
+
+ expect(project_hook).not_to receive(:update_last_failure)
+
+ service.execute
+ end
+
context 'obtaining an exclusive lease' do
let(:lease_key) { "web_hooks:update_hook_failure_state:#{project_hook.id}" }
it 'updates failure state using a lease that ensures fresh state is written' do
service = described_class.new(hook: project_hook, log_data: data, response_category: :error)
- WebHook.find(project_hook.id).update!(backoff_count: 1)
+ # Write state somewhere else, so that the hook is out-of-date
+ WebHook.find(project_hook.id).update!(recent_failures: 5, disabled_until: 10.minutes.from_now, backoff_count: 1)
lease = stub_exclusive_lease(lease_key, timeout: described_class::LOCK_TTL)
@@ -69,6 +78,8 @@ RSpec.describe WebHooks::LogExecutionService do
subject(:service) { described_class.new(hook: project_hook, log_data: data, response_category: response_category) }
before do
+ # stub LOCK_RETRY to be 0 in order for tests to run quicker
+ stub_const("#{described_class.name}::LOCK_RETRY", 0)
stub_exclusive_lease_taken(lease_key, timeout: described_class::LOCK_TTL)
allow(project_hook).to receive(:executable?).and_return(executable)
end
@@ -146,36 +157,10 @@ RSpec.describe WebHooks::LogExecutionService do
data[:response_status] = '500'
end
- it 'does not increment the failure count' do
- expect { service.execute }.not_to change(project_hook, :recent_failures)
- end
-
it 'backs off' do
- expect { service.execute }.to change(project_hook, :disabled_until)
- end
-
- it 'increases the backoff count' do
- expect { service.execute }.to change(project_hook, :backoff_count).by(1)
- end
-
- context 'when the previous cool-off was near the maximum' do
- before do
- project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 8)
- end
+ expect(project_hook).to receive(:backoff!)
- it 'sets the disabled_until attribute' do
- expect { service.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
- end
- end
-
- context 'when we have backed-off many many times' do
- before do
- project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 365)
- end
-
- it 'sets the disabled_until attribute' do
- expect { service.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
- end
+ service.execute
end
end
end
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index e8b82b0b4f2..1761d1104dd 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -88,6 +88,26 @@ RSpec.describe WorkItems::UpdateService do
end
end
+ context 'when description is changed' do
+ let(:opts) { { description: 'description changed' } }
+
+ it 'triggers GraphQL description updated subscription' do
+ expect(GraphqlTriggers).to receive(:issuable_description_updated).with(work_item).and_call_original
+
+ update_work_item
+ end
+ end
+
+ context 'when description is not changed' do
+ let(:opts) { { title: 'title changed' } }
+
+ it 'does not trigger GraphQL description updated subscription' do
+ expect(GraphqlTriggers).not_to receive(:issuable_description_updated)
+
+ update_work_item
+ end
+ end
+
context 'when updating state_event' do
context 'when state_event is close' do
let(:opts) { { state_event: 'close' } }
@@ -292,5 +312,65 @@ RSpec.describe WorkItems::UpdateService do
end
end
end
+
+ describe 'label updates' do
+ let_it_be(:label1) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project) }
+
+ context 'when labels are changed' do
+ let(:label) { create(:label, project: project) }
+ let(:opts) { { label_ids: [label1.id] } }
+
+ it 'tracks users updating work item labels' do
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).to receive(:track_work_item_labels_changed_action).with(author: current_user)
+
+ update_work_item
+ end
+
+ it_behaves_like 'broadcasting issuable labels updates' do
+ let(:issuable) { work_item }
+ let(:label_a) { label1 }
+ let(:label_b) { label2 }
+
+ def update_issuable(update_params)
+ described_class.new(
+ project: project,
+ current_user: current_user,
+ params: update_params,
+ spam_params: spam_params,
+ widget_params: widget_params
+ ).execute(work_item)
+ end
+ end
+ end
+
+ context 'when labels are not changed' do
+ shared_examples 'work item update that does not track label updates' do
+ it 'does not track users updating work item labels' do
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_labels_changed_action)
+
+ update_work_item
+ end
+ end
+
+ context 'when labels param is not provided' do
+ let(:opts) { { title: 'not updating labels' } }
+
+ it_behaves_like 'work item update that does not track label updates'
+ end
+
+ context 'when labels param is provided but labels remain unchanged' do
+ let(:opts) { { label_ids: [] } }
+
+ it_behaves_like 'work item update that does not track label updates'
+ end
+
+ context 'when labels param is provided invalid values' do
+ let(:opts) { { label_ids: [non_existing_record_id] } }
+
+ it_behaves_like 'work item update that does not track label updates'
+ end
+ end
+ end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index c75f651fb92..8a1fa486bde 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-if $".include?(File.expand_path('fast_spec_helper.rb', __dir__))
+if $LOADED_FEATURES.include?(File.expand_path('fast_spec_helper.rb', __dir__))
warn 'Detected fast_spec_helper is loaded first than spec_helper.'
warn 'If running test files using both spec_helper and fast_spec_helper,'
warn 'make sure spec_helper is loaded first, or run rspec with `-r spec_helper`.'
@@ -140,7 +140,6 @@ RSpec.configure do |config|
config.include FixtureHelpers
config.include NonExistingRecordsHelpers
config.include GitlabRoutingHelper
- config.include StubExperiments
config.include StubGitlabCalls
config.include NextFoundInstanceOf
config.include NextInstanceOf
@@ -181,13 +180,15 @@ RSpec.configure do |config|
config.include RSpec::Benchmark::Matchers, type: :benchmark
config.include DetailedErrorHelpers
+ config.include_context 'when rendered has no HTML escapes', type: :view
+
include StubFeatureFlags
include StubSnowplow
include StubMember
if ENV['CI'] || ENV['RETRIES']
- # This includes the first try, i.e. tests will be run 4 times before failing.
- config.default_retry_count = ENV.fetch('RETRIES', 3).to_i + 1
+ # This includes the first try, i.e. tests will be run 2 times before failing.
+ config.default_retry_count = ENV.fetch('RETRIES', 1).to_i + 1
# Do not retry controller tests because rspec-retry cannot properly
# reset the controller which may contain data from last attempt. See
@@ -310,9 +311,6 @@ RSpec.configure do |config|
# See https://docs.gitlab.com/ee/development/feature_flags/#selectively-disable-by-actor
stub_feature_flags(legacy_merge_request_state_check_for_merged_result_pipelines: false)
- # Will be removed in https://gitlab.com/gitlab-org/gitlab/-/issues/369875
- stub_feature_flags(override_group_level_protected_environment_settings_permission: false)
-
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
@@ -407,8 +405,7 @@ RSpec.configure do |config|
with_sidekiq_server_middleware do |chain|
Gitlab::SidekiqMiddleware.server_configurator(
metrics: false, # The metrics don't go anywhere in tests
- arguments_logger: false, # We're not logging the regular messages for inline jobs
- memory_killer: false # This is not a thing we want to do inline in tests
+ arguments_logger: false # We're not logging the regular messages for inline jobs
).call(chain)
chain.add DisableQueryLimit
chain.insert_after ::Gitlab::SidekiqMiddleware::RequestStoreMiddleware, IsolatedRequestStore
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index a5d845f5177..57065400220 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -7,7 +7,7 @@ require 'capybara-screenshot/rspec'
require 'selenium-webdriver'
# Give CI some extra time
-timeout = ENV['CI'] || ENV['CI_SERVER'] ? 60 : 30
+timeout = ENV['CI'] || ENV['CI_SERVER'] ? 30 : 10
# Support running Capybara on a specific port to allow saving commonly used pages
Capybara.server_port = ENV['CAPYBARA_PORT'] if ENV['CAPYBARA_PORT']
diff --git a/spec/support/capybara_slow_finder.rb b/spec/support/capybara_slow_finder.rb
new file mode 100644
index 00000000000..975ddd52c1f
--- /dev/null
+++ b/spec/support/capybara_slow_finder.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Capybara
+ MESSAGE = <<~MSG
+ Timeout (%{timeout}s) reached while running a waiting Capybara finder.
+ Consider using a non-waiting finder.
+
+ See https://www.cloudbees.com/blog/faster-rails-tests
+ MSG
+
+ module Node
+ class Base
+ # Inspired by https://github.com/ngauthier/capybara-slow_finder_errors
+ module SlowFinder
+ def synchronize(seconds = nil, errors: nil)
+ start_time = Gitlab::Metrics::System.monotonic_time
+
+ super
+ rescue Capybara::ElementNotFound => e
+ seconds ||= Capybara.default_max_wait_time
+
+ raise e unless seconds > 0 && Gitlab::Metrics::System.monotonic_time - start_time > seconds
+
+ message = format(MESSAGE, timeout: seconds)
+ raise e, "#{$!}\n\n#{message}", e.backtrace
+ end
+ end
+
+ prepend SlowFinder
+ end
+ end
+end
diff --git a/spec/support/cross_database_modification.rb b/spec/support/cross_database_modification.rb
deleted file mode 100644
index e0d91001c03..00000000000
--- a/spec/support/cross_database_modification.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.configure do |config|
- config.after do |example|
- [::ApplicationRecord, ::Ci::ApplicationRecord].each do |base_class|
- base_class.gitlab_transactions_stack.clear if base_class.respond_to?(:gitlab_transactions_stack)
- end
- end
-end
diff --git a/spec/support/database/multiple_databases.rb b/spec/support/database/multiple_databases.rb
index 05f26e57e9c..25c3b6e74ce 100644
--- a/spec/support/database/multiple_databases.rb
+++ b/spec/support/database/multiple_databases.rb
@@ -2,6 +2,15 @@
module Database
module MultipleDatabases
+ def run_and_cleanup(example)
+ # Each example may call `migrate!`, so we must ensure we are migrated down every time
+ schema_migrate_down!
+
+ example.run
+
+ delete_from_all_tables!(except: deletion_except_tables)
+ end
+
def skip_if_multiple_databases_not_setup
skip 'Skipping because multiple databases not set up' unless Gitlab::Database.has_config?(:ci)
end
@@ -22,6 +31,21 @@ module Database
model.establish_connection(new_db_config)
end
+ def ensure_schema_and_empty_tables
+ # Ensure all schemas for both databases are migrated back
+ Gitlab::Database.database_base_models.each do |_, base_model|
+ with_reestablished_active_record_base do
+ reconfigure_db_connection(
+ model: ActiveRecord::Base,
+ config_model: base_model
+ )
+
+ schema_migrate_up!
+ delete_from_all_tables!(except: deletion_except_tables)
+ end
+ end
+ end
+
# The usage of this method switches temporarily used `connection_handler`
# allowing full manipulation of ActiveRecord::Base connections without
# having side effects like:
@@ -87,6 +111,16 @@ module Database
end
RSpec.configure do |config|
+ # Ensure database versions are memoized to prevent query counts from
+ # being affected by version checks. Note that
+ # Gitlab::Database.check_postgres_version_and_print_warning is called
+ # at startup, but that generates its own
+ # `Gitlab::Database::Reflection` so the result is not memoized by
+ # callers of `ApplicationRecord.database.version`, such as
+ # `Gitlab::Database::AsWithMaterialized.materialized_supported?`.
+ # TODO This can be removed once https://gitlab.com/gitlab-org/gitlab/-/issues/325639 is completed.
+ [ApplicationRecord, ::Ci::ApplicationRecord].each { |record| record.database.version }
+
config.around(:each, :reestablished_active_record_base) do |example|
with_reestablished_active_record_base(reconnect: example.metadata.fetch(:reconnect, true)) do
example.run
@@ -99,7 +133,15 @@ RSpec.configure do |config|
end
end
+ config.append_after(:context, :migration) do
+ break if recreate_databases_and_seed_if_needed
+
+ ensure_schema_and_empty_tables
+ end
+
config.around(:each, :migration) do |example|
+ self.class.use_transactional_tests = false
+
migration_schema = example.metadata[:migration]
migration_schema = :gitlab_main if migration_schema == true
base_model = Gitlab::Database.schemas_to_base_models.fetch(migration_schema).first
@@ -112,11 +154,13 @@ RSpec.configure do |config|
config_model: base_model
)
- example.run
+ run_and_cleanup(example)
end
else
- example.run
+ run_and_cleanup(example)
end
+
+ self.class.use_transactional_tests = true
end
end
diff --git a/spec/support/database/prevent_cross_database_modification.rb b/spec/support/database/prevent_cross_database_modification.rb
index c509aecf9b8..759e8316cc5 100644
--- a/spec/support/database/prevent_cross_database_modification.rb
+++ b/spec/support/database/prevent_cross_database_modification.rb
@@ -14,18 +14,22 @@ RSpec.configure do |config|
# By default allow cross-modifications as we want to observe only transactions
# within a specific block of execution which is defined be `before(:each)` and `after(:each)`
config.before(:all) do
- ::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.suppress = true
+ ::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.suppress_in_rspec = true
end
# Using before and after blocks because the around block causes problems with the let_it_be
# record creations. It makes an extra savepoint which breaks the transaction count logic.
config.before do |example_file|
- ::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.suppress =
+ ::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.suppress_in_rspec =
CROSS_DB_MODIFICATION_ALLOW_LIST.include?(example_file.file_path_rerun_argument)
end
# Reset after execution to preferred state
config.after do |example_file|
- ::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.suppress = true
+ ::Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.suppress_in_rspec = true
+
+ [::ApplicationRecord, ::Ci::ApplicationRecord].each do |base_class|
+ base_class.gitlab_transactions_stack.clear if base_class.respond_to?(:gitlab_transactions_stack)
+ end
end
end
diff --git a/spec/support/database_cleaner.rb b/spec/support/database_cleaner.rb
index f8ddf3e66a5..7bd1f0c5dfa 100644
--- a/spec/support/database_cleaner.rb
+++ b/spec/support/database_cleaner.rb
@@ -13,19 +13,6 @@ RSpec.configure do |config|
DatabaseCleaner.clean_with(:deletion)
end
- config.append_after(:context, :migration) do
- delete_from_all_tables!(except: ['work_item_types'])
-
- # Postgres maximum number of columns in a table is 1600 (https://github.com/postgres/postgres/blob/de41869b64d57160f58852eab20a27f248188135/src/include/access/htup_details.h#L23-L47).
- # We drop and recreate the database if any table has more than 1200 columns, just to be safe.
- if any_connection_class_with_more_than_allowed_columns?
- recreate_all_databases!
-
- # Seed required data as recreating DBs will delete it
- TestEnv.seed_db
- end
- end
-
config.around(:each, :delete) do |example|
self.class.use_transactional_tests = false
@@ -35,14 +22,4 @@ RSpec.configure do |config|
self.class.use_transactional_tests = true
end
-
- config.around(:each, :migration) do |example|
- self.class.use_transactional_tests = false
-
- example.run
-
- delete_from_all_tables!(except: ['work_item_types'])
-
- self.class.use_transactional_tests = true
- end
end
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index e3a05f17593..24cdbe04fc2 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -78,22 +78,32 @@ module DbCleaner
puts "Databases re-creation done in #{Gitlab::Metrics::System.monotonic_time - start}"
end
+ def recreate_databases_and_seed_if_needed
+ # Postgres maximum number of columns in a table is 1600 (https://github.com/postgres/postgres/blob/de41869b64d57160f58852eab20a27f248188135/src/include/access/htup_details.h#L23-L47).
+ # We drop and recreate the database if any table has more than 1200 columns, just to be safe.
+ return false unless any_connection_class_with_more_than_allowed_columns?
+
+ recreate_all_databases!
+
+ # Seed required data as recreating DBs will delete it
+ TestEnv.seed_db
+
+ true
+ end
+
def force_disconnect_all_connections!
- all_connection_classes.each do |connection_class|
- # We use `connection_pool` to avoid going through
- # Load Balancer since it does retry ops
- pool = connection_class.connection_pool
-
- # Force disconnect https://www.cybertec-postgresql.com/en/terminating-database-connections-in-postgresql/
- pool.connection.execute(<<-SQL)
- SELECT pg_terminate_backend(pid)
- FROM pg_stat_activity
- WHERE datname = #{pool.connection.quote(pool.db_config.database)}
- AND pid != pg_backend_pid();
- SQL
-
- connection_class.connection_pool.disconnect!
+ cmd = <<~SQL
+ SELECT pg_terminate_backend(pg_stat_activity.pid)
+ FROM pg_stat_activity
+ WHERE datname = current_database()
+ AND pid <> pg_backend_pid();
+ SQL
+
+ Gitlab::Database::EachDatabase.each_database_connection(include_shared: false) do |connection|
+ connection.execute(cmd)
end
+
+ ActiveRecord::Base.clear_all_connections! # rubocop:disable Database/MultipleDatabases
end
end
diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
index 1ac8e49fb45..c8af07905c2 100644
--- a/spec/support/finder_collection_allowlist.yml
+++ b/spec/support/finder_collection_allowlist.yml
@@ -55,6 +55,7 @@
- Security::FindingsFinder
- Security::PipelineVulnerabilitiesFinder
- Security::ScanExecutionPoliciesFinder
+- Security::ScanResultPoliciesFinder
- Security::TrainingProviders::BaseUrlFinder
- Security::TrainingUrlsFinder
- Security::TrainingProviders::KontraUrlFinder
diff --git a/spec/support/gitlab_stubs/gitlab_ci.yml b/spec/support/gitlab_stubs/gitlab_ci.yml
index b6a66cfa2c6..94523591765 100644
--- a/spec/support/gitlab_stubs/gitlab_ci.yml
+++ b/spec/support/gitlab_stubs/gitlab_ci.yml
@@ -7,9 +7,12 @@ before_script:
- bundle exec rake db:create
variables:
+ KEY_VALUE_VAR:
+ value: 'value x'
+ description: 'value of KEY_VALUE_VAR'
DB_NAME: postgres
ENVIRONMENT_VAR:
- value: 'env var value'
+ value: ['env var value', 'env var value2']
description: 'env var description'
stages:
diff --git a/spec/support/helpers/exclusive_lease_helpers.rb b/spec/support/helpers/exclusive_lease_helpers.rb
index 95cfc56c273..06e5ae5427c 100644
--- a/spec/support/helpers/exclusive_lease_helpers.rb
+++ b/spec/support/helpers/exclusive_lease_helpers.rb
@@ -2,6 +2,8 @@
module ExclusiveLeaseHelpers
def stub_exclusive_lease(key = nil, uuid = 'uuid', renew: false, timeout: nil)
+ prepare_exclusive_lease_stub
+
key ||= instance_of(String)
timeout ||= instance_of(Integer)
@@ -37,4 +39,21 @@ module ExclusiveLeaseHelpers
.to receive(:cancel)
.with(key, uuid)
end
+
+ private
+
+ # This prepares the stub to be able to stub specific lease keys
+ # while allowing unstubbed lease keys to behave as original.
+ #
+ # allow(Gitlab::ExclusiveLease).to receive(:new).and_call_original
+ # can only be called once to prevent resetting stubs when
+ # `stub_exclusive_lease` is called multiple times.
+ def prepare_exclusive_lease_stub
+ return if @exclusive_lease_allowed_to_call_original
+
+ allow(Gitlab::ExclusiveLease)
+ .to receive(:new).and_call_original
+
+ @exclusive_lease_allowed_to_call_original = true
+ end
end
diff --git a/spec/support/helpers/features/web_ide_spec_helpers.rb b/spec/support/helpers/features/web_ide_spec_helpers.rb
index 70dedc3ac50..551749a43de 100644
--- a/spec/support/helpers/features/web_ide_spec_helpers.rb
+++ b/spec/support/helpers/features/web_ide_spec_helpers.rb
@@ -8,7 +8,6 @@
# ...
#
# ide_visit(project)
-# ide_create_new_file('path/to/file.txt', content: 'Lorem ipsum')
# ide_commit
#
module WebIdeSpecHelpers
@@ -40,29 +39,6 @@ module WebIdeSpecHelpers
row.matches_css?('.folder.is-open')
end
- # Creates a file in the IDE by expanding directories
- # then using the dropdown next to the parent directory
- #
- # - Throws an error if the parent directory is not found
- def ide_create_new_file(path, content: '')
- parent_path = path.split('/')[0...-1].join('/')
-
- container = ide_traverse_to_file(parent_path)
-
- if container
- click_file_action(container, 'New file')
- else
- ide_tree_actions.click_button('New file')
- end
-
- within '#ide-new-entry' do
- find('input').fill_in(with: path)
- click_button('Create file')
- end
-
- ide_set_editor_value(content)
- end
-
# Deletes a file by traversing to `path`
# then clicking the 'Delete' action.
#
diff --git a/spec/support/helpers/git_helpers.rb b/spec/support/helpers/git_helpers.rb
index 99c5871ba54..72bba419116 100644
--- a/spec/support/helpers/git_helpers.rb
+++ b/spec/support/helpers/git_helpers.rb
@@ -2,7 +2,9 @@
module GitHelpers
def rugged_repo(repository)
- path = File.join(TestEnv.repos_path, repository.disk_path + '.git')
+ path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ File.join(TestEnv.repos_path, repository.disk_path + '.git')
+ end
Rugged::Repository.new(path)
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 9d745f2cb70..b2fc6ae3286 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -17,6 +17,9 @@ module GraphqlHelpers
# makes an underscored string look like a fieldname
# "merge_request" => "mergeRequest"
def self.fieldnamerize(underscored_field_name)
+ # Skip transformation for a field with leading underscore
+ return underscored_field_name.to_s if underscored_field_name.start_with?('_')
+
underscored_field_name.to_s.camelize(:lower)
end
@@ -717,7 +720,7 @@ module GraphqlHelpers
end
def allow_high_graphql_transaction_threshold
- stub_const("Gitlab::QueryLimiting::Transaction::THRESHOLD", 1000)
+ allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(1000)
end
def allow_high_graphql_query_size
diff --git a/spec/support/helpers/html_escaped_helpers.rb b/spec/support/helpers/html_escaped_helpers.rb
index 7f6825e9598..7cbea7e7428 100644
--- a/spec/support/helpers/html_escaped_helpers.rb
+++ b/spec/support/helpers/html_escaped_helpers.rb
@@ -21,4 +21,35 @@ module HtmlEscapedHelpers
match_data
end
+
+ # Checks if +content+ contains HTML escaped tags and raises an exception
+ # if it does.
+ #
+ # See #match_html_escaped_tags for details.
+ def ensure_no_html_escaped_tags!(content, example)
+ match_data = match_html_escaped_tags(content)
+ return unless match_data
+
+ # Truncate
+ pre_match = match_data.pre_match.last(50)
+ match = match_data[0]
+ post_match = match_data.post_match.first(50)
+
+ string = "#{pre_match}«#{match}»#{post_match}"
+
+ raise <<~MESSAGE
+ The following string contains HTML escaped tags:
+
+ #{string}
+
+ Please consider using `.html_safe`.
+
+ This check can be disabled via:
+
+ it #{example.description.inspect}, :skip_html_escaped_tags_check do
+ ...
+ end
+
+ MESSAGE
+ end
end
diff --git a/spec/support/helpers/ldap_helpers.rb b/spec/support/helpers/ldap_helpers.rb
index 2f5f8be518c..48b593fb3d1 100644
--- a/spec/support/helpers/ldap_helpers.rb
+++ b/spec/support/helpers/ldap_helpers.rb
@@ -69,6 +69,32 @@ module LdapHelpers
allow_any_instance_of(Gitlab::Auth::Ldap::Adapter)
.to receive(:ldap_search).and_raise(Gitlab::Auth::Ldap::LdapConnectionError)
end
+
+ def stub_ldap_access(user, provider, provider_label)
+ ldap_server_config =
+ {
+ 'label' => provider_label,
+ 'provider_name' => provider,
+ 'attributes' => {},
+ 'encryption' => 'plain',
+ 'uid' => 'uid',
+ 'base' => 'dc=example,dc=com'
+ }
+ uid = 'my-uid'
+ allow(::Gitlab::Auth::Ldap::Config).to receive_messages(enabled: true, servers: [ldap_server_config])
+ allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [provider.to_sym])
+
+ Ldap::OmniauthCallbacksController.define_providers!
+ Rails.application.reload_routes!
+
+ mock_auth_hash(provider, uid, user.email)
+ allow(Gitlab::Auth::Ldap::Access).to receive(:allowed?).with(user).and_return(true)
+
+ allow_next_instance_of(ActionDispatch::Routing::RoutesProxy) do |instance|
+ allow(instance).to receive(:"user_#{provider}_omniauth_callback_path")
+ .and_return("/users/auth/#{provider}/callback")
+ end
+ end
end
LdapHelpers.include_mod_with('LdapHelpers')
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 87a1f5459ec..44237b821c3 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -119,6 +119,16 @@ module LoginHelpers
click_button "oauth-login-#{provider}"
end
+ def sign_in_using_ldap!(user, ldap_tab, ldap_name)
+ visit new_user_session_path
+ click_link ldap_tab
+ fill_in 'username', with: user.username
+ fill_in 'password', with: user.password
+ within("##{ldap_name}") do
+ click_button 'Sign in'
+ end
+ end
+
def register_via(provider, uid, email, additional_info: {})
mock_auth_hash(provider, uid, email, additional_info: additional_info)
visit new_user_registration_path
diff --git a/spec/support/helpers/migrations_helpers/vulnerabilities_helper.rb b/spec/support/helpers/migrations_helpers/vulnerabilities_helper.rb
new file mode 100644
index 00000000000..0a86d7abc83
--- /dev/null
+++ b/spec/support/helpers/migrations_helpers/vulnerabilities_helper.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module MigrationHelpers
+ module VulnerabilitiesHelper
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'b1cee17e-3d7a-11ed-b878-0242ac120002')
+ table(:vulnerability_occurrences).create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ table(:vulnerabilities).create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+ end
+end
diff --git a/spec/support/helpers/project_helpers.rb b/spec/support/helpers/project_helpers.rb
index 2427ed2bcc9..daa07e385ea 100644
--- a/spec/support/helpers/project_helpers.rb
+++ b/spec/support/helpers/project_helpers.rb
@@ -1,22 +1,6 @@
# frozen_string_literal: true
module ProjectHelpers
- # @params target [Project] membership target
- # @params membership [Symbol] accepts the membership levels :guest, :reporter...
- # and phony levels :non_member and :anonymous
- def create_user_from_membership(target, membership)
- case membership
- when :anonymous
- nil
- when :non_member
- create(:user, name: membership)
- when :admin
- create(:user, :admin, name: 'admin')
- else
- create(:user, name: membership).tap { |u| target.add_member(u, membership) }
- end
- end
-
def update_feature_access_level(project, access_level, additional_params = {})
features = ProjectFeature::FEATURES.dup
features.delete(:pages)
diff --git a/spec/support/helpers/seed_helper.rb b/spec/support/helpers/seed_helper.rb
deleted file mode 100644
index 9628762d46a..00000000000
--- a/spec/support/helpers/seed_helper.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require_relative 'test_env'
-
-# This file is specific to specs in spec/lib/gitlab/git/
-
-SEED_STORAGE_PATH = Gitlab::GitalyClient::StorageSettings.allow_disk_access { TestEnv.repos_path }
-TEST_REPO_PATH = 'gitlab-git-test.git'
-TEST_NORMAL_REPO_PATH = 'not-bare-repo.git'
-TEST_MUTABLE_REPO_PATH = 'mutable-repo.git'
-TEST_BROKEN_REPO_PATH = 'broken-repo.git'
-
-module SeedHelper
- GITLAB_GIT_TEST_REPO_URL = File.expand_path('../gitlab-git-test.git', __dir__)
-
- def ensure_seeds
- if File.exist?(SEED_STORAGE_PATH)
- FileUtils.rm_r(SEED_STORAGE_PATH)
- end
-
- FileUtils.mkdir_p(SEED_STORAGE_PATH)
-
- create_bare_seeds
- create_normal_seeds
- create_mutable_seeds
- create_broken_seeds
- end
-
- def create_bare_seeds
- system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{GITLAB_GIT_TEST_REPO_URL}),
- chdir: SEED_STORAGE_PATH,
- out: '/dev/null',
- err: '/dev/null')
- end
-
- def create_normal_seeds
- system(git_env, *%W(#{Gitlab.config.git.bin_path} clone #{TEST_REPO_PATH} #{TEST_NORMAL_REPO_PATH}),
- chdir: SEED_STORAGE_PATH,
- out: '/dev/null',
- err: '/dev/null')
- end
-
- def create_mutable_seeds
- system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{TEST_REPO_PATH} #{TEST_MUTABLE_REPO_PATH}),
- chdir: SEED_STORAGE_PATH,
- out: '/dev/null',
- err: '/dev/null')
-
- mutable_repo_full_path = File.join(SEED_STORAGE_PATH, TEST_MUTABLE_REPO_PATH)
- system(git_env, *%W(#{Gitlab.config.git.bin_path} branch -t feature origin/feature),
- chdir: mutable_repo_full_path, out: '/dev/null', err: '/dev/null')
-
- system(git_env, *%W(#{Gitlab.config.git.bin_path} remote add expendable #{GITLAB_GIT_TEST_REPO_URL}),
- chdir: mutable_repo_full_path, out: '/dev/null', err: '/dev/null')
- end
-
- def create_broken_seeds
- system(git_env, *%W(#{Gitlab.config.git.bin_path} clone --bare #{TEST_REPO_PATH} #{TEST_BROKEN_REPO_PATH}),
- chdir: SEED_STORAGE_PATH,
- out: '/dev/null',
- err: '/dev/null')
-
- refs_path = File.join(SEED_STORAGE_PATH, TEST_BROKEN_REPO_PATH, 'refs')
-
- FileUtils.rm_r(refs_path)
- end
-end
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index c08e35912c3..f41457d2420 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -81,7 +81,7 @@ module StubConfiguration
messages['default'] ||= Gitlab.config.repositories.storages.default
messages.each do |storage_name, storage_hash|
if !storage_hash.key?('path') || storage_hash['path'] == Gitlab::GitalyClient::StorageSettings::Deprecated
- storage_hash['path'] = TestEnv.repos_path
+ storage_hash['path'] = Gitlab::GitalyClient::StorageSettings.allow_disk_access { TestEnv.repos_path }
end
messages[storage_name] = Gitlab::GitalyClient::StorageSettings.new(storage_hash.to_h)
diff --git a/spec/support/helpers/stub_experiments.rb b/spec/support/helpers/stub_experiments.rb
deleted file mode 100644
index 8995b8f5f7b..00000000000
--- a/spec/support/helpers/stub_experiments.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-module StubExperiments
- # Stub Experiment with `key: true/false`
- #
- # @param [Hash] experiment where key is feature name and value is boolean whether active or not.
- #
- # Examples
- # - `stub_experiment(signup_flow: false)` ... Disables `signup_flow` experiment.
- def stub_experiment(experiments)
- allow(Gitlab::Experimentation).to receive(:active?).and_call_original
-
- experiments.each do |experiment_key, enabled|
- allow(Gitlab::Experimentation).to receive(:active?).with(experiment_key) { enabled }
- end
- end
-
- # Stub Experiment for user with `key: true/false`
- #
- # @param [Hash] experiment where key is feature name and value is boolean whether enabled or not.
- #
- # Examples
- # - `stub_experiment_for_subject(signup_flow: false)` ... Disable `signup_flow` experiment for user.
- def stub_experiment_for_subject(experiments)
- allow(Gitlab::Experimentation).to receive(:in_experiment_group?).and_call_original
-
- experiments.each do |experiment_key, enabled|
- allow(Gitlab::Experimentation).to receive(:in_experiment_group?).with(experiment_key, anything) { enabled }
- end
- end
-
- private
-
- def feature_flag_suffix
- Gitlab::Experimentation::Experiment::FEATURE_FLAG_SUFFIX
- end
-end
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index 749554f7786..e5c30769531 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -96,9 +96,9 @@ module StubGitlabCalls
def stub_commonmark_sourcepos_disabled
render_options = Banzai::Filter::MarkdownEngines::CommonMark::RENDER_OPTIONS
- allow_any_instance_of(Banzai::Filter::MarkdownEngines::CommonMark)
- .to receive(:render_options)
- .and_return(render_options)
+ allow_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance|
+ allow(instance).to receive(:render_options).and_return(render_options)
+ end
end
private
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 661c1c683b0..87e2a71b1cd 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -13,13 +13,16 @@ module StubObjectStorage
enabled: true,
proxy_download: false,
background_upload: false,
- direct_upload: false
+ direct_upload: false,
+ cdn: {}
)
+
new_config = config.to_h.deep_symbolize_keys.merge({
enabled: enabled,
proxy_download: proxy_download,
background_upload: background_upload,
- direct_upload: direct_upload
+ direct_upload: direct_upload,
+ cdn: cdn
})
# Needed for ObjectStorage::Config compatibility
@@ -30,6 +33,10 @@ module StubObjectStorage
allow(config).to receive(:background_upload) { background_upload }
allow(config).to receive(:direct_upload) { direct_upload }
+ uploader_config = Settingslogic.new(new_config.deep_stringify_keys)
+ allow(uploader).to receive(:object_store_options).and_return(uploader_config)
+ allow(uploader.options).to receive(:object_store).and_return(uploader_config)
+
return unless enabled
stub_object_storage(connection_params: uploader.object_store_credentials,
@@ -74,6 +81,12 @@ module StubObjectStorage
**params)
end
+ def stub_rpm_repository_file_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.packages.object_store,
+ uploader: ::Packages::Rpm::RepositoryFileUploader,
+ **params)
+ end
+
def stub_composer_cache_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.packages.object_store,
uploader: ::Packages::Composer::CacheUploader,
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 691f978550a..c58353558df 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -295,14 +295,6 @@ module TestEnv
end
end
- def rm_storage_dir(storage, dir)
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- target_repo_refs_path = File.join(GitalySetup.repos_path(storage), dir)
- FileUtils.remove_dir(target_repo_refs_path)
- end
- rescue Errno::ENOENT
- end
-
def storage_dir_exists?(storage, dir)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
File.exist?(File.join(GitalySetup.repos_path(storage), dir))
@@ -348,6 +340,14 @@ module TestEnv
Capybara.current_session.visit '/'
end
+ def factory_repo_path
+ @factory_repo_path ||= Rails.root.join('tmp', 'tests', factory_repo_name)
+ end
+
+ def forked_repo_path
+ @forked_repo_path ||= Rails.root.join('tmp', 'tests', forked_repo_name)
+ end
+
def factory_repo_bundle_path
"#{factory_repo_path}.bundle"
end
@@ -377,18 +377,10 @@ module TestEnv
]
end
- def factory_repo_path
- @factory_repo_path ||= Rails.root.join('tmp', 'tests', factory_repo_name)
- end
-
def factory_repo_name
'gitlab-test'
end
- def forked_repo_path
- @forked_repo_path ||= Rails.root.join('tmp', 'tests', forked_repo_name)
- end
-
def forked_repo_name
'gitlab-test-fork'
end
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index 1aea3545ae0..b4f0cbd8527 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -1,21 +1,6 @@
# frozen_string_literal: true
module UsageDataHelpers
- SMAU_KEYS = %i(
- snippet_create
- snippet_update
- snippet_comment
- merge_request_comment
- commit_comment
- wiki_pages_create
- wiki_pages_update
- wiki_pages_delete
- navbar_searches
- cycle_analytics_views
- productivity_analytics_views
- source_code_pushes
- ).freeze
-
COUNTS_KEYS = %i(
assignee_lists
ci_builds
@@ -83,7 +68,6 @@ module UsageDataHelpers
projects_with_error_tracking_enabled
projects_with_enabled_alert_integrations
projects_with_expiration_policy_enabled
- projects_with_expiration_policy_disabled
projects_with_expiration_policy_enabled_with_keep_n_unset
projects_with_expiration_policy_enabled_with_keep_n_set_to_1
projects_with_expiration_policy_enabled_with_keep_n_set_to_5
@@ -118,7 +102,7 @@ module UsageDataHelpers
uploads
web_hooks
user_preferences_user_gitpod_enabled
- ).push(*SMAU_KEYS)
+ ).freeze
USAGE_DATA_KEYS = %i(
active_user_count
diff --git a/spec/support/helpers/user_helpers.rb b/spec/support/helpers/user_helpers.rb
new file mode 100644
index 00000000000..30fa5b3ad8d
--- /dev/null
+++ b/spec/support/helpers/user_helpers.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module UserHelpers
+ def create_user_from_membership(target, membership)
+ generate_user_from_membership(:create, target, membership)
+ end
+
+ def build_user_from_membership(target, membership)
+ generate_user_from_membership(:build, target, membership)
+ end
+
+ private
+
+ # @param method [Symbol] FactoryBot methods :create, :build, :build_stubbed
+ # @param target [Project, Group] membership target
+ # @param membership [Symbol] accepts the membership levels :guest, :reporter...
+ # and pseudo levels :non_member and :anonymous
+ def generate_user_from_membership(method, target, membership)
+ case membership
+ when :anonymous
+ nil
+ when :non_member
+ FactoryBot.send(method, :user, name: membership)
+ when :admin
+ FactoryBot.send(method, :user, :admin, name: 'admin')
+ else
+ # `.tap` can only be used with `create`, and if we want to `build` a user,
+ # it is more performant than creating a `project_member` or `group_member`
+ # with a built user
+ create(:user, name: membership).tap { |u| target.add_member(u, membership) }
+ end
+ end
+end
diff --git a/spec/support/matchers/event_store.rb b/spec/support/matchers/event_store.rb
index 4ecb924b3ed..582ea202187 100644
--- a/spec/support/matchers/event_store.rb
+++ b/spec/support/matchers/event_store.rb
@@ -6,7 +6,7 @@ RSpec::Matchers.define :publish_event do |expected_event_class|
supports_block_expectations
match do |proc|
- raise ArgumentError, 'This matcher only supports block expectation' unless proc.respond_to?(:call)
+ raise ArgumentError, 'publish_event matcher only supports block expectation' unless proc.respond_to?(:call)
@events ||= []
@@ -22,6 +22,8 @@ RSpec::Matchers.define :publish_event do |expected_event_class|
end
def match_data?(actual, expected)
+ return if actual.blank? || expected.blank?
+
values_match?(actual.keys, expected.keys) &&
actual.keys.all? do |key|
values_match?(expected[key], actual[key])
@@ -33,11 +35,20 @@ RSpec::Matchers.define :publish_event do |expected_event_class|
end
failure_message do
- "expected #{expected_event_class} with #{@expected_data} to be published, but got #{@events}"
+ message = "expected #{expected_event_class} with #{@expected_data || 'no data'} to be published"
+
+ if @events.present?
+ <<~MESSAGE
+ #{message}, but only the following events were published:
+ #{events_list}
+ MESSAGE
+ else
+ "#{message}, but no events were published."
+ end
end
match_when_negated do |proc|
- raise ArgumentError, 'This matcher only supports block expectation' unless proc.respond_to?(:call)
+ raise ArgumentError, 'publish_event matcher only supports block expectation' unless proc.respond_to?(:call)
allow(Gitlab::EventStore).to receive(:publish)
@@ -45,4 +56,48 @@ RSpec::Matchers.define :publish_event do |expected_event_class|
expect(Gitlab::EventStore).not_to have_received(:publish).with(instance_of(expected_event_class))
end
+
+ def events_list
+ @events.map do |event|
+ " - #{event.class.name} with #{event.data}"
+ end.join("\n")
+ end
+end
+
+# not_publish_event enables multiple assertions on a single block, for example:
+# expect { Model.create(invalid: :attribute) }
+# .to not_change(Model, :count)
+# .and not_publish_event(ModelCreated)
+RSpec::Matchers.define :not_publish_event do |expected_event_class|
+ include RSpec::Matchers::Composable
+
+ supports_block_expectations
+
+ match do |proc|
+ raise ArgumentError, 'not_publish_event matcher only supports block expectation' unless proc.respond_to?(:call)
+
+ @events ||= []
+
+ allow(Gitlab::EventStore).to receive(:publish) do |published_event|
+ @events << published_event
+ end
+
+ proc.call
+
+ @events.none? do |event|
+ event.instance_of?(expected_event_class)
+ end
+ end
+
+ failure_message do
+ "expected #{expected_event_class} not to be published"
+ end
+
+ chain :with do |_| # rubocop: disable Lint/UnreachableLoop
+ raise ArgumentError, 'not_publish_event does not permit .with to avoid ambiguity'
+ end
+
+ match_when_negated do |proc|
+ raise ArgumentError, 'not_publish_event matcher does not support negation. Use `expect {}.to publish_event` instead'
+ end
end
diff --git a/spec/support/migration.rb b/spec/support/migration.rb
index 3c359af886d..24e2fc2ff31 100644
--- a/spec/support/migration.rb
+++ b/spec/support/migration.rb
@@ -19,13 +19,9 @@ RSpec.configure do |config|
# Each example may call `migrate!`, so we must ensure we are migrated down every time
config.before(:each, :migration) do
use_fake_application_settings
-
- schema_migrate_down!
end
config.after(:context, :migration) do
- schema_migrate_up!
-
Gitlab::CurrentSettings.clear_in_memory_application_settings!
end
end
diff --git a/spec/support/models/partitionable_check.rb b/spec/support/models/partitionable_check.rb
new file mode 100644
index 00000000000..2c09c1b3408
--- /dev/null
+++ b/spec/support/models/partitionable_check.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module PartitioningTesting
+ module CascadeCheck
+ extend ActiveSupport::Concern
+
+ included do
+ after_create :check_partition_cascade_value
+ end
+
+ def check_partition_cascade_value
+ raise 'Partition value not found' unless partition_scope_value
+ raise 'Default value detected' if partition_id == 100
+
+ return if partition_id == partition_scope_value
+
+ raise "partition_id was expected to equal #{partition_scope_value} but it was #{partition_id}."
+ end
+ end
+
+ module DefaultPartitionValue
+ extend ActiveSupport::Concern
+
+ class_methods do
+ def current_partition_value
+ current = super
+
+ if current == 100
+ 54321
+ else
+ current
+ end
+ end
+ end
+ end
+end
+
+Ci::Partitionable::Testing::PARTITIONABLE_MODELS.each do |klass|
+ model = klass.safe_constantize
+
+ if klass == 'Ci::Pipeline'
+ model.prepend(PartitioningTesting::DefaultPartitionValue)
+ else
+ model.include(PartitioningTesting::CascadeCheck)
+ end
+end
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index b5e3d707d50..c4377e368ee 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -189,7 +189,6 @@
- './ee/spec/controllers/subscriptions_controller_spec.rb'
- './ee/spec/controllers/subscriptions/groups_controller_spec.rb'
- './ee/spec/controllers/trial_registrations_controller_spec.rb'
-- './ee/spec/controllers/trials_controller_spec.rb'
- './ee/spec/controllers/users_controller_spec.rb'
- './ee/spec/db/production/license_spec.rb'
- './ee/spec/elastic_integration/global_search_spec.rb'
@@ -533,7 +532,6 @@
- './ee/spec/features/trial_registrations/company_information_spec.rb'
- './ee/spec/features/trial_registrations/signin_spec.rb'
- './ee/spec/features/trial_registrations/signup_spec.rb'
-- './ee/spec/features/trials/capture_lead_spec.rb'
- './ee/spec/features/trials/select_namespace_spec.rb'
- './ee/spec/features/trials/show_trial_banner_spec.rb'
- './ee/spec/features/users/arkose_labs_csp_spec.rb'
@@ -2017,7 +2015,6 @@
- './ee/spec/models/milestone_spec.rb'
- './ee/spec/models/namespace_limit_spec.rb'
- './ee/spec/models/namespace_setting_spec.rb'
-- './ee/spec/models/namespaces/free_user_cap/preview_spec.rb'
- './ee/spec/models/namespaces/free_user_cap_spec.rb'
- './ee/spec/models/namespaces/free_user_cap/standard_spec.rb'
- './ee/spec/models/namespaces/storage/root_excess_size_spec.rb'
@@ -3266,12 +3263,10 @@
- './ee/spec/services/users_ops_dashboard_projects/destroy_service_spec.rb'
- './ee/spec/services/users/update_highest_member_role_service_spec.rb'
- './ee/spec/services/vulnerabilities/confirm_service_spec.rb'
-- './ee/spec/services/vulnerabilities/create_from_security_finding_service_spec.rb'
- './ee/spec/services/vulnerabilities/create_service_spec.rb'
- './ee/spec/services/vulnerabilities/destroy_dismissal_feedback_service_spec.rb'
- './ee/spec/services/vulnerabilities/dismiss_service_spec.rb'
- './ee/spec/services/vulnerabilities/finding_dismiss_service_spec.rb'
-- './ee/spec/services/vulnerabilities/findings/create_from_security_finding_service_spec.rb'
- './ee/spec/services/vulnerabilities/historical_statistics/adjustment_service_spec.rb'
- './ee/spec/services/vulnerabilities/historical_statistics/deletion_service_spec.rb'
- './ee/spec/services/vulnerabilities/manually_create_service_spec.rb'
@@ -8850,7 +8845,6 @@
- './spec/presenters/award_emoji_presenter_spec.rb'
- './spec/presenters/blob_presenter_spec.rb'
- './spec/presenters/blobs/notebook_presenter_spec.rb'
-- './spec/presenters/blobs/unfold_presenter_spec.rb'
- './spec/presenters/ci/bridge_presenter_spec.rb'
- './spec/presenters/ci/build_presenter_spec.rb'
- './spec/presenters/ci/build_runner_presenter_spec.rb'
@@ -9535,7 +9529,6 @@
- './spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb'
- './spec/serializers/deploy_keys/deploy_key_entity_spec.rb'
- './spec/serializers/deployment_cluster_entity_spec.rb'
-- './spec/serializers/deployment_entity_spec.rb'
- './spec/serializers/deployment_serializer_spec.rb'
- './spec/serializers/detailed_status_entity_spec.rb'
- './spec/serializers/diff_file_base_entity_spec.rb'
@@ -9552,7 +9545,6 @@
- './spec/serializers/entity_request_spec.rb'
- './spec/serializers/environment_entity_spec.rb'
- './spec/serializers/environment_serializer_spec.rb'
-- './spec/serializers/environment_status_entity_spec.rb'
- './spec/serializers/evidences/evidence_entity_spec.rb'
- './spec/serializers/evidences/evidence_serializer_spec.rb'
- './spec/serializers/evidences/issue_entity_spec.rb'
@@ -9634,7 +9626,6 @@
- './spec/serializers/personal_access_token_entity_spec.rb'
- './spec/serializers/personal_access_token_serializer_spec.rb'
- './spec/serializers/pipeline_details_entity_spec.rb'
-- './spec/serializers/pipeline_serializer_spec.rb'
- './spec/serializers/project_access_token_entity_spec.rb'
- './spec/serializers/project_access_token_serializer_spec.rb'
- './spec/serializers/project_import_entity_spec.rb'
@@ -10017,7 +10008,6 @@
- './spec/services/incident_management/issuable_escalation_statuses/after_update_service_spec.rb'
- './spec/services/incident_management/issuable_escalation_statuses/build_service_spec.rb'
- './spec/services/incident_management/issuable_escalation_statuses/create_service_spec.rb'
-- './spec/services/incident_management/issuable_escalation_statuses/prepare_update_service_spec.rb'
- './spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb'
- './spec/services/incident_management/pager_duty/process_webhook_service_spec.rb'
- './spec/services/incident_management/timeline_events/create_service_spec.rb'
diff --git a/spec/support/services/issuable_update_service_shared_examples.rb b/spec/support/services/issuable_update_service_shared_examples.rb
index c168df7a7d2..94061b140f4 100644
--- a/spec/support/services/issuable_update_service_shared_examples.rb
+++ b/spec/support/services/issuable_update_service_shared_examples.rb
@@ -47,7 +47,7 @@ RSpec.shared_examples 'broadcasting issuable labels updates' do
it 'triggers the GraphQL subscription' do
expect(GraphqlTriggers).to receive(:issuable_labels_updated).with(issuable)
- update_issuable({ add_label_ids: [label_b.id] })
+ update_issuable(add_label_ids: [label_b.id])
end
end
@@ -55,7 +55,7 @@ RSpec.shared_examples 'broadcasting issuable labels updates' do
it 'triggers the GraphQL subscription' do
expect(GraphqlTriggers).to receive(:issuable_labels_updated).with(issuable)
- update_issuable({ remove_label_ids: [label_a.id] })
+ update_issuable(remove_label_ids: [label_a.id])
end
end
@@ -63,7 +63,7 @@ RSpec.shared_examples 'broadcasting issuable labels updates' do
it 'does not trigger the GraphQL subscription' do
expect(GraphqlTriggers).not_to receive(:issuable_labels_updated).with(issuable)
- update_issuable({ label_ids: [label_a.id] })
+ update_issuable(label_ids: [label_a.id])
end
end
end
diff --git a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
index 91b6baac610..8a64efe9df5 100644
--- a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
@@ -50,8 +50,8 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
allow_gitaly_n_plus_1 { create(:project, group: subgroup) }
end
- let!(:label) { create(:label, project: project1) }
- let!(:label2) { create(:label, project: project1) }
+ let_it_be(:label) { create(:label, project: project1) }
+ let_it_be(:label2) { create(:label, project: project1) }
let!(:merge_request1) do
create(:merge_request, assignees: [user], author: user, reviewers: [user2],
@@ -87,13 +87,16 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
let!(:label_link) { create(:label_link, label: label, target: merge_request2) }
let!(:label_link2) { create(:label_link, label: label2, target: merge_request3) }
- before do
+ before_all do
project1.add_maintainer(user)
- project2.add_developer(user)
- project3.add_developer(user)
project4.add_developer(user)
project5.add_developer(user)
project6.add_developer(user)
+ end
+
+ before do
+ project2.add_developer(user)
+ project3.add_developer(user)
project2.add_developer(user2)
end
diff --git a/spec/support/shared_contexts/html_safe_shared_context.rb b/spec/support/shared_contexts/html_safe_shared_context.rb
new file mode 100644
index 00000000000..9bdaea9fe64
--- /dev/null
+++ b/spec/support/shared_contexts/html_safe_shared_context.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'when rendered has no HTML escapes' do
+ # Check once per example if `rendered` contains HTML escapes.
+ let(:rendered) do |example|
+ super().tap do |rendered|
+ next if example.metadata[:skip_html_escaped_tags_check]
+
+ HtmlEscapedHelpers.ensure_no_html_escaped_tags!(rendered, example)
+ end
+ end
+end
+
+RSpec.shared_context 'when page has no HTML escapes' do
+ # Check once per example if `page` contains HTML escapes.
+ let(:page) do |example|
+ super().tap do |page|
+ next if example.metadata[:skip_html_escaped_tags_check]
+
+ HtmlEscapedHelpers.ensure_no_html_escaped_tags!(page.native.to_s, example)
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb b/spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb
index 7c8b6250d24..1963248142c 100644
--- a/spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb
+++ b/spec/support/shared_contexts/lib/api/helpers/packages/dependency_proxy_helpers_shared_context.rb
@@ -1,6 +1,11 @@
# frozen_string_literal: true
RSpec.shared_context 'dependency proxy helpers context' do
+ def allow_fetch_cascade_application_setting(attribute:, return_value:)
+ allow(Gitlab::CurrentSettings).to receive(:public_send).with(attribute.to_sym).and_return(return_value)
+ allow(Gitlab::CurrentSettings).to receive(:public_send).with("lock_#{attribute}").and_return(false)
+ end
+
def allow_fetch_application_setting(attribute:, return_value:)
attributes = double
allow(::Gitlab::CurrentSettings.current_application_settings).to receive(:attributes).and_return(attributes)
diff --git a/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb b/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb
index 168aef0f174..72e23e6d5fa 100644
--- a/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb
+++ b/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb
@@ -13,6 +13,8 @@ RSpec.shared_context 'API::Markdown Golden Master shared context' do |markdown_y
let_it_be(:project) { create(:project, :public, :repository, group: group) }
let_it_be(:label) { create(:label, project: project, title: 'bug') }
+ let_it_be(:label2) { create(:label, project: project, title: 'UX bug') }
+
let_it_be(:milestone) { create(:milestone, project: project, title: '1.1') }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 893d3702407..bb1b794c2b6 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -74,6 +74,8 @@ RSpec.shared_context 'GroupPolicy context' do
read_group_runners
admin_group_runners
register_group_runners
+ read_billing
+ edit_billing
]
end
diff --git a/spec/support/shared_contexts/rubocop_default_rspec_language_config_context.rb b/spec/support/shared_contexts/rubocop_default_rspec_language_config_context.rb
new file mode 100644
index 00000000000..a207c6ae9d1
--- /dev/null
+++ b/spec/support/shared_contexts/rubocop_default_rspec_language_config_context.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+# From https://github.com/rubocop/rubocop-rspec/blob/master/spec/shared/default_rspec_language_config_context.rb
+# This can be removed once we have https://github.com/rubocop/rubocop-rspec/pull/1377
+
+RSpec.shared_context 'with default RSpec/Language config' do
+ include_context 'config'
+
+ # Deep duplication is needed to prevent config leakage between examples
+ let(:other_cops) do
+ default_language = RuboCop::ConfigLoader
+ .default_configuration['RSpec']['Language']
+ default_include = RuboCop::ConfigLoader
+ .default_configuration['RSpec']['Include']
+ { 'RSpec' =>
+ {
+ 'Include' => default_include,
+ 'Language' => deep_dup(default_language)
+ } }
+ end
+
+ def deep_dup(object)
+ case object
+ when Array
+ object.map { |item| deep_dup(item) }
+ when Hash
+ object.transform_values { |value| deep_dup(value) }
+ else
+ object # only collections undergo modifications and need duping
+ end
+ end
+end
diff --git a/spec/support/shared_contexts/services/packages/rpm/xml_shared_context.rb b/spec/support/shared_contexts/services/packages/rpm/xml_shared_context.rb
new file mode 100644
index 00000000000..784092d40da
--- /dev/null
+++ b/spec/support/shared_contexts/services/packages/rpm/xml_shared_context.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'with rpm package data' do
+ def xml_update_params
+ Gitlab::Json.parse(fixture_file('packages/rpm/payload.json')).with_indifferent_access
+ end
+end
diff --git a/spec/support/shared_contexts/views/html_safe_render_shared_context.rb b/spec/support/shared_contexts/views/html_safe_render_shared_context.rb
deleted file mode 100644
index 3acca60c901..00000000000
--- a/spec/support/shared_contexts/views/html_safe_render_shared_context.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_context 'when rendered view has no HTML escapes', type: :view do
- # Check once per example if `rendered` contains HTML escapes.
- let(:rendered) do |example|
- super().tap do |rendered|
- next if example.metadata[:skip_html_escaped_tags_check]
-
- ensure_no_html_escaped_tags!(rendered, example)
- end
- end
-
- def ensure_no_html_escaped_tags!(content, example)
- match_data = HtmlEscapedHelpers.match_html_escaped_tags(content)
- return unless match_data
-
- # Truncate
- pre_match = match_data.pre_match.last(50)
- match = match_data[0]
- post_match = match_data.post_match.first(50)
-
- string = "#{pre_match}«#{match}»#{post_match}"
-
- raise <<~MESSAGE
- The following string contains HTML escaped tags:
-
- #{string}
-
- Please consider using `.html_safe`.
-
- This check can be disabled via:
-
- it #{example.description.inspect}, :skip_html_escaped_tags_check do
- ...
- end
-
- MESSAGE
- end
-end
diff --git a/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb b/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb
index 7e7460cd602..cd4432af4ed 100644
--- a/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb
+++ b/spec/support/shared_examples/bulk_imports/common/pipelines/wiki_pipeline_examples.rb
@@ -57,5 +57,53 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do
expect(tracker.entity.failures.first.exception_message).to eq('Only allowed schemes are http, https')
end
end
+
+ context 'when wiki is disabled' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client)
+ .to receive(:get)
+ .and_raise(
+ BulkImports::NetworkError.new(
+ 'Unsuccessful response 403 from ...',
+ response: response_double
+ )
+ )
+ end
+ end
+
+ describe 'unsuccessful response' do
+ shared_examples 'does not raise an error' do
+ it 'does not raise an error' do
+ expect(parent.wiki).not_to receive(:ensure_repository)
+ expect(parent.wiki.repository).not_to receive(:ensure_repository)
+
+ expect { subject.run }.not_to raise_error
+ end
+ end
+
+ context 'when response is forbidden' do
+ let(:response_double) { instance_double(HTTParty::Response, forbidden?: true, code: 403) }
+
+ include_examples 'does not raise an error'
+ end
+
+ context 'when response is not found' do
+ let(:response_double) { instance_double(HTTParty::Response, forbidden?: false, not_found?: true) }
+
+ include_examples 'does not raise an error'
+ end
+
+ context 'when response is not 403' do
+ let(:response_double) { instance_double(HTTParty::Response, forbidden?: false, not_found?: false, code: 301) }
+
+ it 'marks tracker as failed' do
+ subject.run
+
+ expect(tracker.failed?).to eq(true)
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
index 4af3c0cc6cc..6749ebd471f 100644
--- a/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
+++ b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
@@ -32,7 +32,8 @@ RSpec.shared_examples 'Snowplow event tracking' do |overrides: {}|
user: try(:user),
project: try(:project),
label: try(:label),
- property: try(:property)
+ property: try(:property),
+ context: try(:context)
}.merge(overrides).compact.merge(extra)
subject
@@ -40,3 +41,12 @@ RSpec.shared_examples 'Snowplow event tracking' do |overrides: {}|
expect_snowplow_event(**params)
end
end
+
+RSpec.shared_examples 'Snowplow event tracking with RedisHLL context' do |overrides: {}|
+ it_behaves_like 'Snowplow event tracking', overrides: overrides do
+ let(:context) do
+ event = try(:property) || action
+ [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event).to_context.to_json]
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index 885c0229038..5d77ed5fdfc 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -48,7 +48,7 @@ RSpec.shared_examples 'wiki controller actions' do
context 'when the wiki repository cannot be created' do
before do
expect(Wiki).to receive(:for_container).and_return(wiki)
- expect(wiki).to receive(:wiki) { raise Wiki::CouldNotCreateWikiError }
+ expect(wiki).to receive(:create_wiki_repository) { raise Wiki::CouldNotCreateWikiError }
end
it 'redirects to the wiki container and displays an error message' do
@@ -200,7 +200,7 @@ RSpec.shared_examples 'wiki controller actions' do
context 'the sidebar fails to load' do
before do
allow(Wiki).to receive(:for_container).and_return(wiki)
- wiki.wiki
+ wiki.create_wiki_repository
expect(wiki).to receive(:find_sidebar) do
raise ::Gitlab::Git::CommandTimedOut, 'Deadline Exceeded'
end
@@ -288,7 +288,7 @@ RSpec.shared_examples 'wiki controller actions' do
context 'when page is a file' do
include WikiHelpers
- where(:file_name) { ['dk.png', 'unsanitized.svg', 'git-cheat-sheet.pdf'] }
+ where(:file_name) { ['dk.png', 'unsanitized.svg', 'sample.pdf'] }
with_them do
let(:id) { upload_file_to_wiki(wiki, user, file_name) }
@@ -300,7 +300,7 @@ RSpec.shared_examples 'wiki controller actions' do
expect(response.headers['Content-Disposition']).to match(/^inline/)
expect(response.headers[Gitlab::Workhorse::DETECT_HEADER]).to eq('true')
expect(response.cache_control[:public]).to be(false)
- expect(response.headers['Cache-Control']).to eq('max-age=60, private')
+ expect(response.headers['Cache-Control']).to eq('max-age=60, private, must-revalidate, stale-while-revalidate=60, stale-if-error=300, s-maxage=60')
end
end
end
diff --git a/spec/support/shared_examples/features/access_tokens_shared_examples.rb b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
index 0fc45b154d8..cd255abd7a8 100644
--- a/spec/support/shared_examples/features/access_tokens_shared_examples.rb
+++ b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
@@ -10,11 +10,11 @@ end
RSpec.shared_examples 'resource access tokens creation' do |resource_type|
def active_resource_access_tokens
- find('.table.active-tokens')
+ find("[data-testid='active-tokens']")
end
def created_resource_access_token
- find('#created-personal-access-token').value
+ find_field('new-access-token').value
end
it 'allows creation of an access token', :aggregate_failures do
@@ -106,7 +106,7 @@ end
RSpec.shared_examples 'active resource access tokens' do
def active_resource_access_tokens
- find('.table.active-tokens')
+ find("[data-testid='active-tokens']")
end
it 'shows active access tokens' do
@@ -129,24 +129,22 @@ RSpec.shared_examples 'active resource access tokens' do
end
RSpec.shared_examples 'inactive resource access tokens' do |no_active_tokens_text|
- def no_resource_access_tokens_message
- find('.settings-message')
+ def active_resource_access_tokens
+ find("[data-testid='active-tokens']")
end
it 'allows revocation of an active token' do
visit resource_settings_access_tokens_path
accept_gl_confirm(button_text: 'Revoke') { click_on 'Revoke' }
- expect(page).to have_selector('.settings-message')
- expect(no_resource_access_tokens_message).to have_text(no_active_tokens_text)
+ expect(active_resource_access_tokens).to have_text(no_active_tokens_text)
end
it 'removes expired tokens from active section' do
resource_access_token.update!(expires_at: 5.days.ago)
visit resource_settings_access_tokens_path
- expect(page).to have_selector('.settings-message')
- expect(no_resource_access_tokens_message).to have_text(no_active_tokens_text)
+ expect(active_resource_access_tokens).to have_text(no_active_tokens_text)
end
context 'when resource access token creation is not allowed' do
@@ -158,8 +156,39 @@ RSpec.shared_examples 'inactive resource access tokens' do |no_active_tokens_tex
visit resource_settings_access_tokens_path
accept_gl_confirm(button_text: 'Revoke') { click_on 'Revoke' }
- expect(page).to have_selector('.settings-message')
- expect(no_resource_access_tokens_message).to have_text(no_active_tokens_text)
+ expect(active_resource_access_tokens).to have_text(no_active_tokens_text)
+ end
+ end
+end
+
+RSpec.shared_examples '#create access token' do
+ let(:url) { {} }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:token_attributes) { attributes_for(:personal_access_token) }
+
+ before do
+ sign_in(admin)
+ end
+
+ context "when POST is successful" do
+ it "renders JSON with a new token" do
+ post url, params: { personal_access_token: token_attributes }
+
+ parsed_body = Gitlab::Json.parse(response.body)
+ expect(parsed_body['new_token']).not_to be_blank
+ expect(parsed_body['errors']).to be_blank
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+
+ context "when POST is unsuccessful" do
+ it "renders JSON with an error" do
+ post url, params: { personal_access_token: token_attributes.merge(scopes: []) }
+
+ parsed_body = Gitlab::Json.parse(response.body)
+ expect(parsed_body['new_token']).to be_blank
+ expect(parsed_body['errors']).not_to be_blank
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
diff --git a/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb b/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
index f7cdc4c61ec..8a07e52019c 100644
--- a/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
+++ b/spec/support/shared_examples/features/comments_on_merge_request_files_shared_examples.rb
@@ -1,10 +1,6 @@
# frozen_string_literal: true
RSpec.shared_examples 'comment on merge request file' do
- before do
- stub_feature_flags(remove_user_attributes_projects: false)
- end
-
it 'adds a comment' do
click_diff_line(find_by_scrolling("[id='#{sample_commit.line_code}']"))
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index 21f264a8b6a..7863548e7f3 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -35,6 +35,34 @@ RSpec.shared_examples 'edits content using the content editor' do
attach_file('content_editor_image', Rails.root.join('spec', 'fixtures', fixture_name), make_visible: true)
end
+ def wait_until_hidden_field_is_updated(value)
+ expect(page).to have_field('wiki[content]', with: value, type: 'hidden')
+ end
+
+ it 'saves page content in local storage if the user navigates away' do
+ switch_to_content_editor
+
+ expect(page).to have_css(content_editor_testid)
+
+ type_in_content_editor ' Typing text in the content editor'
+
+ wait_until_hidden_field_is_updated /Typing text in the content editor/
+
+ refresh
+
+ expect(page).to have_text('Typing text in the content editor')
+
+ refresh # also retained after second refresh
+
+ expect(page).to have_text('Typing text in the content editor')
+
+ click_link 'Cancel' # draft is deleted on cancel
+
+ page.go_back
+
+ expect(page).not_to have_text('Typing text in the content editor')
+ end
+
describe 'formatting bubble menu' do
it 'shows a formatting bubble menu for a regular paragraph and headings' do
switch_to_content_editor
@@ -189,4 +217,101 @@ RSpec.shared_examples 'edits content using the content editor' do
end
end
end
+
+ describe 'autocomplete suggestions' do
+ let(:suggestions_dropdown) { '[data-testid="content-editor-suggestions-dropdown"]' }
+
+ before do
+ if defined?(project)
+ create(:issue, project: project, title: 'My Cool Linked Issue')
+ create(:merge_request, source_project: project, title: 'My Cool Merge Request')
+ create(:label, project: project, title: 'My Cool Label')
+ create(:milestone, project: project, title: 'My Cool Milestone')
+
+ project.add_maintainer(create(:user, name: 'abc123', username: 'abc123'))
+ else # group wikis
+ project = create(:project, group: group)
+
+ create(:issue, project: project, title: 'My Cool Linked Issue')
+ create(:merge_request, source_project: project, title: 'My Cool Merge Request')
+ create(:group_label, group: group, title: 'My Cool Label')
+ create(:milestone, group: group, title: 'My Cool Milestone')
+
+ project.add_maintainer(create(:user, name: 'abc123', username: 'abc123'))
+ end
+
+ switch_to_content_editor
+
+ type_in_content_editor :enter
+ end
+
+ it 'shows suggestions for members with descriptions' do
+ type_in_content_editor '@a'
+
+ expect(find(suggestions_dropdown)).to have_text('abc123')
+ expect(find(suggestions_dropdown)).to have_text('all')
+ expect(find(suggestions_dropdown)).to have_text('Group Members (2)')
+
+ send_keys [:arrow_down, :enter]
+
+ expect(page).not_to have_css(suggestions_dropdown)
+ expect(page).to have_text('@abc123')
+ end
+
+ it 'shows suggestions for merge requests' do
+ type_in_content_editor '!'
+
+ expect(find(suggestions_dropdown)).to have_text('My Cool Merge Request')
+
+ send_keys :enter
+
+ expect(page).not_to have_css(suggestions_dropdown)
+ expect(page).to have_text('!1')
+ end
+
+ it 'shows suggestions for issues' do
+ type_in_content_editor '#'
+
+ expect(find(suggestions_dropdown)).to have_text('My Cool Linked Issue')
+
+ send_keys :enter
+
+ expect(page).not_to have_css(suggestions_dropdown)
+ expect(page).to have_text('#1')
+ end
+
+ it 'shows suggestions for milestones' do
+ type_in_content_editor '%'
+
+ expect(find(suggestions_dropdown)).to have_text('My Cool Milestone')
+
+ send_keys :enter
+
+ expect(page).not_to have_css(suggestions_dropdown)
+ expect(page).to have_text('%My Cool Milestone')
+ end
+
+ it 'shows suggestions for emojis' do
+ type_in_content_editor ':smile'
+
+ expect(find(suggestions_dropdown)).to have_text('🙂 slight_smile')
+ expect(find(suggestions_dropdown)).to have_text('😸 smile_cat')
+
+ send_keys :enter
+
+ expect(page).not_to have_css(suggestions_dropdown)
+
+ expect(page).to have_text('🙂')
+ end
+
+ it 'doesn\'t show suggestions dropdown if there are no suggestions to show' do
+ type_in_content_editor '%'
+
+ expect(find(suggestions_dropdown)).to have_text('My Cool Milestone')
+
+ type_in_content_editor 'x'
+
+ expect(page).not_to have_css(suggestions_dropdown)
+ end
+ end
end
diff --git a/spec/support/shared_examples/features/deploy_token_shared_examples.rb b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
index 79ad5bd6c7f..9fe08e5c996 100644
--- a/spec/support/shared_examples/features/deploy_token_shared_examples.rb
+++ b/spec/support/shared_examples/features/deploy_token_shared_examples.rb
@@ -14,32 +14,32 @@ RSpec.shared_examples 'a deploy token in settings' do
end
end
- it 'add a new deploy token' do
+ it 'add a new deploy token', :js do
visit page_path
- fill_in 'deploy_token_name', with: 'new_deploy_key'
- fill_in 'deploy_token_expires_at', with: (Date.today + 1.month).to_s
- fill_in 'deploy_token_username', with: 'deployer'
- check 'deploy_token_read_repository'
- check 'deploy_token_read_registry'
+ fill_in _('Name'), with: 'new_deploy_key'
+ fill_in _('Expiration date (optional)'), with: (Date.today + 1.month).to_s
+ fill_in _('Username (optional)'), with: 'deployer'
+ check 'read_repository'
+ check 'read_registry'
click_button 'Create deploy token'
expect(page).to have_content("Your new #{entity_type} deploy token has been created")
within('.created-deploy-token-container') do
- expect(page).to have_selector("input[name='deploy-token-user'][value='deployer']")
- expect(page).to have_selector("input[name='deploy-token'][readonly='readonly']")
+ expect(find("input[name='deploy-token-user']").value).to eq("deployer")
+ expect(find("input[name='deploy-token'][readonly='readonly']")).to be_visible
end
- expect(find("input#deploy_token_name").value).to eq nil
+ expect(find("input#deploy_token_name").value).to be_empty
expect(find("input#deploy_token_read_repository").checked?).to eq false
end
- context "with form errors" do
+ context "with form errors", :js do
before do
visit page_path
- fill_in "deploy_token_name", with: "new_deploy_key"
- fill_in "deploy_token_username", with: "deployer"
+ fill_in _('Name'), with: "new_deploy_key"
+ fill_in _('Username (optional)'), with: "deployer"
click_button "Create deploy token"
end
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index f209070d82a..68c0d06e7d0 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -209,7 +209,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
wait_for_all_requests
expect(page).to have_content(comment)
- expect(page).to have_content "@#{user.username} closed"
+ expect(page).to have_content "#{user.name} closed"
new_comment = all(comments_selector).last
@@ -334,7 +334,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
click_button 'Start thread & close issue'
expect(page).to have_content(comment)
- expect(page).to have_content "@#{user.username} closed"
+ expect(page).to have_content "#{user.name} closed"
new_discussion = all(comments_selector)[-2]
diff --git a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
index 0a5ad5a59c0..7737f8a73c5 100644
--- a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
@@ -77,9 +77,9 @@ RSpec.shared_examples 'it uploads and commits a new pdf file' do |drop: false|
end
if drop
- find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'git-cheat-sheet.pdf'))
+ find(".upload-dropzone-card").drop(File.join(Rails.root, 'spec', 'fixtures', 'sample.pdf'))
else
- attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'git-cheat-sheet.pdf'), make_visible: true)
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'sample.pdf'), make_visible: true)
end
page.within('#modal-upload-blob') do
@@ -90,7 +90,7 @@ RSpec.shared_examples 'it uploads and commits a new pdf file' do |drop: false|
wait_for_all_requests
- visit(project_blob_path(project, 'upload_image/git-cheat-sheet.pdf'))
+ visit(project_blob_path(project, 'upload_image/sample.pdf'))
expect(page).to have_css('.js-pdf-viewer')
end
diff --git a/spec/support/shared_examples/features/runners_shared_examples.rb b/spec/support/shared_examples/features/runners_shared_examples.rb
index 31ee08ea9db..1d4af944187 100644
--- a/spec/support/shared_examples/features/runners_shared_examples.rb
+++ b/spec/support/shared_examples/features/runners_shared_examples.rb
@@ -146,6 +146,18 @@ RSpec.shared_examples 'pauses, resumes and deletes a runner' do
end
end
+RSpec.shared_examples 'filters by tag' do
+ it 'shows correct runner when tag matches' do
+ expect(page).to have_content found_runner
+ expect(page).to have_content missing_runner
+
+ input_filtered_search_filter_is_only('Tags', tag)
+
+ expect(page).to have_content found_runner
+ expect(page).not_to have_content missing_runner
+ end
+end
+
RSpec.shared_examples 'submits edit runner form' do
it 'breadcrumb contains runner id and token' do
page.within '[data-testid="breadcrumb-links"]' do
diff --git a/spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb b/spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb
index 095c48cade8..84dc2b20ddc 100644
--- a/spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb
+++ b/spec/support/shared_examples/features/search/search_timeouts_shared_examples.rb
@@ -3,6 +3,7 @@
RSpec.shared_examples 'search timeouts' do |scope|
context 'when search times out' do
before do
+ stub_feature_flags(search_page_vertical_nav: false)
allow_next_instance_of(SearchService) do |service|
allow(service).to receive(:search_objects).and_raise(ActiveRecord::QueryCanceled)
end
diff --git a/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb b/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb
index 95c0a76d726..206116d66c8 100644
--- a/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb
@@ -20,6 +20,8 @@ RSpec.shared_examples 'date sidebar widget' do
scroll_to(button)
button.click
+ execute_script('document.querySelector(".issuable-sidebar")?.scrollBy(0, 50)')
+
click_button today.to_s
wait_for_requests
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index 8081c51577a..ed885d7a226 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -147,6 +147,18 @@ RSpec.shared_examples 'User creates wiki page' do
end
end
+ it 'saves page content in local storage if the user navigates away', :js do
+ fill_in(:wiki_title, with: "Test title")
+ fill_in(:wiki_content, with: "This is a test")
+ fill_in(:wiki_message, with: "Test commit message")
+
+ refresh
+
+ expect(page).to have_field(:wiki_title, with: "Test title")
+ expect(page).to have_field(:wiki_content, with: "This is a test")
+ expect(page).to have_field(:wiki_message, with: "Test commit message")
+ end
+
it 'creates a wiki page with Org markup', :aggregate_failures, :js do
org_content = <<~ORG
* Heading
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index 5c63d6a973d..0334187e4b1 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -78,6 +78,18 @@ RSpec.shared_examples 'User updates wiki page' do
expect(page).to have_content('My awesome wiki!')
end
+ it 'saves page content in local storage if the user navigates away', :js do
+ fill_in(:wiki_title, with: "Test title")
+ fill_in(:wiki_content, with: "This is a test")
+ fill_in(:wiki_message, with: "Test commit message")
+
+ refresh
+
+ expect(page).to have_field(:wiki_title, with: "Test title")
+ expect(page).to have_field(:wiki_content, with: "This is a test")
+ expect(page).to have_field(:wiki_message, with: "Test commit message")
+ end
+
it 'updates the commit message as the title is changed', :js do
fill_in(:wiki_title, with: '& < > \ \ { } &')
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
index 32cb2b1d187..9b5326026b1 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
@@ -53,37 +53,4 @@ RSpec.shared_examples 'User views wiki pages' do
end
end
end
-
- context 'ordered by created_at' do
- let(:pages_ordered_by_created_at) { [wiki_page1, wiki_page2, wiki_page3] }
-
- before do
- page.within('.wiki-sort-dropdown') do
- click_button('Title')
- click_button('Created date')
- end
- end
-
- context 'asc' do
- it 'pages are displayed in direct order' do
- pages.each.with_index do |page_title, index|
- expect(page_title.text).to eq(pages_ordered_by_created_at[index].title)
- end
- end
- end
-
- context 'desc' do
- before do
- page.within('.wiki-sort-dropdown') do
- page.find('.rspec-reverse-sort').click
- end
- end
-
- it 'pages are displayed in reversed order' do
- pages.reverse_each.with_index do |page_title, index|
- expect(page_title.text).to eq(pages_ordered_by_created_at[index].title)
- end
- end
- end
- end
end
diff --git a/spec/support/shared_examples/graphql/n_plus_one_query_examples.rb b/spec/support/shared_examples/graphql/n_plus_one_query_examples.rb
index faf1bb204c9..b4afde311ba 100644
--- a/spec/support/shared_examples/graphql/n_plus_one_query_examples.rb
+++ b/spec/support/shared_examples/graphql/n_plus_one_query_examples.rb
@@ -1,13 +1,23 @@
# frozen_string_literal: true
-RSpec.shared_examples 'N+1 query check' do
+
+RSpec.shared_examples 'N+1 query check' do |threshold: 0, skip_cached: true|
it 'prevents N+1 queries' do
execute_query # "warm up" to prevent undeterministic counts
expect(graphql_errors).to be_blank # Sanity check - ex falso quodlibet!
- control = ActiveRecord::QueryRecorder.new { execute_query }
+ control = ActiveRecord::QueryRecorder.new(skip_cached: skip_cached) { execute_query }
expect(control.count).to be > 0
search_params[:iids] << extra_iid_for_second_query
- expect { execute_query }.not_to exceed_query_limit(control)
+
+ expect { execute_query }.not_to exceed_query_count_limit(control, skip_cached: skip_cached, threshold: threshold)
+ end
+
+ def exceed_query_count_limit(control, skip_cached: true, threshold: 0)
+ if skip_cached
+ exceed_query_limit(control).with_threshold(threshold)
+ else
+ exceed_all_query_limit(control).with_threshold(threshold)
+ end
end
end
diff --git a/spec/support/shared_examples/lib/cache_helpers_shared_examples.rb b/spec/support/shared_examples/lib/cache_helpers_shared_examples.rb
index 82a9e8130f7..2e00abe2f8e 100644
--- a/spec/support/shared_examples/lib/cache_helpers_shared_examples.rb
+++ b/spec/support/shared_examples/lib/cache_helpers_shared_examples.rb
@@ -54,31 +54,6 @@ RSpec.shared_examples_for 'object cache helper' do
allow(Gitlab::ApplicationContext).to receive(:current_context_attribute).with(:caller_id).and_return(caller_id)
end
- context 'when feature flag is off' do
- before do
- stub_feature_flags(add_timing_to_certain_cache_actions: false)
- end
-
- it 'does not call increment' do
- expect(transaction).not_to receive(:increment).with(:cached_object_operations_total, any_args)
-
- subject
- end
-
- it 'does not call histogram' do
- expect(Gitlab::Metrics).not_to receive(:histogram)
-
- subject
- end
-
- it "is valid JSON" do
- parsed = Gitlab::Json.parse(subject.to_s)
-
- expect(parsed).to be_a(Hash)
- expect(parsed["id"]).to eq(presentable.id)
- end
- end
-
it 'increments the counter' do
expect(transaction)
.to receive(:increment)
@@ -157,34 +132,6 @@ RSpec.shared_examples_for 'collection cache helper' do
allow(Gitlab::ApplicationContext).to receive(:current_context_attribute).with(:caller_id).and_return(caller_id)
end
- context 'when feature flag is off' do
- before do
- stub_feature_flags(add_timing_to_certain_cache_actions: false)
- end
-
- it 'does not call increment' do
- expect(transaction).not_to receive(:increment).with(:cached_object_operations_total, any_args)
-
- subject
- end
-
- it 'does not call histogram' do
- expect(Gitlab::Metrics).not_to receive(:histogram)
-
- subject
- end
-
- it "is valid JSON" do
- parsed = Gitlab::Json.parse(subject.to_s)
-
- expect(parsed).to be_an(Array)
-
- presentable.each_with_index do |item, i|
- expect(parsed[i]["id"]).to eq(item.id)
- end
- end
- end
-
context 'when presentable has a group by clause' do
let(:presentable) { MergeRequest.group(:id) }
diff --git a/spec/support/shared_examples/lib/gitlab/memory/watchdog/monitor_result_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/memory/watchdog/monitor_result_shared_examples.rb
new file mode 100644
index 00000000000..98c0e7d506b
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/memory/watchdog/monitor_result_shared_examples.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'returns Watchdog Monitor result' do |threshold_violated:|
+ it 'returns if threshold is violated and payload' do
+ result = monitor.call
+
+ expect(result[:threshold_violated]).to eq(threshold_violated)
+ expect(result[:payload]).to eq(payload)
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/regex_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/regex_shared_examples.rb
new file mode 100644
index 00000000000..150741c6344
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/regex_shared_examples.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'regex rejecting path traversal' do
+ it { is_expected.not_to match('a../b') }
+ it { is_expected.not_to match('a..%2fb') }
+ it { is_expected.not_to match('a%2e%2e%2fb') }
+ it { is_expected.not_to match('a%2e%2e/b') }
+end
diff --git a/spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb
index 6b6e25ca1dd..4b4a7f4ce9d 100644
--- a/spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb
@@ -47,3 +47,47 @@ RSpec.shared_examples 'file template shared examples' do |filename, file_extensi
end
end
end
+
+RSpec.shared_examples 'acts as branch pipeline' do |jobs|
+ context 'when branch pipeline' do
+ let(:pipeline_branch) { default_branch }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch) }
+ let(:pipeline) { service.execute!(:push).payload }
+
+ it 'includes a job' do
+ expect(pipeline.builds.pluck(:name)).to match_array(jobs)
+ end
+ end
+end
+
+RSpec.shared_examples 'acts as MR pipeline' do |jobs, files|
+ context 'when MR pipeline' do
+ let(:pipeline_branch) { 'patch-1' }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
+ let(:pipeline) { service.execute(merge_request).payload }
+
+ let(:merge_request) do
+ create(:merge_request,
+ source_project: project,
+ source_branch: pipeline_branch,
+ target_project: project,
+ target_branch: default_branch)
+ end
+
+ before do
+ files.each do |filename, contents|
+ project.repository.create_file(
+ project.creator,
+ filename,
+ contents,
+        message: "Add #{filename}",
+ branch_name: pipeline_branch)
+ end
+ end
+
+ it 'includes a job' do
+ expect(pipeline).to be_merge_request_event
+ expect(pipeline.builds.pluck(:name)).to match_array(jobs)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/boards/listable_shared_examples.rb b/spec/support/shared_examples/models/boards/listable_shared_examples.rb
index 250a4c1b1bd..ac8655a907f 100644
--- a/spec/support/shared_examples/models/boards/listable_shared_examples.rb
+++ b/spec/support/shared_examples/models/boards/listable_shared_examples.rb
@@ -27,14 +27,6 @@ RSpec.shared_examples 'boards listable model' do |list_factory|
.to eq([list1, list3, list4, list2])
end
end
-
- describe '.without_types' do
- it 'excludes lists of given types' do
- lists = described_class.without_types([:label, :closed])
-
- expect(lists).to match_array([list1])
- end
- end
end
describe '#destroyable?' do
diff --git a/spec/support/shared_examples/models/chat_integration_shared_examples.rb b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
index 6cfeeabc952..6d0462a9ee8 100644
--- a/spec/support/shared_examples/models/chat_integration_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
@@ -166,10 +166,14 @@ RSpec.shared_examples "chat integration" do |integration_name|
let(:opts) { { title: "Awesome issue", description: "please fix" } }
let(:sample_data) do
service = Issues::CreateService.new(project: project, current_user: user, params: opts, spam_params: nil)
- issue = service.execute
+ issue = service.execute[:issue]
service.hook_data(issue, "open")
end
+ before do
+ project.add_developer(user)
+ end
+
it_behaves_like "triggered #{integration_name} integration"
end
diff --git a/spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb b/spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb
new file mode 100644
index 00000000000..a4db4e25db3
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/cascading_namespace_setting_shared_examples.rb
@@ -0,0 +1,355 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'a cascading namespace setting boolean attribute' do
+ |settings_attribute_name:, settings_association: :namespace_settings|
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:subgroup) { create(:group, parent: group) }
+ let(:group_settings) { group.send(settings_association) }
+ let(:subgroup_settings) { subgroup.send(settings_association) }
+
+ describe "##{settings_attribute_name}" do
+ subject(:cascading_attribute) { subgroup_settings.send(settings_attribute_name) }
+
+ before do
+ stub_application_setting(settings_attribute_name => false)
+ end
+
+ context 'when there is no parent' do
+ context 'and the value is not nil' do
+ before do
+ group_settings.update!(settings_attribute_name => true)
+ end
+
+ it 'returns the local value' do
+ expect(group_settings.send(settings_attribute_name)).to eq(true)
+ end
+ end
+
+ context 'and the value is nil' do
+ before do
+ group_settings.update!(settings_attribute_name => nil)
+ end
+
+ it 'returns the application settings value' do
+ expect(group_settings.send(settings_attribute_name)).to eq(false)
+ end
+ end
+ end
+
+ context 'when parent does not lock the attribute' do
+ context 'and value is not nil' do
+ before do
+ group_settings.update!(settings_attribute_name => false)
+ end
+
+ it 'returns local setting when present' do
+ subgroup_settings.update!(settings_attribute_name => true)
+
+ expect(cascading_attribute).to eq(true)
+ end
+
+ it 'returns the parent value when local value is nil' do
+ subgroup_settings.update!(settings_attribute_name => nil)
+
+ expect(cascading_attribute).to eq(false)
+ end
+
+ it 'returns the correct dirty value' do
+ subgroup_settings.send("#{settings_attribute_name}=", true)
+
+ expect(cascading_attribute).to eq(true)
+ end
+
+ it 'does not return the application setting value when parent value is false' do
+ stub_application_setting(settings_attribute_name => true)
+
+ expect(cascading_attribute).to eq(false)
+ end
+ end
+
+ context 'and the value is nil' do
+ before do
+ group_settings.update!(settings_attribute_name => nil, "lock_#{settings_attribute_name}".to_sym => false)
+ subgroup_settings.update!(settings_attribute_name => nil)
+
+ subgroup_settings.clear_memoization(settings_attribute_name)
+ end
+
+ it 'cascades to the application settings value' do
+ expect(cascading_attribute).to eq(false)
+ end
+ end
+
+ context 'when multiple ancestors set a value' do
+ let(:third_level_subgroup) { create(:group, parent: subgroup) }
+
+ before do
+ group_settings.update!(settings_attribute_name => true)
+ subgroup_settings.update!(settings_attribute_name => false)
+ end
+
+ it 'returns the closest ancestor value' do
+ expect(third_level_subgroup.send(settings_association).send(settings_attribute_name)).to eq(false)
+ end
+ end
+ end
+
+ context 'when parent locks the attribute' do
+ before do
+ subgroup_settings.update!(settings_attribute_name => true)
+ group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
+
+ subgroup_settings.clear_memoization(settings_attribute_name)
+ subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
+ end
+
+ it 'returns the parent value' do
+ expect(cascading_attribute).to eq(false)
+ end
+
+ it 'does not allow the local value to be saved' do
+ subgroup_settings.send("#{settings_attribute_name}=", nil)
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid,
+ /cannot be changed because it is locked by an ancestor/)
+ end
+ end
+
+ context 'when the application settings locks the attribute' do
+ before do
+ subgroup_settings.update!(settings_attribute_name => true)
+ stub_application_setting("lock_#{settings_attribute_name}" => true, settings_attribute_name => true)
+ end
+
+ it 'returns the application setting value' do
+ expect(cascading_attribute).to eq(true)
+ end
+
+ it 'does not allow the local value to be saved' do
+ subgroup_settings.send("#{settings_attribute_name}=", false)
+
+ expect { subgroup_settings.save! }
+ .to raise_error(
+ ActiveRecord::RecordInvalid,
+ /cannot be changed because it is locked by an ancestor/
+ )
+ end
+ end
+
+ context 'when parent locked the attribute then the application settings locks it' do
+ before do
+ subgroup_settings.update!(settings_attribute_name => true)
+ group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
+ stub_application_setting("lock_#{settings_attribute_name}" => true, settings_attribute_name => true)
+
+ subgroup_settings.clear_memoization(settings_attribute_name)
+ subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
+ end
+
+ it 'returns the application setting value' do
+ expect(cascading_attribute).to eq(true)
+ end
+ end
+ end
+
+ describe "##{settings_attribute_name}?" do
+ before do
+ subgroup_settings.update!(settings_attribute_name => true)
+ group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
+
+ subgroup_settings.clear_memoization(settings_attribute_name)
+ subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
+ end
+
+ it 'aliases the method when the attribute is a boolean' do
+ expect(subgroup_settings.send("#{settings_attribute_name}?"))
+ .to eq(subgroup_settings.send(settings_attribute_name))
+ end
+ end
+
+ describe "##{settings_attribute_name}=" do
+ before do
+ subgroup_settings.update!(settings_attribute_name => nil)
+ group_settings.update!(settings_attribute_name => true)
+ end
+
+ it 'does not save the value locally when it matches the cascaded value' do
+ subgroup_settings.update!(settings_attribute_name => true)
+
+ expect(subgroup_settings.read_attribute(settings_attribute_name)).to eq(nil)
+ end
+ end
+
+ describe "##{settings_attribute_name}_locked?" do
+ shared_examples 'not locked' do
+ it 'is not locked by an ancestor' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_ancestor?")).to eq(false)
+ end
+
+ it 'is not locked by application setting' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_application_setting?")).to eq(false)
+ end
+
+ it 'does not return a locked namespace' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_ancestor")).to be_nil
+ end
+ end
+
+ context 'when attribute is locked by self' do
+ before do
+ subgroup_settings.update!("lock_#{settings_attribute_name}" => true)
+ end
+
+ it 'is not locked by default' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked?")).to eq(false)
+ end
+
+ it 'is locked when including self' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked?", include_self: true)).to eq(true)
+ end
+ end
+
+ context 'when parent does not lock the attribute' do
+ it_behaves_like 'not locked'
+ end
+
+ context 'when parent locks the attribute' do
+ before do
+ group_settings.update!("lock_#{settings_attribute_name}".to_sym => true, settings_attribute_name => false)
+
+ subgroup_settings.clear_memoization(settings_attribute_name)
+ subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
+ end
+
+ it 'is locked by an ancestor' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_ancestor?")).to eq(true)
+ end
+
+ it 'is not locked by application setting' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_application_setting?")).to eq(false)
+ end
+
+ it 'returns a locked namespace settings object' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_ancestor").namespace_id)
+ .to eq(group_settings.namespace_id)
+ end
+ end
+
+ context 'when not locked by application settings' do
+ before do
+ stub_application_setting("lock_#{settings_attribute_name}" => false)
+ end
+
+ it_behaves_like 'not locked'
+ end
+
+ context 'when locked by application settings' do
+ before do
+ stub_application_setting("lock_#{settings_attribute_name}" => true)
+ end
+
+ it 'is not locked by an ancestor' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_ancestor?")).to eq(false)
+ end
+
+ it 'is locked by application setting' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_by_application_setting?")).to eq(true)
+ end
+
+ it 'does not return a locked namespace' do
+ expect(subgroup_settings.send("#{settings_attribute_name}_locked_ancestor")).to be_nil
+ end
+ end
+ end
+
+ describe "#lock_#{settings_attribute_name}=" do
+ context 'when parent locks the attribute' do
+ before do
+ group_settings.update!("lock_#{settings_attribute_name}".to_sym => true, settings_attribute_name => false)
+
+ subgroup_settings.clear_memoization(settings_attribute_name)
+ subgroup_settings.clear_memoization("#{settings_attribute_name}_locked_ancestor")
+ end
+
+ it 'does not allow the attribute to be saved' do
+ subgroup_settings.send("lock_#{settings_attribute_name}=", true)
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid,
+ /cannot be changed because it is locked by an ancestor/)
+ end
+ end
+
+ context 'when parent does not lock the attribute' do
+ before do
+ group_settings.update!("lock_#{settings_attribute_name}" => false, settings_attribute_name => false)
+
+ subgroup_settings.send("lock_#{settings_attribute_name}=", true)
+ end
+
+ it 'allows the lock to be set when the attribute is not nil' do
+ subgroup_settings.send("#{settings_attribute_name}=", true)
+
+ expect(subgroup_settings.save).to eq(true)
+ end
+
+ it 'does not allow the lock to be saved when the attribute is nil' do
+ subgroup_settings.send("#{settings_attribute_name}=", nil)
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid,
+ /cannot be nil when locking the attribute/)
+ end
+
+ it 'copies the cascaded value when locking the attribute if the local value is nil', :aggregate_failures do
+ subgroup_settings.send("#{settings_attribute_name}=", nil)
+ subgroup_settings.send("lock_#{settings_attribute_name}=", true)
+
+ expect(subgroup_settings.read_attribute(settings_attribute_name)).to eq(false)
+ end
+ end
+
+ context 'when application settings locks the attribute' do
+ before do
+ stub_application_setting("lock_#{settings_attribute_name}".to_sym => true)
+ end
+
+ it 'does not allow the attribute to be saved' do
+ subgroup_settings.send("lock_#{settings_attribute_name}=", true)
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid,
+ /cannot be changed because it is locked by an ancestor/)
+ end
+ end
+
+ context 'when application_settings does not lock the attribute' do
+ before do
+ stub_application_setting("lock_#{settings_attribute_name}".to_sym => false)
+ end
+
+ it 'allows the attribute to be saved' do
+ subgroup_settings.send("#{settings_attribute_name}=", true)
+ subgroup_settings.send("lock_#{settings_attribute_name}=", true)
+
+ expect(subgroup_settings.save).to eq(true)
+ end
+ end
+ end
+
+ describe 'after update callback' do
+ before do
+ group_settings.update!("lock_#{settings_attribute_name}" => false, settings_attribute_name => false)
+ subgroup_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => false)
+ end
+
+ it 'clears descendant locks' do
+ group_settings.update!("lock_#{settings_attribute_name}" => true, settings_attribute_name => true)
+
+ expect(subgroup_settings.reload.send("lock_#{settings_attribute_name}")).to eq(false)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
index f3a12578912..a658d02f09a 100644
--- a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
@@ -92,8 +92,8 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
it 'obtains an exclusive lease during processing' do
expect(model)
.to receive(:in_lock)
- .with(model.counter_lock_key(incremented_attribute), ttl: described_class::WORKER_LOCK_TTL)
- .and_call_original
+ .with(model.counter_lock_key(incremented_attribute), ttl: described_class::WORKER_LOCK_TTL)
+ .and_call_original
subject
end
@@ -104,7 +104,14 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
model.delayed_increment_counter(incremented_attribute, -3)
end
- it 'updates the record and logs it' do
+ it 'updates the record and logs it', :aggregate_failures do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ message: 'Acquiring lease for project statistics update',
+ attributes: [incremented_attribute]
+ )
+ )
+
expect(Gitlab::AppLogger).to receive(:info).with(
hash_including(
message: 'Flush counter attribute to database',
@@ -124,14 +131,14 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
it 'removes the increment entry from Redis' do
Gitlab::Redis::SharedState.with do |redis|
- key_exists = redis.exists(model.counter_key(incremented_attribute))
+ key_exists = redis.exists?(model.counter_key(incremented_attribute))
expect(key_exists).to be_truthy
end
subject
Gitlab::Redis::SharedState.with do |redis|
- key_exists = redis.exists(model.counter_key(incremented_attribute))
+ key_exists = redis.exists?(model.counter_key(incremented_attribute))
expect(key_exists).to be_falsey
end
end
@@ -162,7 +169,7 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
subject
Gitlab::Redis::SharedState.with do |redis|
- key_exists = redis.exists(model.counter_flushed_key(incremented_attribute))
+ key_exists = redis.exists?(model.counter_flushed_key(incremented_attribute))
expect(key_exists).to be_falsey
end
end
@@ -186,31 +193,88 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
end
end
- describe '#clear_counter!' do
+ describe '#reset_counter!' do
let(:attribute) { counter_attributes.first }
before do
+ model.update!(attribute => 123)
model.increment_counter(attribute, 10)
end
- it 'deletes the counter key for the given attribute and logs it' do
- expect(Gitlab::AppLogger).to receive(:info).with(
- hash_including(
- message: 'Clear counter attribute',
- attribute: attribute,
- project_id: model.project_id,
- 'correlation_id' => an_instance_of(String),
- 'meta.feature_category' => 'test',
- 'meta.caller_id' => 'caller'
- )
- )
+ subject { model.reset_counter!(attribute) }
- model.clear_counter!(attribute)
+ it 'resets the attribute value to 0 and clears existing counter', :aggregate_failures do
+ expect { subject }.to change { model.reload.send(attribute) }.from(123).to(0)
Gitlab::Redis::SharedState.with do |redis|
- key_exists = redis.exists(model.counter_key(attribute))
+ key_exists = redis.exists?(model.counter_key(attribute))
expect(key_exists).to be_falsey
end
end
+
+ it_behaves_like 'obtaining lease to update database' do
+ context 'when the execution raises error' do
+ before do
+ allow(model).to receive(:update!).and_raise(StandardError, 'Something went wrong')
+ end
+
+ it 'reraises error' do
+ expect { subject }.to raise_error(StandardError, 'Something went wrong')
+ end
+ end
+ end
+ end
+
+ describe '#update_counters_with_lease' do
+ let(:increments) { { build_artifacts_size: 1, packages_size: 2 } }
+
+ subject { model.update_counters_with_lease(increments) }
+
+ it 'updates counters of the record' do
+ expect { subject }
+ .to change { model.reload.build_artifacts_size }.by(1)
+ .and change { model.reload.packages_size }.by(2)
+ end
+
+ it_behaves_like 'obtaining lease to update database' do
+ context 'when the execution raises error' do
+ before do
+ allow(model.class).to receive(:update_counters).and_raise(StandardError, 'Something went wrong')
+ end
+
+ it 'reraises error' do
+ expect { subject }.to raise_error(StandardError, 'Something went wrong')
+ end
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'obtaining lease to update database' do
+ context 'when it is unable to obtain lock' do
+ before do
+ allow(model).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+
+ it 'logs a warning' do
+ allow(model).to receive(:in_lock).and_raise(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+
+ expect(Gitlab::AppLogger).to receive(:warn).once
+
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when feature flag counter_attribute_db_lease_for_update is disabled' do
+ before do
+ stub_feature_flags(counter_attribute_db_lease_for_update: false)
+ allow(model).to receive(:in_lock).and_call_original
+ end
+
+ it 'does not attempt to get a lock' do
+ expect(model).not_to receive(:in_lock)
+
+ subject
+ end
end
end
diff --git a/spec/support/shared_examples/models/concerns/has_wiki_shared_examples.rb b/spec/support/shared_examples/models/concerns/has_wiki_shared_examples.rb
index 0357b7462fb..65bc6c10490 100644
--- a/spec/support/shared_examples/models/concerns/has_wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/has_wiki_shared_examples.rb
@@ -15,7 +15,7 @@ RSpec.shared_examples 'model with wiki' do
context 'when the repository cannot be created' do
before do
- expect(container.wiki).to receive(:wiki) { raise Wiki::CouldNotCreateWikiError }
+ expect(container.wiki).to receive(:create_wiki_repository) { raise Wiki::CouldNotCreateWikiError }
end
it 'returns false and adds a validation error' do
diff --git a/spec/support/shared_examples/models/concerns/participable_shared_examples.rb b/spec/support/shared_examples/models/concerns/participable_shared_examples.rb
new file mode 100644
index 00000000000..ec7a9105bb2
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/participable_shared_examples.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'visible participants for issuable with read ability' do |model_class|
+ let_it_be(:user1) { create(:user) }
+
+ let(:model) { model_class.to_s.classify.constantize }
+
+ before do
+ allow(Ability).to receive(:allowed?).with(anything, :"read_#{model_class}", anything).and_return(true)
+ allow(model).to receive(:participant_attrs).and_return([:bar])
+ end
+
+ shared_examples 'check for participables read ability' do |ability_name|
+ it 'receives expected ability' do
+ instance = model.new
+
+ allow(instance).to receive(:bar).and_return(participable_source)
+
+ expect(Ability).to receive(:allowed?).with(anything, ability_name, instance)
+
+ expect(instance.visible_participants(user1)).to be_empty
+ end
+ end
+
+ context 'when source is an award emoji' do
+ let(:participable_source) { build(:award_emoji, :upvote) }
+
+ it_behaves_like 'check for participables read ability', :read_issuable_participables
+ end
+
+ context 'when source is a note' do
+ let(:participable_source) { build(:note) }
+
+ it_behaves_like 'check for participables read ability', :read_note
+ end
+
+ context 'when source is an internal note' do
+ let(:participable_source) { build(:note, :confidential) }
+
+ it_behaves_like 'check for participables read ability', :read_internal_note
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
index d06e8391a9a..e4958779957 100644
--- a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
@@ -1,10 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'a timebox' do |timebox_type|
- let(:project) { create(:project, :public) }
- let(:group) { create(:group) }
let(:timebox_args) { [] }
- let(:timebox) { create(timebox_type, *timebox_args, project: project) }
let(:issue) { create(:issue, project: project) }
let(:user) { create(:user) }
let(:timebox_table_name) { timebox_type.to_s.pluralize.to_sym }
@@ -14,28 +11,6 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
let(:open_on_left) { nil }
let(:open_on_right) { nil }
- describe 'modules' do
- context 'with a project' do
- it_behaves_like 'AtomicInternalId' do
- let(:internal_id_attribute) { :iid }
- let(:instance) { build(timebox_type, *timebox_args, project: create(:project), group: nil) }
- let(:scope) { :project }
- let(:scope_attrs) { { project: instance.project } }
- let(:usage) { timebox_table_name }
- end
- end
-
- context 'with a group' do
- it_behaves_like 'AtomicInternalId' do
- let(:internal_id_attribute) { :iid }
- let(:instance) { build(timebox_type, *timebox_args, project: nil, group: create(:group)) }
- let(:scope) { :group }
- let(:scope_attrs) { { namespace: instance.group } }
- let(:usage) { timebox_table_name }
- end
- end
- end
-
describe "Validation" do
before do
allow(subject).to receive(:set_iid).and_return(false)
@@ -65,21 +40,9 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
expect(timebox.errors[:due_date]).to include("date must not be after 9999-12-31")
end
end
-
- describe '#timebox_type_check' do
- it 'is invalid if it has both project_id and group_id' do
- timebox = build(timebox_type, *timebox_args, group: group)
- timebox.project = project
-
- expect(timebox).not_to be_valid
- expect(timebox.errors[:project_id]).to include("#{timebox_type} should belong either to a project or a group.")
- end
- end
end
describe "Associations" do
- it { is_expected.to belong_to(:project) }
- it { is_expected.to belong_to(:group) }
it { is_expected.to have_many(:issues) }
it { is_expected.to have_many(:merge_requests) }
it { is_expected.to have_many(:labels).through(:issues) }
@@ -91,38 +54,6 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
end
- describe '#project_timebox?' do
- context 'when project_id is present' do
- it 'returns true' do
- expect(timebox.project_timebox?).to be_truthy
- end
- end
-
- context 'when project_id is not present' do
- let(:timebox) { build(timebox_type, *timebox_args, group: group) }
-
- it 'returns false' do
- expect(timebox.project_timebox?).to be_falsey
- end
- end
- end
-
- describe '#group_timebox?' do
- context 'when group_id is present' do
- let(:timebox) { build(timebox_type, *timebox_args, group: group) }
-
- it 'returns true' do
- expect(timebox.group_timebox?).to be_truthy
- end
- end
-
- context 'when group_id is not present' do
- it 'returns false' do
- expect(timebox.group_timebox?).to be_falsey
- end
- end
- end
-
describe '#safe_title' do
let(:timebox) { create(timebox_type, *timebox_args, title: "<b>foo & bar -> 2.2</b>") }
@@ -131,22 +62,6 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
end
- describe '#resource_parent' do
- context 'when group is present' do
- let(:timebox) { build(timebox_type, *timebox_args, group: group) }
-
- it 'returns the group' do
- expect(timebox.resource_parent).to eq(group)
- end
- end
-
- context 'when project is present' do
- it 'returns the project' do
- expect(timebox.resource_parent).to eq(project)
- end
- end
- end
-
describe "#title" do
let(:timebox) { create(timebox_type, *timebox_args, title: "<b>foo & bar -> 2.2</b>") }
@@ -155,39 +70,6 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
end
- describe '#merge_requests_enabled?' do
- context "per project" do
- it "is true for projects with MRs enabled" do
- project = create(:project, :merge_requests_enabled)
- timebox = create(timebox_type, *timebox_args, project: project)
-
- expect(timebox.merge_requests_enabled?).to be_truthy
- end
-
- it "is false for projects with MRs disabled" do
- project = create(:project, :repository_enabled, :merge_requests_disabled)
- timebox = create(timebox_type, *timebox_args, project: project)
-
- expect(timebox.merge_requests_enabled?).to be_falsey
- end
-
- it "is false for projects with repository disabled" do
- project = create(:project, :repository_disabled)
- timebox = create(timebox_type, *timebox_args, project: project)
-
- expect(timebox.merge_requests_enabled?).to be_falsey
- end
- end
-
- context "per group" do
- let(:timebox) { create(timebox_type, *timebox_args, group: group) }
-
- it "is always true for groups, for performance reasons" do
- expect(timebox.merge_requests_enabled?).to be_truthy
- end
- end
- end
-
describe '#to_ability_name' do
it 'returns timebox' do
timebox = build(timebox_type, *timebox_args)
diff --git a/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb b/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
index e309aa50c6e..31ec25249d7 100644
--- a/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
+++ b/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
@@ -4,7 +4,7 @@ RSpec.shared_examples Integrations::HasWebHook do
include AfterNextHelpers
describe 'associations' do
- it { is_expected.to have_one(:service_hook).inverse_of(:integration).with_foreign_key(:service_id) }
+ it { is_expected.to have_one(:service_hook).inverse_of(:integration).with_foreign_key(:integration_id) }
end
describe 'callbacks' do
diff --git a/spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb b/spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb
index c92e819db19..3caf58da4d2 100644
--- a/spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb
+++ b/spec/support/shared_examples/models/project_ci_cd_settings_shared_examples.rb
@@ -5,12 +5,14 @@ RSpec.shared_examples 'ci_cd_settings delegation' do
context 'when ci_cd_settings is destroyed but project is not' do
it 'allows methods delegated to ci_cd_settings to be nil', :aggregate_failures do
- project = create(:project)
attributes = project.ci_cd_settings.attributes.keys - %w(id project_id) - exclude_attributes
+
+ expect(attributes).to match_array(attributes_with_prefix.keys)
+
project.ci_cd_settings.destroy!
project.reload
- attributes.each do |attr|
- method = project.respond_to?("ci_#{attr}") ? "ci_#{attr}" : attr
+ attributes_with_prefix.each do |attr, prefix|
+ method = "#{prefix}#{attr}"
expect(project.send(method)).to be_nil, "#{attr} was not nil"
end
end
@@ -20,8 +22,6 @@ end
RSpec.shared_examples 'a ci_cd_settings predicate method' do |prefix: ''|
using RSpec::Parameterized::TableSyntax
- let_it_be(:project) { create(:project) }
-
context 'when ci_cd_settings is nil' do
before do
allow(project).to receive(:ci_cd_settings).and_return(nil)
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index 5f6a10bd754..b1aa90449e1 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'wiki model' do
+ using RSpec::Parameterized::TableSyntax
+
let_it_be(:user) { create(:user, :commit_email) }
let(:wiki_container) { raise NotImplementedError }
@@ -124,36 +126,6 @@ RSpec.shared_examples 'wiki model' do
end
end
- describe '#wiki' do
- it 'contains a Gitlab::Git::Wiki instance' do
- expect(subject.wiki).to be_a Gitlab::Git::Wiki
- end
-
- it 'creates a new wiki repo if one does not yet exist' do
- expect(subject.create_page('index', 'test content')).to be_truthy
- end
-
- it 'creates a new wiki repo with a default commit message' do
- expect(subject.create_page('index', 'test content', :markdown, '')).to be_truthy
-
- page = subject.find_page('index')
-
- expect(page.last_version.message).to eq("#{user.username} created page: index")
- end
-
- context 'when the repository cannot be created' do
- let(:wiki_container) { wiki_container_without_repo }
-
- before do
- expect(subject.repository).to receive(:create_if_not_exists) { false }
- end
-
- it 'raises CouldNotCreateWikiError' do
- expect { subject.wiki }.to raise_exception(Wiki::CouldNotCreateWikiError)
- end
- end
- end
-
describe '#empty?' do
context 'when the wiki repository is empty' do
it 'returns true' do
@@ -180,70 +152,71 @@ RSpec.shared_examples 'wiki model' do
it 'returns false' do
expect(subject.empty?).to be(false)
end
-
- it 'only instantiates a Wiki page once' do
- expect(WikiPage).to receive(:new).once.and_call_original
-
- subject.empty?
- end
end
end
end
describe '#list_pages' do
- let(:wiki_pages) { subject.list_pages }
+ shared_examples 'wiki model #list_pages' do
+ let(:wiki_pages) { subject.list_pages }
- before do
- subject.create_page('index', 'This is an index')
- subject.create_page('index2', 'This is an index2')
- subject.create_page('an index3', 'This is an index3')
- end
+ before do
+ subject.create_page('index', 'This is an index')
+ subject.create_page('index2', 'This is an index2')
+ subject.create_page('an index3', 'This is an index3')
+ end
- it 'returns an array of WikiPage instances' do
- expect(wiki_pages).to be_present
- expect(wiki_pages).to all(be_a(WikiPage))
- end
+ it 'returns an array of WikiPage instances' do
+ expect(wiki_pages).to be_present
+ expect(wiki_pages).to all(be_a(WikiPage))
+ end
- it 'does not load WikiPage content by default' do
- wiki_pages.each do |page|
- expect(page.content).to be_empty
+ it 'does not load WikiPage content by default' do
+ wiki_pages.each do |page|
+ expect(page.content).to be_empty
+ end
end
- end
- it 'returns all pages by default' do
- expect(wiki_pages.count).to eq(3)
- end
+ it 'returns all pages by default' do
+ expect(wiki_pages.count).to eq(3)
+ end
- context 'with limit option' do
- it 'returns limited set of pages' do
- expect(subject.list_pages(limit: 1).count).to eq(1)
+ context 'with limit option' do
+ it 'returns limited set of pages' do
+ expect(subject.list_pages(limit: 1).count).to eq(1)
+ end
end
- end
- context 'with sorting options' do
- it 'returns pages sorted by title by default' do
- pages = ['an index3', 'index', 'index2']
+ context 'with sorting options' do
+ it 'returns pages sorted by title by default' do
+ pages = ['an index3', 'index', 'index2']
- expect(subject.list_pages.map(&:title)).to eq(pages)
- expect(subject.list_pages(direction: 'desc').map(&:title)).to eq(pages.reverse)
+ expect(subject.list_pages.map(&:title)).to eq(pages)
+ expect(subject.list_pages(direction: 'desc').map(&:title)).to eq(pages.reverse)
+ end
end
- it 'returns pages sorted by created_at' do
- pages = ['index', 'index2', 'an index3']
+ context 'with load_content option' do
+ let(:pages) { subject.list_pages(load_content: true) }
- expect(subject.list_pages(sort: 'created_at').map(&:title)).to eq(pages)
- expect(subject.list_pages(sort: 'created_at', direction: 'desc').map(&:title)).to eq(pages.reverse)
+ it 'loads WikiPage content' do
+ expect(pages.first.content).to eq('This is an index3')
+ expect(pages.second.content).to eq('This is an index')
+ expect(pages.third.content).to eq('This is an index2')
+ end
end
end
- context 'with load_content option' do
- let(:pages) { subject.list_pages(load_content: true) }
-
- it 'loads WikiPage content' do
- expect(pages.first.content).to eq('This is an index3')
- expect(pages.second.content).to eq('This is an index')
- expect(pages.third.content).to eq('This is an index2')
+ context 'list pages with legacy wiki rpcs' do
+ before do
+ stub_feature_flags(wiki_list_page_with_normal_repository_rpcs: false)
end
+
+ it_behaves_like 'wiki model #list_pages'
+ end
+
+ context 'list pages with normal repository rpcs' do
+ it_behaves_like 'wiki model #list_pages'
end
end
@@ -338,6 +311,74 @@ RSpec.shared_examples 'wiki model' do
end
end
+ context "wiki repository's default branch is updated" do
+ before do
+ old_default_branch = wiki.default_branch
+ subject.create_page('page in updated default branch', 'content')
+ subject.repository.add_branch(user, 'another_branch', old_default_branch)
+ subject.repository.rm_branch(user, old_default_branch)
+ subject.repository.expire_status_cache
+ end
+
+ it 'returns the page in the updated default branch' do
+ wiki = described_class.new(wiki_container, user)
+ page = wiki.find_page('page in updated default branch')
+
+ expect(wiki.default_branch).to eql('another_branch')
+ expect(page.title).to eq('page in updated default branch')
+ end
+ end
+
+ context "wiki repository's HEAD is updated" do
+ before do
+ subject.create_page('page in updated HEAD', 'content')
+ subject.repository.add_branch(user, 'another_branch', subject.default_branch)
+ subject.repository.change_head('another_branch')
+ subject.repository.expire_status_cache
+ end
+
+ it 'returns the page in the new HEAD' do
+ wiki = described_class.new(wiki_container, user)
+ page = subject.find_page('page in updated HEAD')
+
+ expect(wiki.default_branch).to eql('another_branch')
+ expect(page.title).to eq('page in updated HEAD')
+ end
+ end
+
+ context 'pages with relative paths' do
+ where(:path, :title) do
+ [
+ ['~hello.md', '~Hello'],
+ ['hello~world.md', 'Hello~World'],
+ ['~~~hello.md', '~~~Hello'],
+ ['~/hello.md', '~/Hello'],
+ ['hello.md', '/Hello'],
+ ['hello.md', '../Hello'],
+ ['hello.md', './Hello'],
+ ['dir/hello.md', '/dir/Hello']
+ ]
+ end
+
+ with_them do
+ before do
+ wiki.repository.create_file(
+ user, path, "content of wiki file",
+ branch_name: wiki.default_branch,
+ message: "created page #{path}",
+ author_email: user.email,
+ author_name: user.name
+ )
+ end
+
+ it "can find page with `#{params[:title]}` title" do
+ page = subject.find_page(title)
+
+ expect(page.content).to eq("content of wiki file")
+ end
+ end
+ end
+
context 'pages with different file extensions' do
where(:extension, :path, :title) do
[
@@ -378,14 +419,6 @@ RSpec.shared_examples 'wiki model' do
end
end
- context 'find page with legacy wiki service' do
- before do
- stub_feature_flags(wiki_find_page_with_normal_repository_rpcs: false)
- end
-
- it_behaves_like 'wiki model #find_page'
- end
-
context 'find page with normal repository RPCs' do
it_behaves_like 'wiki model #find_page'
end
@@ -404,14 +437,6 @@ RSpec.shared_examples 'wiki model' do
end
end
- context 'find sidebar with legacy wiki service' do
- before do
- stub_feature_flags(wiki_find_page_with_normal_repository_rpcs: false)
- end
-
- it_behaves_like 'wiki model #find_sidebar'
- end
-
context 'find sidebar with normal repository RPCs' do
it_behaves_like 'wiki model #find_sidebar'
end
@@ -421,7 +446,7 @@ RSpec.shared_examples 'wiki model' do
let(:image) { File.open(Rails.root.join('spec', 'fixtures', 'big-image.png')) }
before do
- subject.wiki # Make sure the wiki repo exists
+ subject.create_wiki_repository # Make sure the wiki repo exists
subject.repository.create_file(user, 'image.png', image, branch_name: subject.default_branch, message: 'add image')
end
@@ -456,6 +481,22 @@ RSpec.shared_examples 'wiki model' do
expect(file.raw_data).to be_empty
end
end
+
+ context "wiki repository's default branch is updated" do
+ before do
+ old_default_branch = wiki.default_branch
+ subject.repository.add_branch(user, 'another_branch', old_default_branch)
+ subject.repository.rm_branch(user, old_default_branch)
+ subject.repository.expire_status_cache
+ end
+
+ it 'returns the page in the updated default branch' do
+ wiki = described_class.new(wiki_container, user)
+ file = wiki.find_file('image.png')
+
+ expect(file.mime_type).to eq('image/png')
+ end
+ end
end
describe '#create_page' do
@@ -480,7 +521,7 @@ RSpec.shared_examples 'wiki model' do
it 'sets the correct commit message' do
subject.create_page('test page', 'some content', :markdown, 'commit message')
- expect(subject.list_pages.first.page.version.message).to eq('commit message')
+ expect(subject.list_pages.first.version.message).to eq('commit message')
end
it 'sets the correct commit email' do
@@ -577,6 +618,8 @@ RSpec.shared_examples 'wiki model' do
'foo' | :org | ['foo.md'] | false
'foo' | :markdown | ['dir/foo.md'] | true
'/foo' | :markdown | ['foo.md'] | false
+ '~foo' | :markdown | [] | true
+ '~~~foo' | :markdown | [] | true
'./foo' | :markdown | ['foo.md'] | false
'../foo' | :markdown | ['foo.md'] | false
'../../foo' | :markdown | ['foo.md'] | false
@@ -607,14 +650,6 @@ RSpec.shared_examples 'wiki model' do
end
it_behaves_like 'create_page tests'
-
- context 'create page with legacy find_page wiki service' do
- it_behaves_like 'create_page tests' do
- before do
- stub_feature_flags(wiki_find_page_with_normal_repository_rpcs: false)
- end
- end
- end
end
describe '#update_page' do
@@ -687,6 +722,8 @@ RSpec.shared_examples 'wiki model' do
using RSpec::Parameterized::TableSyntax
where(:original_title, :original_format, :updated_title, :updated_format, :expected_title, :expected_path) do
+ 'test page' | :markdown | '~new test page' | :asciidoc | '~new test page' | '~new-test-page.asciidoc'
+ 'test page' | :markdown | '~~~new test page' | :asciidoc | '~~~new test page' | '~~~new-test-page.asciidoc'
'test page' | :markdown | 'new test page' | :asciidoc | 'new test page' | 'new-test-page.asciidoc'
'test page' | :markdown | 'new dir/new test page' | :asciidoc | 'new dir/new test page' | 'new-dir/new-test-page.asciidoc'
'test dir/test page' | :markdown | 'new dir/new test page' | :asciidoc | 'new dir/new test page' | 'new-dir/new-test-page.asciidoc'
@@ -696,13 +733,13 @@ RSpec.shared_examples 'wiki model' do
'test dir/test page' | :markdown | nil | :markdown | 'test dir/test page' | 'test-dir/test-page.md'
'test page' | :markdown | '' | :markdown | 'test page' | 'test-page.md'
'test.page' | :markdown | '' | :markdown | 'test.page' | 'test.page.md'
- 'testpage' | :markdown | '../testpage' | :markdown | 'testpage' | 'testpage.md'
- 'dir/testpage' | :markdown | 'dir/../testpage' | :markdown | 'testpage' | 'testpage.md'
- 'dir/testpage' | :markdown | './dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
- 'dir/testpage' | :markdown | '../dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
- 'dir/testpage' | :markdown | '../dir/../testpage' | :markdown | 'testpage' | 'testpage.md'
- 'dir/testpage' | :markdown | '../dir/../dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
- 'dir/testpage' | :markdown | '../dir/../another/testpage' | :markdown | 'another/testpage' | 'another/testpage.md'
+ 'testpage' | :markdown | '../testpage' | :markdown | 'testpage' | 'testpage.md'
+ 'dir/testpage' | :markdown | 'dir/../testpage' | :markdown | 'testpage' | 'testpage.md'
+ 'dir/testpage' | :markdown | './dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
+ 'dir/testpage' | :markdown | '../dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
+ 'dir/testpage' | :markdown | '../dir/../testpage' | :markdown | 'testpage' | 'testpage.md'
+ 'dir/testpage' | :markdown | '../dir/../dir/testpage' | :markdown | 'dir/testpage' | 'dir/testpage.md'
+ 'dir/testpage' | :markdown | '../dir/../another/testpage' | :markdown | 'another/testpage' | 'another/testpage.md'
end
end
@@ -711,17 +748,6 @@ RSpec.shared_examples 'wiki model' do
include_context 'extended examples'
end
- context 'update page with legacy find_page wiki service' do
- it_behaves_like 'update_page tests' do
- before do
- stub_feature_flags(wiki_find_page_with_normal_repository_rpcs: false)
- end
-
- include_context 'common examples'
- include_context 'extended examples'
- end
- end
-
context 'when format is invalid' do
let!(:page) { create(:wiki_page, wiki: subject, title: 'test page') }
@@ -862,7 +888,7 @@ RSpec.shared_examples 'wiki model' do
end
describe '#create_wiki_repository' do
- let(:head_path) { Rails.root.join(TestEnv.repos_path, "#{wiki.disk_path}.git", 'HEAD') }
+ let(:head_path) { Gitlab::GitalyClient::StorageSettings.allow_disk_access { Rails.root.join(TestEnv.repos_path, "#{wiki.disk_path}.git", 'HEAD') } }
let(:default_branch) { 'foo' }
before do
@@ -895,4 +921,40 @@ RSpec.shared_examples 'wiki model' do
end
end
end
+
+ describe '#preview_slug' do
+ where(:title, :file_extension, :format, :expected_slug) do
+ 'The Best Thing' | :md | :markdown | 'The-Best-Thing'
+ 'The Best Thing' | :txt | :plaintext | 'The-Best-Thing'
+ 'A Subject/Title Here' | :txt | :plaintext | 'A-Subject/Title-Here'
+ 'A subject' | :txt | :plaintext | 'A-subject'
+ 'A 1/B 2/C 3' | :txt | :plaintext | 'A-1/B-2/C-3'
+ 'subject/title' | :txt | :plaintext | 'subject/title'
+ 'subject/title.md' | :txt | :plaintext | 'subject/title.md'
+ 'foo%2Fbar' | :txt | :plaintext | 'foo%2Fbar'
+ '' | :md | :markdown | '.md'
+ '' | :txt | :plaintext | '.txt'
+ end
+
+ with_them do
+ before do
+ subject.repository.create_file(
+ user, "#{title}.#{file_extension}", 'content',
+ branch_name: subject.default_branch,
+ message: "Add #{title}"
+ )
+ end
+
+ it do
+ expect(described_class.preview_slug(title, file_extension)).to eq(expected_slug)
+ end
+
+ it 'matches the slug generated by gitaly' do
+ skip('Gitaly cannot generate a slug for an empty title') unless title.present?
+
+ gitaly_slug = subject.list_pages.first.slug
+ expect(described_class.preview_slug(title, file_extension)).to eq(gitaly_slug)
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/policies/wiki_policies_shared_examples.rb b/spec/support/shared_examples/policies/wiki_policies_shared_examples.rb
index 991d6289373..b9d4709efd5 100644
--- a/spec/support/shared_examples/policies/wiki_policies_shared_examples.rb
+++ b/spec/support/shared_examples/policies/wiki_policies_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'model with wiki policies' do
- include ProjectHelpers
+ include UserHelpers
include AdminModeHelper
let(:container) { raise NotImplementedError }
diff --git a/spec/support/shared_examples/projects/container_repository/cleanup_tags_service_shared_examples.rb b/spec/support/shared_examples/projects/container_repository/cleanup_tags_service_shared_examples.rb
index 9c2d30a9c8c..f7731af8dc6 100644
--- a/spec/support/shared_examples/projects/container_repository/cleanup_tags_service_shared_examples.rb
+++ b/spec/support/shared_examples/projects/container_repository/cleanup_tags_service_shared_examples.rb
@@ -1,53 +1,5 @@
# frozen_string_literal: true
-RSpec.shared_examples 'handling invalid params' do |service_response_extra: {}, supports_caching: false|
- context 'when no params are specified' do
- let(:params) { {} }
-
- it_behaves_like 'not removing anything',
- service_response_extra: service_response_extra,
- supports_caching: supports_caching
- end
-
- context 'with invalid regular expressions' do
- shared_examples 'handling an invalid regex' do
- it 'keeps all tags' do
- expect(Projects::ContainerRepository::DeleteTagsService)
- .not_to receive(:new)
- expect_no_caching unless supports_caching
-
- subject
- end
-
- it { is_expected.to eq(status: :error, message: 'invalid regex') }
-
- it 'calls error tracking service' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
-
- subject
- end
- end
-
- context 'when name_regex_delete is invalid' do
- let(:params) { { 'name_regex_delete' => '*test*' } }
-
- it_behaves_like 'handling an invalid regex'
- end
-
- context 'when name_regex is invalid' do
- let(:params) { { 'name_regex' => '*test*' } }
-
- it_behaves_like 'handling an invalid regex'
- end
-
- context 'when name_regex_keep is invalid' do
- let(:params) { { 'name_regex_keep' => '*test*' } }
-
- it_behaves_like 'handling an invalid regex'
- end
- end
-end
-
RSpec.shared_examples 'when regex matching everything is specified' do
|service_response_extra: {}, supports_caching: false, delete_expectations:|
let(:params) do
@@ -227,20 +179,6 @@ RSpec.shared_examples 'when running a container_expiration_policy' do
is_expected.to eq(expected_service_response(deleted: delete_expectations.flatten).merge(service_response_extra))
end
end
-
- context 'without container_expiration_policy param' do
- let(:params) do
- {
- 'name_regex_delete' => '.*',
- 'keep_n' => 1,
- 'older_than' => '1 day'
- }
- end
-
- it 'fails' do
- is_expected.to eq(status: :error, message: 'access denied')
- end
- end
end
RSpec.shared_examples 'not removing anything' do |service_response_extra: {}, supports_caching: false|
diff --git a/spec/support/shared_examples/quick_actions/issuable/max_issuable_examples.rb b/spec/support/shared_examples/quick_actions/issuable/max_issuable_examples.rb
new file mode 100644
index 00000000000..e725de8ad31
--- /dev/null
+++ b/spec/support/shared_examples/quick_actions/issuable/max_issuable_examples.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'does not exceed the issuable size limit' do
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+ let(:user3) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ project.add_maintainer(user1)
+ project.add_maintainer(user2)
+ project.add_maintainer(user3)
+ end
+
+ context 'when feature flag is turned on' do
+ context "when the number of users of issuable does exceed the limit" do
+ before do
+ stub_const("Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS", 2)
+ end
+
+ it 'will not add more than the allowed number of users' do
+ allow_next_instance_of(update_service) do |service|
+ expect(service).not_to receive(:execute)
+ end
+
+ note = described_class.new(project, user, opts.merge(
+ note: note_text,
+ noteable_type: noteable_type,
+ noteable_id: issuable.id,
+ confidential: false
+ )).execute
+
+ expect(note.errors[:validation]).to match_array([validation_message])
+ end
+ end
+
+ context "when the number of users does not exceed the limit" do
+ before do
+ stub_const("Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS", 6)
+ end
+
+ it 'calls execute and does not return an error' do
+ allow_next_instance_of(update_service) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ note = described_class.new(project, user, opts.merge(
+ note: note_text,
+ noteable_type: noteable_type,
+ noteable_id: issuable.id,
+ confidential: false
+ )).execute
+
+ expect(note.errors[:validation]).to be_empty
+ end
+ end
+ end
+
+ context 'when feature flag is off' do
+ before do
+ stub_feature_flags(feature_flag_hash)
+ end
+
+ context "when the number of users of issuable does exceed the limit" do
+ before do
+ stub_const("Issuable::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS", 2)
+ end
+
+ it 'will not add more than the allowed number of users' do
+ allow_next_instance_of(MergeRequests::UpdateService) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
+
+ note = described_class.new(project, user, opts.merge(
+ note: note_text,
+ noteable_type: 'MergeRequest',
+ noteable_id: issuable.id,
+ confidential: false
+ )).execute
+
+ expect(note.errors[:validation]).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb b/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb
index 017e6274cb0..59e641e2af6 100644
--- a/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb
+++ b/spec/support/shared_examples/requests/access_tokens_controller_shared_examples.rb
@@ -2,18 +2,13 @@
RSpec.shared_examples 'GET resource access tokens available' do
let_it_be(:active_resource_access_token) { create(:personal_access_token, user: bot_user) }
- let_it_be(:inactive_resource_access_token) { create(:personal_access_token, :revoked, user: bot_user) }
it 'retrieves active resource access tokens' do
subject
- expect(assigns(:active_resource_access_tokens)).to contain_exactly(active_resource_access_token)
- end
-
- it 'retrieves inactive resource access tokens' do
- subject
-
- expect(assigns(:inactive_resource_access_tokens)).to contain_exactly(inactive_resource_access_token)
+ token_entities = assigns(:active_resource_access_tokens)
+ expect(token_entities.length).to eq(1)
+ expect(token_entities[0][:name]).to eq(active_resource_access_token.name)
end
it 'lists all available scopes' do
@@ -21,15 +16,6 @@ RSpec.shared_examples 'GET resource access tokens available' do
expect(assigns(:scopes)).to eq(Gitlab::Auth.resource_bot_scopes)
end
-
- it 'retrieves newly created personal access token value' do
- token_value = 'random-value'
- allow(PersonalAccessToken).to receive(:redis_getdel).with("#{user.id}:#{resource.id}").and_return(token_value)
-
- subject
-
- expect(assigns(:new_resource_access_token)).to eq(token_value)
- end
end
RSpec.shared_examples 'POST resource access tokens available' do
@@ -37,10 +23,13 @@ RSpec.shared_examples 'POST resource access tokens available' do
PersonalAccessToken.order(:created_at).last
end
- it 'returns success message' do
+ it 'renders JSON with a token' do
subject
- expect(flash[:notice]).to match('Your new access token has been created.')
+ parsed_body = Gitlab::Json.parse(response.body)
+ expect(parsed_body['new_token']).not_to be_blank
+ expect(parsed_body['errors']).to be_blank
+ expect(response).to have_gitlab_http_status(:success)
end
it 'creates resource access token' do
@@ -59,12 +48,6 @@ RSpec.shared_examples 'POST resource access tokens available' do
expect(created_token.user).to be_project_bot
end
- it 'stores newly created token redis store' do
- expect(PersonalAccessToken).to receive(:redis_store!)
-
- subject
- end
-
it { expect { subject }.to change { User.count }.by(1) }
it { expect { subject }.to change { PersonalAccessToken.count }.by(1) }
@@ -87,10 +70,13 @@ RSpec.shared_examples 'POST resource access tokens available' do
expect { subject }.not_to change { User.count }
end
- it 'shows a failure alert' do
+ it 'renders JSON with an error' do
subject
- expect(flash[:alert]).to match("Failed to create new access token: Failed!")
+ parsed_body = Gitlab::Json.parse(response.body)
+ expect(parsed_body['new_token']).to be_blank
+ expect(parsed_body['errors']).to contain_exactly('Failed!')
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index de7032450a5..14a83d2889b 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -24,7 +24,7 @@ RSpec.shared_examples 'Debian packages upload request' do |status, body = nil|
if status == :created
it 'creates package files', :aggregate_failures do
expect(::Packages::Debian::FindOrCreateIncomingService).to receive(:new).with(container, user).and_call_original
- expect(::Packages::Debian::CreatePackageFileService).to receive(:new).with(be_a(Packages::Package), be_an(Hash)).and_call_original
+ expect(::Packages::Debian::CreatePackageFileService).to receive(:new).with(package: be_a(Packages::Package), current_user: be_an(User), params: be_an(Hash)).and_call_original
if file_name.end_with? '.changes'
expect(::Packages::Debian::ProcessChangesWorker).to receive(:perform_async)
diff --git a/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb
index 06ed0448b50..8bf6b162508 100644
--- a/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/helm_packages_shared_examples.rb
@@ -247,6 +247,15 @@ RSpec.shared_examples 'handling helm chart index requests' do
end
end
+ context 'with access to package registry for everyone' do
+ before do
+ project.update!(visibility: Gitlab::VisibilityLevel::PRIVATE)
+ project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it_behaves_like 'process helm service index request', :anonymous, :success
+ end
+
context 'when an invalid token is passed' do
let(:headers) { basic_auth_header(user.username, 'wrong') }
diff --git a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
index 013945bd578..d666a754d9f 100644
--- a/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/hooks_shared_examples.rb
@@ -134,6 +134,7 @@ RSpec.shared_examples 'web-hook API endpoints' do |prefix|
context 'the hook is backed-off' do
before do
+ WebHook::FAILURE_THRESHOLD.times { hook.backoff! }
hook.backoff!
end
diff --git a/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb b/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb
index a3378d4619b..1045a92f332 100644
--- a/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb
@@ -20,7 +20,7 @@ RSpec.shared_examples 'issuable update endpoint' do
end
it 'updates the issuable with labels param as array' do
- stub_const("Gitlab::QueryLimiting::Transaction::THRESHOLD", 110)
+ allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(110)
params = { labels: ['label1', 'label2', 'foo, bar', '&,?'] }
diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
index b651ffc8996..85ac2b5e1ea 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
@@ -260,7 +260,11 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
project.send("add_#{user_role}", user) if user_role
project.update!(visibility: visibility.to_s)
package.update!(name: package_name) unless package_name == 'non-existing-package'
- allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
+ if scope == :instance
+ allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
+ else
+ allow_fetch_cascade_application_setting(attribute: "npm_package_requests_forwarding", return_value: request_forward)
+ end
end
example_name = "#{params[:expected_result]} metadata request"
diff --git a/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb
index 86b6975bf9f..1d79a61fbb0 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_tags_shared_examples.rb
@@ -14,6 +14,7 @@ RSpec.shared_examples 'accept package tags request' do |status:|
before do
allow_fetch_application_setting(attribute: "npm_package_requests_forwarding", return_value: false)
+ allow_fetch_cascade_application_setting(attribute: "npm_package_requests_forwarding", return_value: false)
end
context 'with valid package name' do
diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
index f411b5699a9..11e19d8d067 100644
--- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
@@ -291,7 +291,7 @@ RSpec.shared_examples 'pypi simple API endpoint' do
end
before do
- allow_fetch_application_setting(attribute: "pypi_package_requests_forwarding", return_value: forward)
+ allow_fetch_cascade_application_setting(attribute: "pypi_package_requests_forwarding", return_value: forward)
end
it_behaves_like params[:shared_examples_name], :reporter, params[:expected_status]
diff --git a/spec/support/shared_examples/requests/projects/google_cloud/google_oauth2_token_examples.rb b/spec/support/shared_examples/requests/projects/google_cloud/google_oauth2_token_examples.rb
index 379327be0db..e2e2658d803 100644
--- a/spec/support/shared_examples/requests/projects/google_cloud/google_oauth2_token_examples.rb
+++ b/spec/support/shared_examples/requests/projects/google_cloud/google_oauth2_token_examples.rb
@@ -18,6 +18,7 @@ RSpec.shared_examples 'requires valid Google Oauth2 token' do
allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
allow(client).to receive(:validate_token).and_return(true)
allow(client).to receive(:list_projects).and_return(mock_gcp_projects) if mock_gcp_projects
+ allow(client).to receive(:create_cloudsql_instance)
end
allow_next_instance_of(BranchesFinder) do |finder|
diff --git a/spec/support/shared_examples/serializers/issuable_current_user_properties_shared_examples.rb b/spec/support/shared_examples/serializers/issuable_current_user_properties_shared_examples.rb
new file mode 100644
index 00000000000..6c285bfba91
--- /dev/null
+++ b/spec/support/shared_examples/serializers/issuable_current_user_properties_shared_examples.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'issuable entity current_user properties' do
+ describe 'can_create_confidential_note' do
+ subject do
+ described_class.new(resource, request: request)
+ .as_json[:current_user][:can_create_confidential_note]
+ end
+
+ context 'when user can create confidential notes' do
+ before do
+ resource.resource_parent.add_reporter(user)
+ end
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when user cannot create confidential notes' do
+ it { is_expected.to eq(false) }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/base_rpm_service_shared_examples.rb b/spec/support/shared_examples/services/base_rpm_service_shared_examples.rb
new file mode 100644
index 00000000000..c9520852a5b
--- /dev/null
+++ b/spec/support/shared_examples/services/base_rpm_service_shared_examples.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'handling rpm xml file' do
+ include_context 'with rpm package data'
+
+ let(:xml) { nil }
+ let(:data) { {} }
+
+ context 'when generating empty xml' do
+ it 'generates expected xml' do
+ expect(subject).to eq(empty_xml)
+ end
+ end
+
+ context 'when updating existing xml' do
+ let(:xml) { empty_xml }
+ let(:data) { xml_update_params }
+
+ shared_examples 'changing root tag attribute' do
+ it "increments previous 'packages' value by 1" do
+ previous_value = Nokogiri::XML(xml).at(described_class::ROOT_TAG).attributes["packages"].value.to_i
+ new_value = Nokogiri::XML(subject).at(described_class::ROOT_TAG).attributes["packages"].value.to_i
+
+ expect(previous_value + 1).to eq(new_value)
+ end
+ end
+
+ it 'generates valid xml and adds expected xml node to existing xml' do
+ # Have one root attribute
+ result = Nokogiri::XML::Document.parse(subject).remove_namespaces!
+ expect(result.children.count).to eq(1)
+
+ # Root node has 1 child with generated node
+ expect(result.xpath("//#{described_class::ROOT_TAG}/package").count).to eq(1)
+ end
+
+ context 'when empty xml' do
+ it_behaves_like 'changing root tag attribute'
+ end
+
+ context 'when xml has children' do
+ let(:xml) { described_class.new(xml: empty_xml, data: data).execute }
+
+ it 'has children nodes' do
+ result = Nokogiri::XML::Document.parse(xml).remove_namespaces!
+ expect(result.children.count).to be > 0
+ end
+
+ it_behaves_like 'changing root tag attribute'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/merge_request_shared_examples.rb b/spec/support/shared_examples/services/merge_request_shared_examples.rb
index b3ba0a1be93..cfd75d3cfcd 100644
--- a/spec/support/shared_examples/services/merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/services/merge_request_shared_examples.rb
@@ -19,29 +19,13 @@ RSpec.shared_examples 'reviewer_ids filter' do
let(:reviewer2) { create(:user) }
context 'when the current user can admin the merge_request' do
- context 'when merge_request_reviewer feature is enabled' do
+ context 'with a reviewer who can read the merge_request' do
before do
- stub_feature_flags(merge_request_reviewer: true)
+ project.add_developer(reviewer1)
end
- context 'with a reviewer who can read the merge_request' do
- before do
- project.add_developer(reviewer1)
- end
-
- it 'contains reviewers who can read the merge_request' do
- expect(execute.reviewers).to contain_exactly(reviewer1)
- end
- end
- end
-
- context 'when merge_request_reviewer feature is disabled' do
- before do
- stub_feature_flags(merge_request_reviewer: false)
- end
-
- it 'contains no reviewers' do
- expect(execute.reviewers).to eq []
+ it 'contains reviewers who can read the merge_request' do
+ expect(execute.reviewers).to contain_exactly(reviewer1)
end
end
end
diff --git a/spec/support/shared_examples/services/reviewers_change_trigger_shared_examples.rb b/spec/support/shared_examples/services/reviewers_change_trigger_shared_examples.rb
new file mode 100644
index 00000000000..cc37ea0c6f0
--- /dev/null
+++ b/spec/support/shared_examples/services/reviewers_change_trigger_shared_examples.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'triggers GraphQL subscription mergeRequestReviewersUpdated' do
+ specify do
+ expect(GraphqlTriggers).to receive(:merge_request_reviewers_updated).with(merge_request)
+
+ action
+ end
+end
+
+RSpec.shared_examples 'does not trigger GraphQL subscription mergeRequestReviewersUpdated' do
+ specify do
+ expect(GraphqlTriggers).not_to receive(:merge_request_reviewers_updated)
+
+ action
+ end
+end
diff --git a/spec/support/view_component.rb b/spec/support/view_component.rb
index 9166a06fc8c..912bfda6d33 100644
--- a/spec/support/view_component.rb
+++ b/spec/support/view_component.rb
@@ -4,4 +4,11 @@ require 'view_component/test_helpers'
RSpec.configure do |config|
config.include ViewComponent::TestHelpers, type: :component
config.include Capybara::RSpecMatchers, type: :component
+ config.include Devise::Test::ControllerHelpers, type: :component
+
+ config.before(:each, type: :component) do
+ @request = controller.request
+ end
+
+ config.include_context 'when page has no HTML escapes', type: :component
end
diff --git a/spec/support_specs/capybara_slow_finder_spec.rb b/spec/support_specs/capybara_slow_finder_spec.rb
new file mode 100644
index 00000000000..b0438a7a78b
--- /dev/null
+++ b/spec/support_specs/capybara_slow_finder_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'capybara'
+require 'support/capybara_slow_finder'
+
+RSpec.describe Capybara::Node::Base::SlowFinder do # rubocop:disable RSpec/FilePath
+ context 'without timeout' do
+ context 'when element is found' do
+ let(:slow_finder) do
+ Class.new do
+ def synchronize(seconds = nil, errors: nil)
+ true
+ end
+
+ prepend Capybara::Node::Base::SlowFinder
+ end.new
+ end
+
+ it 'does not raise error' do
+ expect { slow_finder.synchronize }.not_to raise_error
+ end
+ end
+
+ context 'when element is not found' do
+ let(:slow_finder) do
+ Class.new do
+ def synchronize(seconds = nil, errors: nil)
+ raise Capybara::ElementNotFound
+ end
+
+ prepend Capybara::Node::Base::SlowFinder
+ end.new
+ end
+
+ it 'raises Capybara::ElementNotFound error' do
+ expect { slow_finder.synchronize }.to raise_error(Capybara::ElementNotFound)
+ end
+ end
+ end
+
+ context 'with timeout' do
+ let(:timeout) { 0.01 }
+
+ let(:slow_finder) do
+ Class.new do
+ def synchronize(seconds = nil, errors: nil)
+ sleep 0.02
+
+ raise Capybara::ElementNotFound
+ end
+
+ prepend Capybara::Node::Base::SlowFinder
+ end.new
+ end
+
+ context 'with default timeout' do
+ it 'raises a timeout error' do
+ expect(Capybara).to receive(:default_max_wait_time).and_return(timeout)
+
+ expect { slow_finder.synchronize }.to raise_error_element_not_found
+ end
+ end
+
+ context 'when passed as parameter' do
+ it 'raises a timeout error' do
+ expect { slow_finder.synchronize(timeout) }.to raise_error_element_not_found
+ end
+ end
+
+ def raise_error_element_not_found
+ raise_error(
+ Capybara::ElementNotFound,
+ /\n\nTimeout \(#{timeout}s\) reached while running a waiting Capybara finder./
+ )
+ end
+ end
+end
diff --git a/spec/support_specs/database/multiple_databases_spec.rb b/spec/support_specs/database/multiple_databases_spec.rb
index b4cfa253813..0b019462077 100644
--- a/spec/support_specs/database/multiple_databases_spec.rb
+++ b/spec/support_specs/database/multiple_databases_spec.rb
@@ -3,6 +3,28 @@
require 'spec_helper'
RSpec.describe 'Database::MultipleDatabases' do
+ let(:query) do
+ <<~SQL
+ WITH cte AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (SELECT 1) SELECT 1;
+ SQL
+ end
+
+ it 'preloads database version for ApplicationRecord' do
+ counts = ActiveRecord::QueryRecorder
+ .new { ApplicationRecord.connection.execute(query) }
+ .count
+
+ expect(counts).to eq(1)
+ end
+
+ it 'preloads database version for Ci::ApplicationRecord' do
+ counts = ActiveRecord::QueryRecorder
+ .new { Ci::ApplicationRecord.connection.execute(query) }
+ .count
+
+ expect(counts).to eq(1)
+ end
+
describe '.with_reestablished_active_record_base' do
context 'when doing establish_connection' do
context 'on ActiveRecord::Base' do
diff --git a/spec/support_specs/helpers/graphql_helpers_spec.rb b/spec/support_specs/helpers/graphql_helpers_spec.rb
index c02e4adf983..12a6e561257 100644
--- a/spec/support_specs/helpers/graphql_helpers_spec.rb
+++ b/spec/support_specs/helpers/graphql_helpers_spec.rb
@@ -133,6 +133,23 @@ RSpec.describe GraphqlHelpers do
expect(graphql_dig_at(data, :foo, :nodes, :bar, :nodes, :id)).to eq([nil, 2, 3, nil])
end
+
+ it 'supports fields with leading underscore' do
+ web_path = '/namespace1/project1/-/packages/997'
+ data = {
+ 'packages' => {
+ 'nodes' => [
+ {
+ '_links' => {
+ 'webPath' => web_path
+ }
+ }
+ ]
+ }
+ }
+
+ expect(graphql_dig_at(data, :packages, :nodes, :_links, :web_path)).to match_array([web_path])
+ end
end
describe 'var' do
@@ -417,4 +434,22 @@ RSpec.describe GraphqlHelpers do
end
end
end
+
+ describe '.fieldnamerize' do
+ subject { described_class.fieldnamerize(field) }
+
+ let(:field) { 'merge_request' }
+
+ it 'makes an underscored string look like a fieldname' do
+ is_expected.to eq('mergeRequest')
+ end
+
+ context 'when field has a leading underscore' do
+ let(:field) { :_links }
+
+ it 'skips a transformation' do
+ is_expected.to eq('_links')
+ end
+ end
+ end
end
diff --git a/spec/support_specs/helpers/html_escaped_helpers_spec.rb b/spec/support_specs/helpers/html_escaped_helpers_spec.rb
index 337f7ecc659..77ca6231881 100644
--- a/spec/support_specs/helpers/html_escaped_helpers_spec.rb
+++ b/spec/support_specs/helpers/html_escaped_helpers_spec.rb
@@ -40,4 +40,33 @@ RSpec.describe HtmlEscapedHelpers do
specify { expect(actual_match).to eq(expected_match) }
end
end
+
+ describe '#ensure_no_html_escaped_tags!' do
+ subject { |example| described_class.ensure_no_html_escaped_tags!(content, example) }
+
+ context 'when content contains HTML escaped chars' do
+ let(:content) { 'See &lt;a href=""&gt;Link&lt;/a&gt;' }
+
+ it 'raises an exception' do
+ parts = [
+ 'The following string contains HTML escaped tags:',
+ 'See «&lt;a» href=""&gt;Link&lt;/a&gt;',
+ 'This check can be disabled via:',
+ %(it "raises an exception", :skip_html_escaped_tags_check do)
+ ]
+
+ regexp = Regexp.new(parts.join('.*'), Regexp::MULTILINE)
+
+ expect { subject }.to raise_error(regexp)
+ end
+ end
+
+ context 'when content does not contain HTML escaped tags' do
+ let(:content) { 'See <a href="">Link</a>' }
+
+ it 'does not raise anything' do
+ expect(subject).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/support_specs/helpers/stub_method_calls_spec.rb b/spec/support_specs/helpers/stub_method_calls_spec.rb
index 837a2162bcd..7a842f83cd2 100644
--- a/spec/support_specs/helpers/stub_method_calls_spec.rb
+++ b/spec/support_specs/helpers/stub_method_calls_spec.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+
+require_relative '../../support/helpers/stub_method_calls'
RSpec.describe StubMethodCalls do
include described_class
diff --git a/spec/support_specs/matchers/event_store_spec.rb b/spec/support_specs/matchers/event_store_spec.rb
new file mode 100644
index 00000000000..3614d05fde8
--- /dev/null
+++ b/spec/support_specs/matchers/event_store_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'json_schemer'
+
+load File.expand_path('../../../spec/support/matchers/event_store.rb', __dir__)
+
+RSpec.describe 'event store matchers', :aggregate_failures do
+ let(:event_type1) do
+ Class.new(Gitlab::EventStore::Event) do
+ def schema
+ {
+ 'type' => 'object',
+ 'properties' => {
+ 'id' => { 'type' => 'integer' }
+ },
+ 'required' => %w[id]
+ }
+ end
+ end
+ end
+
+ let(:event_type2) do
+ Class.new(Gitlab::EventStore::Event) do
+ def schema
+ {
+ 'type' => 'object',
+ 'properties' => {
+ 'id' => { 'type' => 'integer' }
+ },
+ 'required' => %w[id]
+ }
+ end
+ end
+ end
+
+ before do
+ stub_const('FakeEventType1', event_type1)
+ stub_const('FakeEventType2', event_type2)
+ end
+
+ def publishing_event(event_type, data = {})
+ ::Gitlab::EventStore.publish(event_type.new(data: data))
+ end
+
+ describe 'publish_event' do
+ it 'requires a block matcher' do
+ matcher = -> { expect(:anything).to publish_event(:anything) } # rubocop: disable RSpec/ExpectActual
+
+ expect(&matcher).to raise_error(
+ ArgumentError,
+ 'publish_event matcher only supports block expectation'
+ )
+ end
+
+ it 'validates the event type' do
+ valid_event_type = -> do
+ expect { publishing_event(FakeEventType1, { 'id' => 1 }) }
+ .to publish_event(FakeEventType1).with('id' => 1)
+ end
+
+ expect(&valid_event_type).not_to raise_error
+
+ invalid_event_type = -> do
+ expect { publishing_event(FakeEventType1, { 'id' => 1 }) }
+ .to publish_event(FakeEventType2).with('id' => 1)
+ end
+
+ expect(&invalid_event_type).to raise_error <<~MESSAGE
+ expected FakeEventType2 with {"id"=>1} to be published, but only the following events were published:
+ - FakeEventType1 with {"id"=>1}
+ MESSAGE
+ end
+
+ it 'validates the event data' do
+ missing_data = -> do
+ expect { publishing_event(FakeEventType1, { 'id' => 1 }) }
+ .to publish_event(FakeEventType1)
+ end
+
+ expect(&missing_data).to raise_error <<~MESSAGE
+ expected FakeEventType1 with no data to be published, but only the following events were published:
+ - FakeEventType1 with {"id"=>1}
+ MESSAGE
+
+ different_data = -> do
+ expect { publishing_event(FakeEventType1, { 'id' => 1 }) }
+ .to publish_event(FakeEventType1).with({ 'id' => 2 })
+ end
+
+ expect(&different_data).to raise_error <<~MESSAGE
+ expected FakeEventType1 with {"id"=>2} to be published, but only the following events were published:
+ - FakeEventType1 with {"id"=>1}
+ MESSAGE
+ end
+ end
+
+ describe 'not_publish_event' do
+ it 'requires a block matcher' do
+ matcher = -> { expect(:anything).to not_publish_event(:anything) } # rubocop: disable RSpec/ExpectActual
+
+ expect(&matcher)
+ .to raise_error(ArgumentError, 'not_publish_event matcher only supports block expectation')
+ end
+
+ it 'does not permit .with' do
+ matcher = -> do
+ expect { publishing_event(FakeEventType1, { 'id' => 1 }) }
+ .to not_publish_event(FakeEventType2).with({ 'id' => 1 })
+ end
+
+ expect(&matcher)
+ .to raise_error(ArgumentError, 'not_publish_event does not permit .with to avoid ambiguity')
+ end
+
+ it 'validates the event type' do
+ matcher = -> do
+ expect { publishing_event(FakeEventType1, { 'id' => 1 }) }
+ .to not_publish_event(FakeEventType1)
+ end
+
+ expect(&matcher)
+ .to raise_error('expected FakeEventType1 not to be published')
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index dc112b885ae..dc74f25db87 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -4,9 +4,10 @@ require 'rake_helper'
RSpec.describe 'gitlab:app namespace rake task', :delete do
let(:enable_registry) { true }
- let(:backup_tasks) { %w{db repo uploads builds artifacts pages lfs terraform_state registry packages} }
+ let(:backup_restore_pid_path) { "#{Rails.application.root}/tmp/backup_restore.pid" }
+ let(:backup_tasks) { %w[db repo uploads builds artifacts pages lfs terraform_state registry packages] }
let(:backup_types) do
- %w{main_db repositories uploads builds artifacts pages lfs terraform_state registry packages}.tap do |array|
+ %w[main_db repositories uploads builds artifacts pages lfs terraform_state registry packages].tap do |array|
array.insert(1, 'ci_db') if Gitlab::Database.has_config?(:ci)
end
end
@@ -20,11 +21,19 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
def backup_files
- %w(backup_information.yml artifacts.tar.gz builds.tar.gz lfs.tar.gz terraform_state.tar.gz pages.tar.gz packages.tar.gz)
+ %w[
+ backup_information.yml
+ artifacts.tar.gz
+ builds.tar.gz
+ lfs.tar.gz
+ terraform_state.tar.gz
+ pages.tar.gz
+ packages.tar.gz
+ ]
end
def backup_directories
- %w(db repositories)
+ %w[db repositories]
end
before(:all) do
@@ -58,11 +67,88 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
+ describe 'lock parallel backups' do
+ using RSpec::Parameterized::TableSyntax
+
+ context 'when a process is running' do
+ let(:pid_file) { instance_double(File) }
+
+ it 'exits the new process' do
+ allow(File).to receive(:open).and_call_original
+ allow(File).to receive(:open).with(backup_restore_pid_path, any_args).and_yield(pid_file)
+ allow(pid_file).to receive(:read).and_return('123456')
+ allow(pid_file).to receive(:flock).with(any_args)
+
+ expect { run_rake_task('gitlab:backup:create') }.to raise_error(SystemExit).and output(
+ <<~HEREDOC
+ Backup and restore in progress:
+ There is a backup and restore task in progress. Please, try to run the current task once the previous one ends.
+ If there is no other process running, please remove the PID file manually: rm #{backup_restore_pid_path}
+ HEREDOC
+ ).to_stdout
+ end
+ end
+
+ context 'when no processes are running' do
+ let(:progress) { $stdout }
+ let(:pid_file) { instance_double(File, write: 12345) }
+
+ where(:tasks_name, :rake_task) do
+ %w[main_db ci_db] | 'gitlab:backup:db:restore'
+ 'repositories' | 'gitlab:backup:repo:restore'
+ 'builds' | 'gitlab:backup:builds:restore'
+ 'uploads' | 'gitlab:backup:uploads:restore'
+ 'artifacts' | 'gitlab:backup:artifacts:restore'
+ 'pages' | 'gitlab:backup:pages:restore'
+ 'lfs' | 'gitlab:backup:lfs:restore'
+ 'terraform_state' | 'gitlab:backup:terraform_state:restore'
+ 'registry' | 'gitlab:backup:registry:restore'
+ 'packages' | 'gitlab:backup:packages:restore'
+ end
+
+ with_them do
+ before do
+ allow(Kernel).to receive(:system).and_return(true)
+ allow(YAML).to receive(:load_file).and_return({ gitlab_version: Gitlab::VERSION })
+ allow(File).to receive(:delete).with(backup_restore_pid_path).and_return(1)
+ allow(File).to receive(:open).and_call_original
+ allow(File).to receive(:open).with(backup_restore_pid_path, any_args).and_yield(pid_file)
+ allow(pid_file).to receive(:read).and_return('')
+ allow(pid_file).to receive(:flock).with(any_args)
+ allow(pid_file).to receive(:write).with(12345).and_return(true)
+ allow(pid_file).to receive(:flush)
+ allow(progress).to receive(:puts).at_least(:once)
+
+ allow_next_instance_of(::Backup::Manager) do |instance|
+ Array(tasks_name).each do |task|
+ allow(instance).to receive(:run_restore_task).with(task)
+ end
+ end
+ end
+
+ it 'locks the PID file' do
+ expect(pid_file).to receive(:flock).with(File::LOCK_EX)
+ expect(pid_file).to receive(:flock).with(File::LOCK_UN)
+
+ run_rake_task(rake_task)
+ end
+
+ it 'deletes the PID file and logs a message' do
+ expect(File).to receive(:delete).with(backup_restore_pid_path)
+ expect(progress).to receive(:puts).with(/-- Deleting backup and restore lock file/)
+
+ run_rake_task(rake_task)
+ end
+ end
+ end
+ end
+
describe 'backup_restore' do
- context 'gitlab version' do
+ context 'with gitlab version' do
before do
allow(Dir).to receive(:glob).and_return(['1_gitlab_backup.tar'])
allow(File).to receive(:exist?).and_return(true)
+ allow(File).to receive(:exist?).with(backup_restore_pid_path).and_return(false)
allow(Kernel).to receive(:system).and_return(true)
allow(FileUtils).to receive(:cp_r).and_return(true)
allow(FileUtils).to receive(:mv).and_return(true)
@@ -72,7 +158,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
let(:gitlab_version) { Gitlab::VERSION }
- context 'restore with matching gitlab version' do
+ context 'when restore matches gitlab version' do
before do
allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: gitlab_version })
@@ -124,6 +210,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
backup_tar = Dir.glob(File.join(Gitlab.config.backup.path, '*_gitlab_backup.tar')).last
allow(Dir).to receive(:glob).and_return([backup_tar])
allow(File).to receive(:exist?).and_return(true)
+ allow(File).to receive(:exist?).with(backup_restore_pid_path).and_return(false)
allow(Kernel).to receive(:system).and_return(true)
allow(FileUtils).to receive(:cp_r).and_return(true)
allow(FileUtils).to receive(:mv).and_return(true)
@@ -161,74 +248,42 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
let!(:project) { create(:project, :repository) }
- describe 'backup creation and deletion using custom_hooks' do
- let(:user_backup_path) { "repositories/#{project.disk_path}" }
-
+ context 'with specific backup tasks' do
before do
stub_env('SKIP', 'db')
- path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(project.repository.path_to_repo, 'custom_hooks')
- end
- FileUtils.mkdir_p(path)
- FileUtils.touch(File.join(path, "dummy.txt"))
end
- context 'project uses custom_hooks and successfully creates backup' do
- it 'creates custom_hooks.tar and project bundle' do
- expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
-
- tar_contents, exit_status = Gitlab::Popen.popen(%W{tar -tvf #{backup_tar}})
-
- expect(exit_status).to eq(0)
- expect(tar_contents).to match(user_backup_path)
- expect(tar_contents).to match("#{user_backup_path}/.+/001.custom_hooks.tar")
- expect(tar_contents).to match("#{user_backup_path}/.+/001.bundle")
- end
-
- it 'restores files correctly' do
- expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
- expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout_from_any_process
-
- repo_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path
- end
- expect(Dir.entries(File.join(repo_path, 'custom_hooks'))).to include("dummy.txt")
+ it 'prints a progress message to stdout' do
+ backup_tasks.each do |task|
+ expect { run_rake_task("gitlab:backup:#{task}:create") }.to output(/Dumping /).to_stdout_from_any_process
end
end
- context 'specific backup tasks' do
- it 'prints a progress message to stdout' do
- backup_tasks.each do |task|
- expect { run_rake_task("gitlab:backup:#{task}:create") }.to output(/Dumping /).to_stdout_from_any_process
- end
- end
-
- it 'logs the progress to log file' do
- ci_database_status = Gitlab::Database.has_config?(:ci) ? "[SKIPPED]" : "[DISABLED]"
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping main_database ... [SKIPPED]")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci_database ... #{ci_database_status}")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping uploads ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping uploads ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping builds ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping builds ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping artifacts ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping artifacts ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping pages ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping pages ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping lfs objects ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping lfs objects ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping terraform states ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping terraform states ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping container registry images ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping container registry images ... done")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... ")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... done")
-
- backup_tasks.each do |task|
- run_rake_task("gitlab:backup:#{task}:create")
- end
+ it 'logs the progress to log file' do
+ ci_database_status = Gitlab::Database.has_config?(:ci) ? "[SKIPPED]" : "[DISABLED]"
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping main_database ... [SKIPPED]")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci_database ... #{ci_database_status}")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping uploads ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping uploads ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping builds ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping builds ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping artifacts ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping artifacts ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping pages ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping pages ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping lfs objects ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping lfs objects ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping terraform states ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping terraform states ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping container registry images ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping container registry images ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... done")
+
+ backup_tasks.each do |task|
+ run_rake_task("gitlab:backup:#{task}:create")
end
end
end
@@ -264,18 +319,18 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
- context 'tar creation' do
- context 'archive file permissions' do
+ context 'with tar creation' do
+ context 'with archive file permissions' do
it 'sets correct permissions on the tar file' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
- expect(File.exist?(backup_tar)).to be_truthy
+ expect(File).to exist(backup_tar)
expect(File::Stat.new(backup_tar).mode.to_s(8)).to eq('100600')
end
context 'with custom archive_permissions' do
before do
- allow(Gitlab.config.backup).to receive(:archive_permissions).and_return(0651)
+ allow(Gitlab.config.backup).to receive(:archive_permissions).and_return(0o651)
end
it 'uses the custom permissions' do
@@ -290,11 +345,21 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz registry.tar.gz packages.tar.gz}
+ %W[
+ tar -tvf #{backup_tar}
+ db
+ uploads.tar.gz
+ repositories
+ builds.tar.gz
+ artifacts.tar.gz
+ pages.tar.gz
+ lfs.tar.gz
+ terraform_state.tar.gz
+ registry.tar.gz
+ packages.tar.gz
+ ]
)
- puts "CONTENT: #{tar_contents}"
-
expect(exit_status).to eq(0)
expect(tar_contents).to match('db')
expect(tar_contents).to match('uploads.tar.gz')
@@ -306,27 +371,31 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect(tar_contents).to match('terraform_state.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).to match('packages.tar.gz')
- expect(tar_contents).not_to match(%r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$})
+ expect(tar_contents).not_to match(%r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|
+ pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$})
end
it 'deletes temp directories' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
temp_dirs = Dir.glob(
- File.join(Gitlab.config.backup.path, '{db,repositories,uploads,builds,artifacts,pages,lfs,terraform_state,registry,packages}')
+ File.join(
+ Gitlab.config.backup.path,
+ '{db,repositories,uploads,builds,artifacts,pages,lfs,terraform_state,registry,packages}'
+ )
)
expect(temp_dirs).to be_empty
end
- context 'registry disabled' do
+ context 'when registry is disabled' do
let(:enable_registry) { false }
it 'does not create registry.tar.gz' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar}}
+ %W[tar -tvf #{backup_tar}]
)
expect(exit_status).to eq(0)
@@ -335,7 +404,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
- context 'multiple repository storages' do
+ context 'with multiple repository storages' do
include StubConfiguration
let(:default_storage_name) { 'default' }
@@ -344,10 +413,10 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,terraform_state,registry')
- stub_storage_settings( second_storage_name => {
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
- 'path' => TestEnv::SECOND_STORAGE_PATH
- })
+ stub_storage_settings(second_storage_name => {
+ 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
+ 'path' => TestEnv::SECOND_STORAGE_PATH
+ })
end
shared_examples 'includes repositories in all repository storages' do
@@ -368,27 +437,27 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar} repositories}
+ %W[tar -tvf #{backup_tar} repositories]
)
tar_lines = tar_contents.lines.grep(/\.bundle/)
expect(exit_status).to eq(0)
- [
- "#{project_a.disk_path}/.+/001.bundle",
- "#{project_a.disk_path}.wiki/.+/001.bundle",
- "#{project_a.disk_path}.design/.+/001.bundle",
- "#{project_b.disk_path}/.+/001.bundle",
- "#{project_snippet_a.disk_path}/.+/001.bundle",
- "#{project_snippet_b.disk_path}/.+/001.bundle"
+ %W[
+ #{project_a.disk_path}/.+/001.bundle
+ #{project_a.disk_path}.wiki/.+/001.bundle
+ #{project_a.disk_path}.design/.+/001.bundle
+ #{project_b.disk_path}/.+/001.bundle
+ #{project_snippet_a.disk_path}/.+/001.bundle
+ #{project_snippet_b.disk_path}/.+/001.bundle
].each do |repo_name|
expect(tar_lines).to include(a_string_matching(repo_name))
end
end
end
- context 'no concurrency' do
+ context 'with no concurrency' do
it_behaves_like 'includes repositories in all repository storages'
end
@@ -400,7 +469,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
it_behaves_like 'includes repositories in all repository storages'
end
- context 'REPOSITORIES_STORAGES set' do
+ context 'when REPOSITORIES_STORAGES is set' do
before do
stub_env('REPOSITORIES_STORAGES', default_storage_name)
end
@@ -422,25 +491,25 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar} repositories}
+ %W[tar -tvf #{backup_tar} repositories]
)
tar_lines = tar_contents.lines.grep(/\.bundle/)
expect(exit_status).to eq(0)
- [
- "#{project_a.disk_path}/.+/001.bundle",
- "#{project_a.disk_path}.wiki/.+/001.bundle",
- "#{project_a.disk_path}.design/.+/001.bundle",
- "#{project_snippet_a.disk_path}/.+/001.bundle"
+ %W[
+ #{project_a.disk_path}/.+/001.bundle
+ #{project_a.disk_path}.wiki/.+/001.bundle
+ #{project_a.disk_path}.design/.+/001.bundle
+ #{project_snippet_a.disk_path}/.+/001.bundle
].each do |repo_name|
expect(tar_lines).to include(a_string_matching(repo_name))
end
- [
- "#{project_b.disk_path}/.+/001.bundle",
- "#{project_snippet_b.disk_path}/.+/001.bundle"
+ %W[
+ #{project_b.disk_path}/.+/001.bundle
+ #{project_snippet_b.disk_path}/.+/001.bundle
].each do |repo_name|
expect(tar_lines).not_to include(a_string_matching(repo_name))
end
@@ -448,7 +517,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
end
- context 'concurrency settings' do
+ context 'with concurrency settings' do
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,terraform_state,registry')
@@ -463,13 +532,18 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect(::Backup::Repositories).to receive(:new)
.with(anything, strategy: anything, storages: [], paths: [])
.and_call_original
- expect(::Backup::GitalyBackup).to receive(:new).with(anything, max_parallelism: 5, storage_parallelism: 2, incremental: false).and_call_original
+ expect(::Backup::GitalyBackup).to receive(:new).with(
+ anything,
+ max_parallelism: 5,
+ storage_parallelism: 2,
+ incremental: false
+ ).and_call_original
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
end
end
- context 'CRON env is set' do
+ context 'when CRON env is set' do
before do
stub_env('CRON', '1')
end
@@ -481,7 +555,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
# backup_create task
- describe "Skipping items in a backup" do
+ describe "skipping items in a backup" do
before do
stub_env('SKIP', 'an-unknown-type,repositories,uploads,anotherunknowntype')
@@ -492,7 +566,19 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process
tar_contents, _exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz registry.tar.gz packages.tar.gz}
+ %W[
+ tar -tvf #{backup_tar}
+ db
+ uploads.tar.gz
+ repositories
+ builds.tar.gz
+ artifacts.tar.gz
+ pages.tar.gz
+ lfs.tar.gz
+ terraform_state.tar.gz
+ registry.tar.gz
+ packages.tar.gz
+ ]
)
expect(tar_contents).to match('db/')
@@ -515,7 +601,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
.to receive(:invoke).and_return(true)
expect_next_instance_of(::Backup::Manager) do |instance|
- (backup_types - %w{repositories uploads}).each do |subtask|
+ (backup_types - %w[repositories uploads]).each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered
end
expect(instance).not_to receive(:run_restore_task)
diff --git a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
index d03e15224cb..ebea644bbf0 100644
--- a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
+++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
@@ -89,6 +89,26 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
end
end
+ context 'when running in dry_run mode' do
+ before do
+ stub_env('DRY_RUN', 'true')
+ end
+
+ it 'allows writes on the main tables on the ci database' do
+ run_rake_task('gitlab:db:lock_writes')
+ expect do
+ ci_connection.execute("delete from projects")
+ end.not_to raise_error
+ end
+
+ it 'allows writes on the ci tables on the main database' do
+ run_rake_task('gitlab:db:lock_writes')
+ expect do
+ main_connection.execute("delete from ci_builds")
+ end.not_to raise_error
+ end
+ end
+
context 'multiple shared databases' do
before do
allow(::Gitlab::Database).to receive(:db_config_share_with).and_return(nil)
diff --git a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
index f9ebb985255..e95c2e241a8 100644
--- a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
+++ b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
@@ -43,10 +43,6 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish
end
shared_examples 'truncating legacy tables' do
- before do
- allow(ENV).to receive(:[]).and_return(nil)
- end
-
context 'when tables are not locked for writes' do
it 'raises an error when trying to truncate the tables' do
error_message = /is not locked for writes. Run the rake task gitlab:db:lock_writes first/
@@ -97,7 +93,7 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish
context 'when running in dry_run mode' do
before do
- allow(ENV).to receive(:[]).with("DRY_RUN").and_return("true")
+ stub_env('DRY_RUN', 'true')
end
it 'does not truncate any tables' do
@@ -115,7 +111,7 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish
context 'when passing until_table parameter via environment variable' do
before do
- allow(ENV).to receive(:[]).with("UNTIL_TABLE").and_return(legacy_table)
+ stub_env('UNTIL_TABLE', legacy_table)
end
it 'sends the table name to TablesTruncate' do
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 8f8178cde4d..08bec9fda78 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -665,21 +665,15 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
end
describe '#migrate_with_instrumentation' do
- describe '#up' do
- subject { run_rake_task('gitlab:db:migration_testing:up') }
-
- it 'delegates to the migration runner' do
- expect(::Gitlab::Database::Migrations::Runner).to receive_message_chain(:up, :run)
+ let(:runner) { instance_double(::Gitlab::Database::Migrations::Runner) }
- subject
- end
- end
-
- describe '#down' do
- subject { run_rake_task('gitlab:db:migration_testing:down') }
+ describe '#up (legacy mode)' do
+ subject { run_rake_task('gitlab:db:migration_testing:up') }
- it 'delegates to the migration runner' do
- expect(::Gitlab::Database::Migrations::Runner).to receive_message_chain(:down, :run)
+ it 'delegates to the migration runner in legacy mode' do
+ expect(::Gitlab::Database::Migrations::Runner).to receive(:up).with(database: 'main', legacy_mode: true)
+ .and_return(runner)
+ expect(runner).to receive(:run)
subject
end
@@ -699,31 +693,51 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
end
end
- describe '#sample_batched_background_migrations' do
- let(:batched_runner) { instance_double(::Gitlab::Database::Migrations::TestBatchedBackgroundRunner) }
+ where(:db) do
+ Gitlab::Database::DATABASE_NAMES.map(&:to_sym)
+ end
+
+ with_them do
+ describe '#up' do
+ subject { run_rake_task("gitlab:db:migration_testing:up:#{db}") }
- it 'delegates to the migration runner for the main database with a default sample duration' do
- expect(::Gitlab::Database::Migrations::Runner).to receive(:batched_background_migrations)
- .with(for_database: 'main').and_return(batched_runner)
- expect(batched_runner).to receive(:run_jobs).with(for_duration: 30.minutes)
+ it 'delegates to the migration runner' do
+ expect(::Gitlab::Database::Migrations::Runner).to receive(:up).with(database: db).and_return(runner)
+ expect(runner).to receive(:run)
- run_rake_task('gitlab:db:migration_testing:sample_batched_background_migrations')
+ subject
+ end
end
- it 'delegates to the migration runner for a specified database with a default sample duration' do
- expect(::Gitlab::Database::Migrations::Runner).to receive(:batched_background_migrations)
- .with(for_database: 'ci').and_return(batched_runner)
- expect(batched_runner).to receive(:run_jobs).with(for_duration: 30.minutes)
+ describe '#down' do
+ subject { run_rake_task("gitlab:db:migration_testing:down:#{db}") }
+
+ it 'delegates to the migration runner' do
+ expect(::Gitlab::Database::Migrations::Runner).to receive(:down).with(database: db).and_return(runner)
+ expect(runner).to receive(:run)
- run_rake_task('gitlab:db:migration_testing:sample_batched_background_migrations', '[ci]')
+ subject
+ end
end
- it 'delegates to the migration runner for a specified database and sample duration' do
- expect(::Gitlab::Database::Migrations::Runner).to receive(:batched_background_migrations)
- .with(for_database: 'ci').and_return(batched_runner)
- expect(batched_runner).to receive(:run_jobs).with(for_duration: 100.seconds)
+ describe '#sample_batched_background_migrations' do
+ let(:batched_runner) { instance_double(::Gitlab::Database::Migrations::TestBatchedBackgroundRunner) }
+
+ it 'delegates to the migration runner for a specified database with a default sample duration' do
+ expect(::Gitlab::Database::Migrations::Runner).to receive(:batched_background_migrations)
+ .with(for_database: db).and_return(batched_runner)
+ expect(batched_runner).to receive(:run_jobs).with(for_duration: 30.minutes)
- run_rake_task('gitlab:db:migration_testing:sample_batched_background_migrations', '[ci, 100]')
+ run_rake_task("gitlab:db:migration_testing:sample_batched_background_migrations:#{db}")
+ end
+
+ it 'delegates to the migration runner for a specified database and sample duration' do
+ expect(::Gitlab::Database::Migrations::Runner).to receive(:batched_background_migrations)
+ .with(for_database: db).and_return(batched_runner)
+ expect(batched_runner).to receive(:run_jobs).with(for_duration: 100.seconds)
+
+ run_rake_task("gitlab:db:migration_testing:sample_batched_background_migrations:#{db}", '[100]')
+ end
end
end
end
diff --git a/spec/tasks/gitlab/usage_data_rake_spec.rb b/spec/tasks/gitlab/usage_data_rake_spec.rb
index f54d06f406f..7ddba4ceb9b 100644
--- a/spec/tasks/gitlab/usage_data_rake_spec.rb
+++ b/spec/tasks/gitlab/usage_data_rake_spec.rb
@@ -69,6 +69,15 @@ RSpec.describe 'gitlab:usage data take tasks', :silence_stdout do
expect { run_rake_task('gitlab:usage_data:generate_and_send') }.to output(/.*201.*/).to_stdout
end
+ describe 'generate_ci_template_events' do
+ it "generates #{Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH}" do
+ FileUtils.rm_rf(Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH)
+ run_rake_task('gitlab:usage_data:generate_ci_template_events')
+
+ expect(File.exist?(Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH)).to be true
+ end
+ end
+
private
def stub_response(url: service_ping_payload_url, body:, status: 201)
diff --git a/spec/tooling/danger/config_files_spec.rb b/spec/tooling/danger/config_files_spec.rb
index 0e01908a1dd..88b327df63f 100644
--- a/spec/tooling/danger/config_files_spec.rb
+++ b/spec/tooling/danger/config_files_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Tooling::Danger::ConfigFiles do
let(:file_lines) do
[
"---",
- "name: about_your_company_registration_flow",
+ "name: about_some_new_flow",
"introduced_by_url: #{url}",
"rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/355909",
"milestone: '14.10'"
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 4cc5df385a5..6793a4b8de3 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -98,10 +98,11 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'vendor/languages.yml' | [:backend]
'file_hooks/examples/' | [:backend]
- 'Gemfile' | [:backend]
- 'Gemfile.lock' | [:backend]
- 'Rakefile' | [:backend]
- 'FOO_VERSION' | [:backend]
+ 'Gemfile' | [:backend]
+ 'Gemfile.lock' | [:backend]
+ 'Gemfile.checksum' | [:backend]
+ 'Rakefile' | [:backend]
+ 'FOO_VERSION' | [:backend]
'scripts/glfm/bar.rb' | [:backend]
'scripts/glfm/bar.js' | [:frontend]
diff --git a/spec/tooling/danger/specs_spec.rb b/spec/tooling/danger/specs_spec.rb
index 6c1fbbb903d..d6aed86e7dc 100644
--- a/spec/tooling/danger/specs_spec.rb
+++ b/spec/tooling/danger/specs_spec.rb
@@ -13,7 +13,9 @@ RSpec.describe Tooling::Danger::Specs do
include_context "with dangerfile"
let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
- let(:fake_project_helper) { double('fake-project-helper', helper: fake_helper).tap { |h| h.class.include(Tooling::Danger::ProjectHelper) } }
+ let(:fake_project_helper) { instance_double('Tooling::Danger::ProjectHelper') }
+ let(:filename) { 'spec/foo_spec.rb' }
+
let(:file_lines) do
[
" describe 'foo' do",
@@ -32,6 +34,7 @@ RSpec.describe Tooling::Danger::Specs do
let(:matching_lines) do
[
+ "+ expect(foo).to match(['should not error'])",
"+ expect(foo).to match(['bar'])",
"+ expect(foo).to match(['bar'])",
"+ expect(foo).to match ['bar']",
@@ -42,31 +45,28 @@ RSpec.describe Tooling::Danger::Specs do
]
end
+ let(:changed_lines) do
+ [
+ " expect(foo).to match(['bar'])",
+ " expect(foo).to match(['bar'])",
+ " expect(foo).to match ['bar']",
+ " expect(foo).to eq(['bar'])",
+ " expect(foo).to eq ['bar']",
+ "- expect(foo).to match(['bar'])",
+ "- expect(foo).to match(['bar'])",
+ "- expect(foo).to match ['bar']",
+ "- expect(foo).to eq(['bar'])",
+ "- expect(foo).to eq ['bar']",
+ "+ expect(foo).to eq([])"
+ ] + matching_lines
+ end
+
subject(:specs) { fake_danger.new(helper: fake_helper) }
before do
allow(specs).to receive(:project_helper).and_return(fake_project_helper)
- end
-
- describe '#add_suggestions_for_match_with_array' do
- let(:filename) { 'spec/foo_spec.rb' }
-
- before do
- expect(specs).to receive(:added_line_matching_match_with_array).and_return(matching_lines)
- allow(specs.project_helper).to receive(:file_lines).and_return(file_lines)
- end
-
- it 'adds suggestions at the correct lines' do
- expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 2)
- expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 4)
- expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array ['bar']"), file: filename, line: 6)
- expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array(['bar'])"), file: filename, line: 7)
- expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to match_array ['bar']"), file: filename, line: 8)
- expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to(match_array(['bar']))"), file: filename, line: 9)
- expect(specs).to receive(:markdown).with(format(described_class::SUGGEST_MR_COMMENT, suggested_line: " expect(foo).to(match_array(['bar']))"), file: filename, line: 10)
-
- specs.add_suggestions_for_match_with_array(filename)
- end
+ allow(specs.helper).to receive(:changed_lines).with(filename).and_return(matching_lines)
+ allow(specs.project_helper).to receive(:file_lines).and_return(file_lines)
end
describe '#changed_specs_files' do
@@ -105,30 +105,116 @@ RSpec.describe Tooling::Danger::Specs do
end
end
- describe '#added_line_matching_match_with_array' do
- let(:filename) { 'spec/foo_spec.rb' }
+ describe '#add_suggestions_for_match_with_array' do
+ let(:template) do
+ <<~MARKDOWN
+ ```suggestion
+ %<suggested_line>s
+ ```
+
+ If order of the result is not important, please consider using `match_array` to avoid flakiness.
+ MARKDOWN
+ end
+
+ it 'adds suggestions at the correct lines' do
+ [
+ { suggested_line: " expect(foo).to match_array(['bar'])", number: 2 },
+ { suggested_line: " expect(foo).to match_array(['bar'])", number: 4 },
+ { suggested_line: " expect(foo).to match_array ['bar']", number: 6 },
+ { suggested_line: " expect(foo).to match_array(['bar'])", number: 7 },
+ { suggested_line: " expect(foo).to match_array ['bar']", number: 8 },
+ { suggested_line: " expect(foo).to(match_array(['bar']))", number: 9 },
+ { suggested_line: " expect(foo).to(match_array(['bar']))", number: 10 }
+ ].each do |test_case|
+ comment = format(template, suggested_line: test_case[:suggested_line])
+ expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number])
+ end
+
+ specs.add_suggestions_for_match_with_array(filename)
+ end
+ end
+
+ describe '#add_suggestions_for_project_factory_usage' do
+ let(:template) do
+ <<~MARKDOWN
+ ```suggestion
+ %<suggested_line>s
+ ```
+
+ Project creations are very slow. Use `let_it_be`, `build` or `build_stubbed` if possible.
+ See [testing best practices](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#optimize-factory-usage)
+ for background information and alternative options.
+ MARKDOWN
+ end
+
+ let(:file_lines) do
+ [
+ " let(:project) { create(:project) }",
+ " let_it_be(:project) { create(:project, :repository)",
+ " let!(:project) { create(:project) }",
+ " let(:var) { create(:project) }",
+ " let(:merge_request) { create(:merge_request, project: project)",
+ " context 'when merge request exists' do",
+ " it { is_expected.to be_success }",
+ " end",
+ " let!(:var) { create(:project) }",
+ " let(:project) { create(:thing) }",
+ " let(:project) { build(:project) }",
+ " let(:project) do",
+ " create(:project)",
+ " end",
+ " let(:project) { create(:project, :repository) }",
+ " str = 'let(:project) { create(:project) }'",
+ " let(:project) { create(:project_empty_repo) }",
+ " let(:project) { create(:forked_project_with_submodules) }",
+ " let(:project) { create(:project_with_design) }",
+ " let(:authorization) { create(:project_authorization) }"
+ ]
+ end
+
+ let(:matching_lines) do
+ [
+ "+ let(:should_not_error) { create(:project) }",
+ "+ let(:project) { create(:project) }",
+ "+ let!(:project) { create(:project) }",
+ "+ let(:var) { create(:project) }",
+ "+ let!(:var) { create(:project) }",
+ "+ let(:project) { create(:project, :repository) }",
+ "+ let(:project) { create(:project_empty_repo) }",
+ "+ let(:project) { create(:forked_project_with_submodules) }",
+ "+ let(:project) { create(:project_with_design) }"
+ ]
+ end
+
let(:changed_lines) do
[
- " expect(foo).to match(['bar'])",
- " expect(foo).to match(['bar'])",
- " expect(foo).to match ['bar']",
- " expect(foo).to eq(['bar'])",
- " expect(foo).to eq ['bar']",
- "- expect(foo).to match(['bar'])",
- "- expect(foo).to match(['bar'])",
- "- expect(foo).to match ['bar']",
- "- expect(foo).to eq(['bar'])",
- "- expect(foo).to eq ['bar']",
- "+ expect(foo).to eq([])"
+ "+ line which doesn't exist in the file and should not cause an error",
+ "+ let_it_be(:project) { create(:project, :repository)",
+ "+ let(:project) { create(:thing) }",
+ "+ let(:project) do",
+ "+ create(:project)",
+ "+ end",
+ "+ str = 'let(:project) { create(:project) }'",
+ "+ let(:authorization) { create(:project_authorization) }"
] + matching_lines
end
- before do
- allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines)
- end
+ it 'adds suggestions at the correct lines', :aggregate_failures do
+ [
+ { suggested_line: " let_it_be(:project) { create(:project) }", number: 1 },
+ { suggested_line: " let_it_be(:project) { create(:project) }", number: 3 },
+ { suggested_line: " let_it_be(:var) { create(:project) }", number: 4 },
+ { suggested_line: " let_it_be(:var) { create(:project) }", number: 9 },
+ { suggested_line: " let_it_be(:project) { create(:project, :repository) }", number: 15 },
+ { suggested_line: " let_it_be(:project) { create(:project_empty_repo) }", number: 17 },
+ { suggested_line: " let_it_be(:project) { create(:forked_project_with_submodules) }", number: 18 },
+ { suggested_line: " let_it_be(:project) { create(:project_with_design) }", number: 19 }
+ ].each do |test_case|
+ comment = format(template, suggested_line: test_case[:suggested_line])
+ expect(specs).to receive(:markdown).with(comment, file: filename, line: test_case[:number])
+ end
- it 'returns all lines using an array equality matcher' do
- expect(specs.added_line_matching_match_with_array(filename)).to match_array(matching_lines)
+ specs.add_suggestions_for_project_factory_usage(filename)
end
end
end
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index f4eea28b66f..6084dc194da 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
require_relative '../../../tooling/quality/test_level'
diff --git a/spec/uploaders/job_artifact_uploader_spec.rb b/spec/uploaders/job_artifact_uploader_spec.rb
index 9ccf216d1fc..a7fd040837f 100644
--- a/spec/uploaders/job_artifact_uploader_spec.rb
+++ b/spec/uploaders/job_artifact_uploader_spec.rb
@@ -23,6 +23,14 @@ RSpec.describe JobArtifactUploader do
it_behaves_like "builds correct paths",
store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
+
+ describe '#cdn_enabled_url' do
+ it 'returns URL and false' do
+ result = uploader.cdn_enabled_url(nil, '127.0.0.1')
+
+ expect(result.used_cdn).to be false
+ end
+ end
end
context 'file is stored in valid local_path' do
diff --git a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
index b72f6d66d69..8e209dabddc 100644
--- a/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
+++ b/spec/uploaders/object_storage/cdn/google_cdn_spec.rb
@@ -30,6 +30,8 @@ RSpec.describe ObjectStorage::CDN::GoogleCDN,
'2600:1900:4180:0000:0000:0000:0000:0000' | false
'10.10.1.5' | false
'fc00:0000:0000:0000:0000:0000:0000:0000' | false
+ '127.0.0.1' | false
+ '169.254.0.0' | false
end
with_them do
@@ -68,6 +70,26 @@ RSpec.describe ObjectStorage::CDN::GoogleCDN,
expect(subject.use_cdn?(public_ip)).to be false
end
end
+
+ context 'when URL is a domain' do
+ before do
+ options[:url] = 'cdn.gitlab.example.com'
+ end
+
+ it 'returns false' do
+ expect(subject.use_cdn?(public_ip)).to be false
+ end
+ end
+
+ context 'when URL uses HTTP' do
+ before do
+ options[:url] = 'http://cdn.gitlab.example.com'
+ end
+
+ it 'returns false' do
+ expect(subject.use_cdn?(public_ip)).to be false
+ end
+ end
end
describe '#signed_url' do
diff --git a/spec/uploaders/object_storage/cdn_spec.rb b/spec/uploaders/object_storage/cdn_spec.rb
index 246cb1bf349..f99450b274f 100644
--- a/spec/uploaders/object_storage/cdn_spec.rb
+++ b/spec/uploaders/object_storage/cdn_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe ObjectStorage::CDN do
'provider' => 'google',
'url' => 'https://gitlab.example.com',
'key_name' => 'test-key',
- 'key' => '12345'
+ 'key' => Base64.urlsafe_encode64('12345')
}
}
}.freeze
@@ -31,21 +31,49 @@ RSpec.describe ObjectStorage::CDN do
end
let(:object) { build_stubbed(:user) }
+ let(:public_ip) { '18.245.0.1' }
+
+ let_it_be(:project) { build(:project) }
subject { uploader_class.new(object, :file) }
context 'with CDN config' do
before do
+ stub_artifacts_object_storage(enabled: true)
uploader_class.options = Settingslogic.new(Gitlab.config.uploads.deep_merge(cdn_options))
end
- describe '#use_cdn?' do
- it 'returns true' do
- expect_next_instance_of(ObjectStorage::CDN::GoogleCDN) do |cdn|
- expect(cdn).to receive(:use_cdn?).and_return(true)
+ describe '#cdn_enabled_url' do
+ context 'with ci_job_artifacts_cdn feature flag disabled' do
+ before do
+ stub_feature_flags(ci_job_artifacts_cdn: false)
end
- expect(subject.use_cdn?('18.245.0.1')).to be true
+ it 'calls #url' do
+ expect(subject).to receive(:url).and_call_original
+ expect(subject).not_to receive(:cdn_signed_url)
+
+ result = subject.cdn_enabled_url(project, public_ip)
+
+ expect(result.used_cdn).to be false
+ end
+ end
+
+ context 'with ci_job_artifacts_cdn feature flag enabled' do
+ it 'calls #cdn_signed_url' do
+ expect(subject).not_to receive(:url)
+ expect(subject).to receive(:cdn_signed_url).and_call_original
+
+ result = subject.cdn_enabled_url(project, public_ip)
+
+ expect(result.used_cdn).to be true
+ end
+ end
+ end
+
+ describe '#use_cdn?' do
+ it 'returns true' do
+ expect(subject.use_cdn?(public_ip)).to be true
end
end
@@ -67,7 +95,7 @@ RSpec.describe ObjectStorage::CDN do
describe '#use_cdn?' do
it 'returns false' do
- expect(subject.use_cdn?('18.245.0.1')).to be false
+ expect(subject.use_cdn?(public_ip)).to be false
end
end
end
@@ -79,7 +107,7 @@ RSpec.describe ObjectStorage::CDN do
end
it 'raises an error' do
- expect { subject.use_cdn?('18.245.0.1') }.to raise_error("Unknown CDN provider: amazon")
+ expect { subject.use_cdn?(public_ip) }.to raise_error("Unknown CDN provider: amazon")
end
end
end
diff --git a/spec/uploaders/packages/rpm/repository_file_uploader_spec.rb b/spec/uploaders/packages/rpm/repository_file_uploader_spec.rb
new file mode 100644
index 00000000000..720e109533b
--- /dev/null
+++ b/spec/uploaders/packages/rpm/repository_file_uploader_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rpm::RepositoryFileUploader do
+ let_it_be(:repository_file) { create(:rpm_repository_file) }
+ let(:uploader) { described_class.new(repository_file, :file) }
+ let(:path) { Gitlab.config.packages.storage_path }
+
+ subject { uploader }
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[^\h{2}/\h{2}/\h{64}/projects/\d+/rpm/repository_files/\d+$],
+ cache_dir: %r{/packages/tmp/cache},
+ work_dir: %r{/packages/tmp/work}
+
+ context 'when object store is remote' do
+ before do
+ stub_rpm_repository_file_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths',
+ store_dir: %r[^\h{2}/\h{2}/\h{64}/projects/\d+/rpm/repository_files/\d+$]
+ end
+
+ describe 'remote file' do
+ let(:repository_file) { create(:rpm_repository_file, :object_storage) }
+
+ context 'with object storage enabled' do
+ before do
+ stub_rpm_repository_file_object_storage
+ end
+
+ it 'can store file remotely' do
+ allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
+
+ repository_file
+
+ expect(repository_file.file_store).to eq(described_class::Store::REMOTE)
+ expect(repository_file.file.path).not_to be_blank
+ end
+ end
+ end
+end
diff --git a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb
index 18a2e29adab..cb41f2e636e 100644
--- a/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/_package_registry.html.haml_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'admin/application_settings/_package_registry' do
it 'does not display the plan name when there is only one plan' do
subject
- expect(page).not_to have_content('Default')
+ expect(page).not_to have_selector('a[data-action="plan0"]')
end
end
diff --git a/spec/views/admin/application_settings/ci_cd.html.haml_spec.rb b/spec/views/admin/application_settings/ci_cd.html.haml_spec.rb
index 4d40bf5671e..e4ebdd706d4 100644
--- a/spec/views/admin/application_settings/ci_cd.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/ci_cd.html.haml_spec.rb
@@ -15,42 +15,17 @@ RSpec.describe 'admin/application_settings/ci_cd.html.haml' do
end
describe 'CI CD Runner Registration' do
- context 'when feature flag is enabled' do
- before do
- stub_feature_flags(runner_registration_control: true)
- end
+ it 'has the setting section' do
+ render
- it 'has the setting section' do
- render
-
- expect(rendered).to have_css("#js-runner-settings")
- end
-
- it 'renders the correct setting section content' do
- render
-
- expect(rendered).to have_content("Runner registration")
- expect(rendered).to have_content("If no options are selected, only administrators can register runners.")
- end
+ expect(rendered).to have_css("#js-runner-settings")
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(runner_registration_control: false)
- end
-
- it 'does not have the setting section' do
- render
-
- expect(rendered).not_to have_css("#js-runner-settings")
- end
-
- it 'does not render the correct setting section content' do
- render
+ it 'renders the correct setting section content' do
+ render
- expect(rendered).not_to have_content("Runner registration")
- expect(rendered).not_to have_content("If no options are selected, only administrators can register runners.")
- end
+ expect(rendered).to have_content("Runner registration")
+ expect(rendered).to have_content("If no options are selected, only administrators can register runners.")
end
end
end
diff --git a/spec/views/admin/broadcast_messages/index.html.haml_spec.rb b/spec/views/admin/broadcast_messages/index.html.haml_spec.rb
index e1dc76428df..ba998085bf9 100644
--- a/spec/views/admin/broadcast_messages/index.html.haml_spec.rb
+++ b/spec/views/admin/broadcast_messages/index.html.haml_spec.rb
@@ -3,20 +3,22 @@
require 'spec_helper'
RSpec.describe 'admin/broadcast_messages/index' do
- describe 'Target roles select and table column' do
- let(:feature_flag_state) { true }
+ let(:role_targeted_broadcast_messages) { true }
+ let(:vue_broadcast_messages) { false }
- let_it_be(:message) { create(:broadcast_message, broadcast_type: 'banner', target_access_levels: [Gitlab::Access::GUEST, Gitlab::Access::DEVELOPER]) }
+ let_it_be(:message) { create(:broadcast_message, broadcast_type: 'banner', target_access_levels: [Gitlab::Access::GUEST, Gitlab::Access::DEVELOPER]) }
- before do
- assign(:broadcast_messages, BroadcastMessage.page(1))
- assign(:broadcast_message, BroadcastMessage.new)
+ before do
+ assign(:broadcast_messages, BroadcastMessage.page(1))
+ assign(:broadcast_message, BroadcastMessage.new)
- stub_feature_flags(role_targeted_broadcast_messages: feature_flag_state)
+ stub_feature_flags(role_targeted_broadcast_messages: role_targeted_broadcast_messages)
+ stub_feature_flags(vue_broadcast_messages: vue_broadcast_messages)
- render
- end
+ render
+ end
+ describe 'Target roles select and table column' do
it 'rendered' do
expect(rendered).to have_content('Target roles')
expect(rendered).to have_content('Owner')
@@ -24,7 +26,7 @@ RSpec.describe 'admin/broadcast_messages/index' do
end
context 'when feature flag is off' do
- let(:feature_flag_state) { false }
+ let(:role_targeted_broadcast_messages) { false }
it 'is not rendered' do
expect(rendered).not_to have_content('Target roles')
@@ -33,4 +35,18 @@ RSpec.describe 'admin/broadcast_messages/index' do
end
end
end
+
+ describe 'Vue application' do
+ it 'is not rendered' do
+ expect(rendered).not_to have_selector('#js-broadcast-messages')
+ end
+
+ context 'when feature flag is on' do
+ let(:vue_broadcast_messages) { true }
+
+ it 'is rendered' do
+ expect(rendered).to have_selector('#js-broadcast-messages')
+ end
+ end
+ end
end
diff --git a/spec/views/events/event/_common.html.haml_spec.rb b/spec/views/events/event/_common.html.haml_spec.rb
index 0de84e2fdb8..ad8e5c2ef77 100644
--- a/spec/views/events/event/_common.html.haml_spec.rb
+++ b/spec/views/events/event/_common.html.haml_spec.rb
@@ -7,33 +7,41 @@ RSpec.describe 'events/event/_common.html.haml' do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
+ before do
+ render partial: 'events/event/common', locals: { event: event.present }
+ end
+
context 'when it is a work item event' do
- let(:work_item) { create(:work_item, project: project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project) }
- let(:event) do
+ let_it_be(:event) do
create(:event, :created, project: project, target: work_item, target_type: 'WorkItem', author: user)
end
it 'renders the correct url' do
- render partial: 'events/event/common', locals: { event: event.present }
-
expect(rendered).to have_link(
work_item.reference_link_text, href: "/#{project.full_path}/-/work_items/#{work_item.id}"
)
end
+
+ it 'uses issue_type for the target_name' do
+ expect(rendered).to have_content("#{s_('Event|opened')} task #{work_item.to_reference}")
+ end
end
- context 'when it is an isssue event' do
- let(:issue) { create(:issue, project: project) }
+ context 'when it is an issue event' do
+ let_it_be(:issue) { create(:issue, project: project) }
- let(:event) do
+ let_it_be(:event) do
create(:event, :created, project: project, target: issue, author: user)
end
it 'renders the correct url' do
- render partial: 'events/event/common', locals: { event: event.present }
-
expect(rendered).to have_link(issue.reference_link_text, href: "/#{project.full_path}/-/issues/#{issue.iid}")
end
+
+ it 'uses issue_type for the target_name' do
+ expect(rendered).to have_content("#{s_('Event|opened')} issue #{issue.to_reference}")
+ end
end
end
diff --git a/spec/views/groups/new.html.haml_spec.rb b/spec/views/groups/new.html.haml_spec.rb
index 5c7378e8dc7..0bbc4c1d717 100644
--- a/spec/views/groups/new.html.haml_spec.rb
+++ b/spec/views/groups/new.html.haml_spec.rb
@@ -36,4 +36,11 @@ RSpec.describe 'groups/new.html.haml' do
expect(rendered).to have_field('Public')
end
end
+
+ describe 'role field' do
+ it 'does have a default selection' do
+ expect(rendered).to have_content('Role')
+ expect(rendered).to have_select('Role', selected: 'Software Developer')
+ end
+ end
end
diff --git a/spec/views/layouts/_flash.html.haml_spec.rb b/spec/views/layouts/_flash.html.haml_spec.rb
index a4bed09368f..d88977b194a 100644
--- a/spec/views/layouts/_flash.html.haml_spec.rb
+++ b/spec/views/layouts/_flash.html.haml_spec.rb
@@ -3,9 +3,20 @@
require 'spec_helper'
RSpec.describe 'layouts/_flash' do
+ let_it_be(:template) { 'layouts/_flash' }
+ let_it_be(:flash_container_no_margin_class) { 'flash-container-no-margin' }
+
+ let(:locals) { {} }
+
before do
allow(view).to receive(:flash).and_return(flash)
- render
+ render(template: template, locals: locals)
+ end
+
+ describe 'default' do
+ it 'does not render flash container no margin class' do
+ expect(rendered).not_to have_selector(".#{flash_container_no_margin_class}")
+ end
end
describe 'closable flash messages' do
@@ -17,7 +28,7 @@ RSpec.describe 'layouts/_flash' do
let(:flash) { { flash_type => 'This is a closable flash message' } }
it 'shows a close button' do
- expect(rendered).to include('js-close-icon')
+ expect(rendered).to include('js-close')
end
end
end
@@ -31,8 +42,16 @@ RSpec.describe 'layouts/_flash' do
let(:flash) { { flash_type => 'This is a non closable flash message' } }
it 'does not show a close button' do
- expect(rendered).not_to include('js-close-icon')
+ expect(rendered).not_to include('js-close')
end
end
end
+
+ describe 'with flash_class in locals' do
+ let(:locals) { { flash_container_no_margin: true } }
+
+ it 'adds class to flash-container' do
+ expect(rendered).to have_selector(".flash-container.#{flash_container_no_margin_class}")
+ end
+ end
end
diff --git a/spec/views/layouts/fullscreen.html.haml_spec.rb b/spec/views/layouts/fullscreen.html.haml_spec.rb
index 14b382bc238..7b345fea2ad 100644
--- a/spec/views/layouts/fullscreen.html.haml_spec.rb
+++ b/spec/views/layouts/fullscreen.html.haml_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe 'layouts/fullscreen' do
+ let_it_be(:template) { 'layouts/fullscreen' }
+ let_it_be(:top_nav_partial) { 'layouts/header/_default' }
+ let_it_be(:top_nav_responsive_partial) { 'layouts/nav/_top_nav_responsive' }
+
let_it_be(:user) { create(:user) }
before do
@@ -16,6 +20,20 @@ RSpec.describe 'layouts/fullscreen' do
expect(rendered).to have_selector(".gl--flex-full.gl-w-full")
end
+ it 'renders flash container' do
+ render
+
+ expect(view).to render_template("layouts/_flash")
+ expect(rendered).to have_selector(".flash-container.flash-container-no-margin")
+ end
+
+ it 'renders top nav' do
+ render
+
+ expect(view).to render_template(top_nav_partial)
+ expect(view).to render_template(top_nav_responsive_partial)
+ end
+
it_behaves_like 'a layout which reflects the application theme setting'
describe 'sidebar' do
@@ -51,4 +69,15 @@ RSpec.describe 'layouts/fullscreen' do
end
end
end
+
+ context 'when minimal is set' do
+ subject { render(template: template, formats: :html, locals: { minimal: true }) }
+
+ it 'does not render top nav' do
+ subject
+
+ expect(view).not_to render_template(top_nav_partial)
+ expect(view).not_to render_template(top_nav_responsive_partial)
+ end
+ end
end
diff --git a/spec/views/layouts/header/_gitlab_version.html.haml_spec.rb b/spec/views/layouts/header/_gitlab_version.html.haml_spec.rb
index 0e24810f835..2f423c72ca6 100644
--- a/spec/views/layouts/header/_gitlab_version.html.haml_spec.rb
+++ b/spec/views/layouts/header/_gitlab_version.html.haml_spec.rb
@@ -12,5 +12,11 @@ RSpec.describe 'layouts/header/_gitlab_version' do
it 'renders the version check badge' do
expect(rendered).to have_selector('.js-gitlab-version-check')
end
+
+ it 'renders the container as a link' do
+ expect(rendered).to have_selector(
+ 'a[data-testid="gitlab-version-container"][href="/help/update/index"]'
+ )
+ end
end
end
diff --git a/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
index 3d28be68b25..f5a0a7a935c 100644
--- a/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
@@ -11,4 +11,20 @@ RSpec.describe 'layouts/nav/sidebar/_profile' do
it_behaves_like 'has nav sidebar'
it_behaves_like 'sidebar includes snowplow attributes', 'render', 'user_side_navigation', 'user_side_navigation'
+
+ it 'has a link to access tokens' do
+ render
+
+ expect(rendered).to have_link(_('Access Tokens'), href: profile_personal_access_tokens_path)
+ end
+
+ context 'when personal access tokens are disabled' do
+ it 'does not have a link to access tokens' do
+ allow(::Gitlab::CurrentSettings).to receive_messages(personal_access_tokens_disabled?: true)
+
+ render
+
+ expect(rendered).not_to have_link(_('Access Tokens'), href: profile_personal_access_tokens_path)
+ end
+ end
end
diff --git a/spec/views/projects/hooks/edit.html.haml_spec.rb b/spec/views/projects/hooks/edit.html.haml_spec.rb
index c4ec2149794..2a95656645e 100644
--- a/spec/views/projects/hooks/edit.html.haml_spec.rb
+++ b/spec/views/projects/hooks/edit.html.haml_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'projects/hooks/edit' do
it 'renders alert' do
render
- expect(rendered).to have_text(s_('Webhooks|Webhook was automatically disabled'))
+ expect(rendered).to have_text(s_('Webhooks|Webhook rate limit has been reached'))
end
end
diff --git a/spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb b/spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb
new file mode 100644
index 00000000000..416f4253e1b
--- /dev/null
+++ b/spec/views/projects/merge_requests/_close_reopen_draft_report_toggle.html.haml_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/merge_requests/_close_reopen_draft_report_toggle.html.haml' do
+ let_it_be(:merge_request) { create(:merge_request, state: :merged) }
+
+ before do
+ assign(:merge_request, merge_request)
+ assign(:project, merge_request.target_project)
+
+ allow(view).to receive(:moved_mr_sidebar_enabled?).and_return(true)
+ end
+
+  describe 'notifications toggle' do
+ context 'when mr merged and logged in' do
+ it 'is present' do
+ allow(view).to receive(:current_user).and_return(merge_request.author)
+
+ render
+
+ expect(rendered).to have_css('li', class: 'js-sidebar-subscriptions-entry-point')
+ end
+ end
+
+ context 'when mr merged and not logged in' do
+ it 'is not present' do
+ render
+
+ expect(rendered).not_to have_css('li', class: 'js-sidebar-subscriptions-entry-point')
+ end
+ end
+ end
+end
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
index d9c5d348e15..99d87ac449b 100644
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ b/spec/views/registrations/welcome/show.html.haml_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'registrations/welcome/show' do
allow(view).to receive(:in_trial_flow?).and_return(false)
allow(view).to receive(:user_has_memberships?).and_return(false)
allow(view).to receive(:in_oauth_flow?).and_return(false)
+ allow(view).to receive(:glm_tracking_params).and_return({})
render
end
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index 72e2d7131c0..2149c394320 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -12,6 +12,13 @@ RSpec.describe 'search/_results' do
controller.params[:action] = 'show'
controller.params[:search] = term
+ allow(self).to receive(:current_user).and_return(user)
+ allow(@search_results).to receive(:formatted_count).with(scope).and_return(10)
+ allow(self).to receive(:search_count_path).with(any_args).and_return("test count link")
+ allow(self).to receive(:search_path).with(any_args).and_return("link test")
+
+ stub_feature_flags(search_page_vertical_nav: false)
+
create_list(:issue, 3)
@search_objects = search_objects
@@ -147,7 +154,7 @@ RSpec.describe 'search/_results' do
it 'does not render the sidebar' do
render
- expect(rendered).not_to have_selector('#js-search-sidebar')
+ expect(rendered).not_to have_selector('form.search-sidebar')
end
end
end
diff --git a/spec/views/search/show.html.haml_spec.rb b/spec/views/search/show.html.haml_spec.rb
index a336ec91ff2..565dadd64fe 100644
--- a/spec/views/search/show.html.haml_spec.rb
+++ b/spec/views/search/show.html.haml_spec.rb
@@ -4,93 +4,118 @@ require 'spec_helper'
RSpec.describe 'search/show' do
let(:search_term) { nil }
+ let(:user) { build(:user) }
before do
stub_template "search/_category.html.haml" => 'Category Partial'
stub_template "search/_results.html.haml" => 'Results Partial'
-
- @search_term = search_term
-
- render
end
- context 'when the search page is opened' do
- it 'displays the title' do
- expect(rendered).to have_selector('h1.page-title', text: 'Search')
- expect(rendered).not_to have_selector('h1.page-title code')
+ context 'feature flag enabled' do
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ @search_term = search_term
+
+ render
end
- it 'does not render partials' do
- expect(rendered).not_to render_template('search/_category')
- expect(rendered).not_to render_template('search/_results')
+ context 'when search term is supplied' do
+ let(:search_term) { 'Search Foo' }
+
+ it 'will not render category partial' do
+ expect(rendered).not_to render_template('search/_category')
+ expect(rendered).to render_template('search/_results')
+ end
end
end
- context 'when search term is supplied' do
- let(:search_term) { 'Search Foo' }
+ context 'feature flag disabled' do
+ before do
+ stub_feature_flags(search_page_vertical_nav: false)
- it 'renders partials' do
- expect(rendered).to render_template('search/_category')
- expect(rendered).to render_template('search/_results')
+ @search_term = search_term
+
+ render
end
- context 'unfurling support' do
- let(:group) { build(:group) }
- let(:search_results) do
- instance_double(Gitlab::GroupSearchResults).tap do |double|
- allow(double).to receive(:formatted_count).and_return(0)
- end
+ context 'when the search page is opened' do
+ it 'displays the title' do
+ expect(rendered).to have_selector('h1.page-title', text: 'Search')
+ expect(rendered).not_to have_selector('h1.page-title code')
end
- before do
- assign(:search_results, search_results)
- assign(:scope, 'issues')
- assign(:group, group)
+ it 'does not render partials' do
+ expect(rendered).not_to render_template('search/_category')
+ expect(rendered).not_to render_template('search/_results')
end
+ end
+
+ context 'when search term is supplied' do
+ let(:search_term) { 'Search Foo' }
+
+ it 'renders partials' do
+ expect(rendered).to render_template('search/_category')
+ expect(rendered).to render_template('search/_results')
+ end
+
+ context 'unfurling support' do
+ let(:group) { build(:group) }
+ let(:search_results) do
+ instance_double(Gitlab::GroupSearchResults).tap do |double|
+ allow(double).to receive(:formatted_count).and_return(0)
+ end
+ end
- context 'search with full count' do
before do
- assign(:without_count, false)
+ assign(:search_results, search_results)
+ assign(:scope, 'issues')
+ assign(:group, group)
end
- it 'renders meta tags for a group' do
- render
+ context 'search with full count' do
+ before do
+ assign(:without_count, false)
+ end
- expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
- expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
- end
+ it 'renders meta tags for a group' do
+ render
- it 'renders meta tags for both group and project' do
- project = build(:project, group: group)
- assign(:project, project)
+ expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
+ end
- render
+ it 'renders meta tags for both group and project' do
+ project = build(:project, group: group)
+ assign(:project, project)
- expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
- expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
- end
- end
+ render
- context 'search without full count' do
- before do
- assign(:without_count, true)
+ expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
+ end
end
- it 'renders meta tags for a group' do
- render
+ context 'search without full count' do
+ before do
+ assign(:without_count, true)
+ end
- expect(view.page_description).to match(/issues results for term '#{search_term}'/)
- expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
- end
+ it 'renders meta tags for a group' do
+ render
+
+ expect(view.page_description).to match(/issues results for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
+ end
- it 'renders meta tags for both group and project' do
- project = build(:project, group: group)
- assign(:project, project)
+ it 'renders meta tags for both group and project' do
+ project = build(:project, group: group)
+ assign(:project, project)
- render
+ render
- expect(view.page_description).to match(/issues results for term '#{search_term}'/)
- expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
+ expect(view.page_description).to match(/issues results for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
+ end
end
end
end
diff --git a/spec/views/shared/projects/_project.html.haml_spec.rb b/spec/views/shared/projects/_project.html.haml_spec.rb
index 62f23338c48..7575c3b8b92 100644
--- a/spec/views/shared/projects/_project.html.haml_spec.rb
+++ b/spec/views/shared/projects/_project.html.haml_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'shared/projects/_project.html.haml' do
it 'renders creator avatar if project has a creator' do
render 'shared/projects/project', use_creator_avatar: true, project: project
- expect(rendered).to have_selector('img.avatar')
+ expect(rendered).to have_selector('img.gl-avatar')
end
it 'renders a generic avatar if project does not have a creator' do
@@ -21,6 +21,6 @@ RSpec.describe 'shared/projects/_project.html.haml' do
render 'shared/projects/project', use_creator_avatar: true, project: project
- expect(rendered).to have_selector('.project-avatar')
+ expect(rendered).to have_selector('.gl-avatar-identicon')
end
end
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 7e301efe708..0d0b81d2ec0 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -87,7 +87,6 @@ RSpec.describe BulkImportWorker do
create(:bulk_import_entity, :created, bulk_import: bulk_import)
expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
- expect(BulkImports::EntityWorker).to receive(:perform_async).twice
expect(BulkImports::ExportRequestWorker).to receive(:perform_async).twice
subject.perform(bulk_import.id)
@@ -111,7 +110,7 @@ RSpec.describe BulkImportWorker do
bulk_import = create(:bulk_import, :created)
create(:bulk_import_entity, :created, bulk_import: bulk_import)
- allow(BulkImports::EntityWorker).to receive(:perform_async).and_raise(StandardError)
+ allow(BulkImports::ExportRequestWorker).to receive(:perform_async).and_raise(StandardError)
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(kind_of(StandardError), bulk_import_id: bulk_import.id)
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index ab85b587975..0fcdbccc304 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -37,9 +37,11 @@ RSpec.describe BulkImports::EntityWorker do
.to receive(:info).twice
.with(
hash_including(
- 'entity_id' => entity.id,
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
'current_stage' => nil,
- 'message' => 'Stage starting'
+ 'message' => 'Stage starting',
+ 'importer' => 'gitlab_migration'
)
)
end
@@ -67,8 +69,10 @@ RSpec.describe BulkImports::EntityWorker do
.to receive(:info).twice
.with(
hash_including(
- 'entity_id' => entity.id,
- 'current_stage' => nil
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'current_stage' => nil,
+ 'importer' => 'gitlab_migration'
)
)
@@ -76,16 +80,23 @@ RSpec.describe BulkImports::EntityWorker do
.to receive(:error)
.with(
hash_including(
- 'entity_id' => entity.id,
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
'current_stage' => nil,
- 'message' => 'Error!'
+ 'message' => 'Error!',
+ 'importer' => 'gitlab_migration'
)
)
end
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
- .with(exception, entity_id: entity.id)
+ .with(
+ exception,
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ importer: 'gitlab_migration'
+ )
subject
end
@@ -99,9 +110,11 @@ RSpec.describe BulkImports::EntityWorker do
.to receive(:info).twice
.with(
hash_including(
- 'entity_id' => entity.id,
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
'current_stage' => 0,
- 'message' => 'Stage running'
+ 'message' => 'Stage running',
+ 'importer' => 'gitlab_migration'
)
)
end
@@ -127,8 +140,10 @@ RSpec.describe BulkImports::EntityWorker do
.to receive(:info).twice
.with(
hash_including(
- 'entity_id' => entity.id,
- 'current_stage' => 0
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'current_stage' => 0,
+ 'importer' => 'gitlab_migration'
)
)
end
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
index a7f7aaa7dba..597eed3a9b9 100644
--- a/spec/workers/bulk_imports/export_request_worker_spec.rb
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -21,38 +21,128 @@ RSpec.describe BulkImports::ExportRequestWorker do
shared_examples 'requests relations export for api resource' do
include_examples 'an idempotent worker' do
- it 'requests relations export' do
+ it 'requests relations export & schedules entity worker' do
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:post).with(expected).twice
end
+ expect(BulkImports::EntityWorker).to receive(:perform_async).twice
+
perform_multiple(job_args)
end
context 'when network error is raised' do
- it 'logs export failure and marks entity as failed' do
+ let(:exception) { BulkImports::NetworkError.new('Export error') }
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:post).and_raise(exception).twice
+ end
+ end
+
+ context 'when error is retriable' do
+ it 'logs retry request and reenqueues' do
+ allow(exception).to receive(:retriable?).twice.and_return(true)
+
+ expect(Gitlab::Import::Logger).to receive(:error).with(
+ hash_including(
+ 'bulk_import_entity_id' => entity.id,
+ 'pipeline_class' => 'ExportRequestWorker',
+ 'exception_class' => 'BulkImports::NetworkError',
+ 'exception_message' => 'Export error',
+ 'bulk_import_id' => bulk_import.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'importer' => 'gitlab_migration',
+ 'message' => 'Retrying export request'
+ )
+ ).twice
+
+ expect(described_class).to receive(:perform_in).twice.with(2.seconds, entity.id)
+
+ perform_multiple(job_args)
+ end
+ end
+
+ context 'when error is not retriable' do
+ it 'logs export failure and marks entity as failed' do
+ expect(Gitlab::Import::Logger).to receive(:error).with(
+ hash_including(
+ 'bulk_import_entity_id' => entity.id,
+ 'pipeline_class' => 'ExportRequestWorker',
+ 'exception_class' => 'BulkImports::NetworkError',
+ 'exception_message' => 'Export error',
+ 'correlation_id_value' => anything,
+ 'bulk_import_id' => bulk_import.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'importer' => 'gitlab_migration'
+ )
+ ).twice
+
+ perform_multiple(job_args)
+
+ failure = entity.failures.last
+
+ expect(failure.pipeline_class).to eq('ExportRequestWorker')
+ expect(failure.exception_message).to eq('Export error')
+ end
+ end
+ end
+
+ context 'when source id is nil' do
+ let(:entity_source_id) { 'gid://gitlab/Model/1234567' }
+
+ before do
+ graphql_client = instance_double(BulkImports::Clients::Graphql)
+ response = double(original_hash: { 'data' => { entity.entity_type => { 'id' => entity_source_id } } })
+
+ allow(BulkImports::Clients::Graphql).to receive(:new).and_return(graphql_client)
+ allow(graphql_client).to receive(:parse)
+ allow(graphql_client).to receive(:execute).and_return(response)
+ end
+
+ it 'updates entity source id & requests export using source id' do
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
- expect(client).to receive(:post).and_raise(BulkImports::NetworkError, 'Export error').twice
+ expect(client)
+ .to receive(:post)
+ .with("/#{entity.pluralized_name}/1234567/export_relations")
+ .twice
end
- expect(Gitlab::Import::Logger).to receive(:error).with(
- hash_including(
- 'bulk_import_entity_id' => entity.id,
- 'pipeline_class' => 'ExportRequestWorker',
- 'exception_class' => 'BulkImports::NetworkError',
- 'exception_message' => 'Export error',
- 'correlation_id_value' => anything,
- 'bulk_import_id' => bulk_import.id,
- 'bulk_import_entity_type' => entity.source_type
- )
- ).twice
+ entity.update!(source_xid: nil)
perform_multiple(job_args)
- failure = entity.failures.last
+ expect(entity.reload.source_xid).to eq(1234567)
+ end
+
+ context 'when something goes wrong during source id fetch' do
+ let(:entity_source_id) { 'invalid' }
+
+ it 'logs the error & requests relations export using full path url' do
+ expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ expect(client).to receive(:post).with(full_path_url).twice
+ end
+
+ entity.update!(source_xid: nil)
- expect(failure.pipeline_class).to eq('ExportRequestWorker')
- expect(failure.exception_message).to eq('Export error')
+ expect(Gitlab::Import::Logger).to receive(:error).with(
+ a_hash_including(
+ 'message' => 'Failed to fetch source entity id',
+ 'bulk_import_entity_id' => entity.id,
+ 'pipeline_class' => 'ExportRequestWorker',
+ 'exception_class' => 'NoMethodError',
+ 'exception_message' => "undefined method `model_id' for nil:NilClass",
+ 'correlation_id_value' => anything,
+ 'bulk_import_id' => bulk_import.id,
+ 'bulk_import_entity_type' => entity.source_type,
+ 'importer' => 'gitlab_migration'
+ )
+ ).twice
+
+ perform_multiple(job_args)
+
+ expect(entity.source_xid).to be_nil
+ end
end
end
end
@@ -60,14 +150,16 @@ RSpec.describe BulkImports::ExportRequestWorker do
context 'when entity is group' do
let(:entity) { create(:bulk_import_entity, :group_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
- let(:expected) { '/groups/foo%2Fbar/export_relations' }
+ let(:expected) { "/groups/#{entity.source_xid}/export_relations" }
+ let(:full_path_url) { '/groups/foo%2Fbar/export_relations' }
include_examples 'requests relations export for api resource'
end
context 'when entity is project' do
let(:entity) { create(:bulk_import_entity, :project_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
- let(:expected) { '/projects/foo%2Fbar/export_relations' }
+ let(:expected) { "/projects/#{entity.source_xid}/export_relations" }
+ let(:full_path_url) { '/projects/foo%2Fbar/export_relations' }
include_examples 'requests relations export for api resource'
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
index fe2039bd79e..ee65775f170 100644
--- a/spec/workers/bulk_imports/pipeline_worker_spec.rb
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -37,7 +37,9 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
hash_including(
'pipeline_name' => 'FakePipeline',
- 'entity_id' => entity.id
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'importer' => 'gitlab_migration'
)
)
end
@@ -83,8 +85,10 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
hash_including(
'pipeline_tracker_id' => pipeline_tracker.id,
- 'entity_id' => entity.id,
- 'message' => 'Unstarted pipeline not found'
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'message' => 'Unstarted pipeline not found',
+ 'importer' => 'gitlab_migration'
)
)
end
@@ -120,8 +124,10 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
hash_including(
'pipeline_name' => 'FakePipeline',
- 'entity_id' => entity.id,
- 'message' => 'Error!'
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'message' => 'Error!',
+ 'importer' => 'gitlab_migration'
)
)
end
@@ -130,8 +136,10 @@ RSpec.describe BulkImports::PipelineWorker do
.to receive(:track_exception)
.with(
instance_of(StandardError),
- entity_id: entity.id,
- pipeline_name: pipeline_tracker.pipeline_name
+ bulk_import_entity_id: entity.id,
+ bulk_import_id: entity.bulk_import_id,
+ pipeline_name: pipeline_tracker.pipeline_name,
+ importer: 'gitlab_migration'
)
expect(BulkImports::EntityWorker)
@@ -160,7 +168,7 @@ RSpec.describe BulkImports::PipelineWorker do
end
context 'when entity is failed' do
- it 'marks tracker as failed and logs the error' do
+ it 'marks tracker as skipped and logs the skip' do
pipeline_tracker = create(
:bulk_import_tracker,
entity: entity,
@@ -170,22 +178,25 @@ RSpec.describe BulkImports::PipelineWorker do
entity.update!(status: -1)
- expect(BulkImports::Failure).to receive(:create)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ allow(logger).to receive(:info)
+
expect(logger)
- .to receive(:error)
+ .to receive(:info)
.with(
hash_including(
'pipeline_name' => 'FakePipeline',
- 'entity_id' => entity.id,
- 'message' => 'Failed entity status'
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'message' => 'Skipping pipeline due to failed entity',
+ 'importer' => 'gitlab_migration'
)
)
end
subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
- expect(pipeline_tracker.reload.status_name).to eq(:failed)
+ expect(pipeline_tracker.reload.status_name).to eq(:skipped)
end
end
@@ -224,7 +235,9 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
hash_including(
'pipeline_name' => 'FakePipeline',
- 'entity_id' => entity.id
+ 'bulk_import_entity_id' => entity.id,
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'importer' => 'gitlab_migration'
)
)
end
@@ -347,8 +360,10 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
hash_including(
'pipeline_name' => 'NdjsonPipeline',
- 'entity_id' => entity.id,
- 'message' => 'Pipeline timeout'
+ 'bulk_import_entity_id' => entity.id,
+ 'message' => 'Pipeline timeout',
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'importer' => 'gitlab_migration'
)
)
end
@@ -374,8 +389,10 @@ RSpec.describe BulkImports::PipelineWorker do
.with(
hash_including(
'pipeline_name' => 'NdjsonPipeline',
- 'entity_id' => entity.id,
- 'message' => 'Error!'
+ 'bulk_import_entity_id' => entity.id,
+ 'message' => 'Export from source instance failed: Error!',
+ 'bulk_import_id' => entity.bulk_import_id,
+ 'importer' => 'gitlab_migration'
)
)
end
diff --git a/spec/workers/ci/parse_secure_file_metadata_worker_spec.rb b/spec/workers/ci/parse_secure_file_metadata_worker_spec.rb
new file mode 100644
index 00000000000..57bbd8a6ff0
--- /dev/null
+++ b/spec/workers/ci/parse_secure_file_metadata_worker_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ParseSecureFileMetadataWorker do
+ describe '#perform' do
+ include_examples 'an idempotent worker' do
+ let(:secure_file) { create(:ci_secure_file) }
+ subject { described_class.new.perform(secure_file&.id) }
+
+ context 'when the file is found' do
+ it 'calls update_metadata!' do
+ allow(::Ci::SecureFile).to receive(:find_by_id).and_return(secure_file)
+ expect(secure_file).to receive(:update_metadata!)
+
+ subject
+ end
+ end
+ end
+
+ context 'when file is not found' do
+ let(:secure_file) { nil }
+
+ it 'does not call update_metadata!' do
+ expect(secure_file).not_to receive(:update_metadata!)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb
index cb2cf58d50b..3b33972c76f 100644
--- a/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb
+++ b/spec/workers/ci/pipeline_success_unlock_artifacts_worker_spec.rb
@@ -27,15 +27,18 @@ RSpec.describe Ci::PipelineSuccessUnlockArtifactsWorker do
end
context 'when pipeline exists' do
- let(:pipeline) { create(:ci_pipeline, :success, :with_job) }
+ let!(:pipeline) { create(:ci_pipeline, :success, :with_job) }
let(:pipeline_id) { pipeline.id }
- context 'when pipeline has artifacts' do
- before do
- create(:ci_job_artifact, job: pipeline.builds.first)
- end
+ before do
+ allow(Ci::Pipeline).to receive(:find_by_id).with(pipeline.id).and_return(pipeline)
+ allow(pipeline).to receive(:has_erasable_artifacts?).and_return(has_erasable_artifacts)
+ end
- it 'calls the service' do
+ context 'when pipeline has erasable artifacts' do
+ let(:has_erasable_artifacts) { true }
+
+ it 'calls the unlock service' do
service = spy(Ci::UnlockArtifactsService)
expect(Ci::UnlockArtifactsService).to receive(:new).and_return(service)
@@ -45,8 +48,10 @@ RSpec.describe Ci::PipelineSuccessUnlockArtifactsWorker do
end
end
- context 'when pipeline does not have artifacts' do
- it 'does not call service' do
+ context 'when pipeline has no erasable artifacts' do
+ let(:has_erasable_artifacts) { false }
+
+ it 'does not call the unlock service' do
expect(Ci::UnlockArtifactsService).not_to receive(:new)
perform
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 5a32c1b40bb..ece0c5053cb 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -159,11 +159,13 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures do
.with(
project_id: project.id,
exception: exception,
- error_source: 'klass_name'
+ error_source: 'klass_name',
+ fail_import: false
)
.and_call_original
- worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
+ expect { worker.import(project, client, { 'number' => 10, 'github_id' => 1 }) }
+ .to raise_error(exception)
expect(project.import_state.reload.status).to eq('started')
@@ -193,4 +195,12 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures do
end
end
end
+
+ describe '#increment_object_counter?' do
+ let(:issue) { double(:issue, pull_request?: true) }
+
+ it 'returns true' do
+ expect(worker).to be_increment_object_counter(issue)
+ end
+ end
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 6b67c2b474c..322f516fbeb 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -225,7 +225,6 @@ RSpec.describe 'Every Sidekiq worker' do
'Environments::CanaryIngress::UpdateWorker' => false,
'Epics::UpdateEpicsDatesWorker' => 3,
'ErrorTrackingIssueLinkWorker' => 3,
- 'Experiments::RecordConversionEventWorker' => 3,
'ExportCsvWorker' => 3,
'ExternalServiceReactiveCachingWorker' => 3,
'FileHookWorker' => false,
@@ -258,6 +257,10 @@ RSpec.describe 'Every Sidekiq worker' do
'GitGarbageCollectWorker' => false,
'Gitlab::GithubImport::AdvanceStageWorker' => 3,
'Gitlab::GithubImport::ImportReleaseAttachmentsWorker' => 5,
+ 'Gitlab::GithubImport::Attachments::ImportReleaseWorker' => 5,
+ 'Gitlab::GithubImport::Attachments::ImportNoteWorker' => 5,
+ 'Gitlab::GithubImport::Attachments::ImportIssueWorker' => 5,
+ 'Gitlab::GithubImport::Attachments::ImportMergeRequestWorker' => 5,
'Gitlab::GithubImport::ImportDiffNoteWorker' => 5,
'Gitlab::GithubImport::ImportIssueWorker' => 5,
'Gitlab::GithubImport::ImportIssueEventWorker' => 5,
@@ -344,10 +347,10 @@ RSpec.describe 'Every Sidekiq worker' do
'Metrics::Dashboard::PruneOldAnnotationsWorker' => 3,
'Metrics::Dashboard::SyncDashboardsWorker' => 3,
'MigrateExternalDiffsWorker' => 3,
- 'Namespaces::OnboardingIssueCreatedWorker' => 3,
- 'Namespaces::OnboardingPipelineCreatedWorker' => 3,
- 'Namespaces::OnboardingProgressWorker' => 3,
- 'Namespaces::OnboardingUserAddedWorker' => 3,
+ 'Onboarding::IssueCreatedWorker' => 3,
+ 'Onboarding::PipelineCreatedWorker' => 3,
+ 'Onboarding::ProgressWorker' => 3,
+ 'Onboarding::UserAddedWorker' => 3,
'Namespaces::RefreshRootStatisticsWorker' => 3,
'Namespaces::RootStatisticsWorker' => 3,
'Namespaces::ScheduleAggregationWorker' => 3,
diff --git a/spec/workers/experiments/record_conversion_event_worker_spec.rb b/spec/workers/experiments/record_conversion_event_worker_spec.rb
deleted file mode 100644
index 05e4ebc13ba..00000000000
--- a/spec/workers/experiments/record_conversion_event_worker_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Experiments::RecordConversionEventWorker, '#perform' do
- subject(:perform) { described_class.new.perform(:experiment_key, 1234) }
-
- before do
- stub_experiment(experiment_key: experiment_active)
- end
-
- context 'when the experiment is active' do
- let(:experiment_active) { true }
-
- include_examples 'an idempotent worker' do
- subject { perform }
-
- it 'records the event' do
- expect(Experiment).to receive(:record_conversion_event).with(:experiment_key, 1234)
-
- perform
- end
- end
- end
-
- context 'when the experiment is not active' do
- let(:experiment_active) { false }
-
- it 'records the event' do
- expect(Experiment).not_to receive(:record_conversion_event)
-
- perform
- end
- end
-end
diff --git a/spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb
new file mode 100644
index 00000000000..6d617755861
--- /dev/null
+++ b/spec/workers/gitlab/github_import/attachments/import_issue_worker_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Attachments::ImportIssueWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#import' do
+ let(:import_state) { create(:import_state, :started) }
+
+ let(:project) do
+ instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
+ end
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+
+ it 'imports issue attachments' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ an_instance_of(Gitlab::GithubImport::Representation::NoteText),
+ project,
+ client
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .and_call_original
+
+ worker.import(project, client, {})
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb
new file mode 100644
index 00000000000..66dfc027e6e
--- /dev/null
+++ b/spec/workers/gitlab/github_import/attachments/import_merge_request_worker_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Attachments::ImportMergeRequestWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#import' do
+ let(:import_state) { create(:import_state, :started) }
+
+ let(:project) do
+ instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
+ end
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+
+ it 'imports merge request attachments' do
+ expect_next_instance_of(
+ Gitlab::GithubImport::Importer::NoteAttachmentsImporter,
+ an_instance_of(Gitlab::GithubImport::Representation::NoteText),
+ project,
+ client
+ ) do |note_attachments_importer|
+ expect(note_attachments_importer).to receive(:execute)
+ end
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .and_call_original
+
+ worker.import(project, client, {})
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb
new file mode 100644
index 00000000000..7b60cdecca6
--- /dev/null
+++ b/spec/workers/gitlab/github_import/attachments/import_note_worker_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Attachments::ImportNoteWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#import' do
+ let(:import_state) { create(:import_state, :started) }
+
+ let(:project) do
+ instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
+ end
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:importer) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
+
+ let(:note_hash) do
+ {
+ 'record_db_id' => rand(100),
+ 'record_type' => 'Note',
+ 'text' => <<-TEXT
+ Some text...
+
+ ![special-image](https://user-images.githubusercontent.com...)
+ TEXT
+ }
+ end
+
+ it 'imports note attachments' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::NoteText),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer).to receive(:execute)
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .and_call_original
+
+ worker.import(project, client, note_hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb b/spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb
new file mode 100644
index 00000000000..e49b2fb6504
--- /dev/null
+++ b/spec/workers/gitlab/github_import/attachments/import_release_worker_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Attachments::ImportReleaseWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#import' do
+ let(:import_state) { create(:import_state, :started) }
+
+ let(:project) do
+ instance_double('Project', full_path: 'foo/bar', id: 1, import_state: import_state)
+ end
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:importer) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
+
+ let(:note_hash) do
+ {
+ 'record_db_id' => rand(100),
+ 'record_type' => 'Release',
+ 'text' => <<-TEXT
+ Some text...
+
+ ![special-image](https://user-images.githubusercontent.com...)
+ TEXT
+ }
+ end
+
+ it 'imports release attachments' do
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::NoteText),
+ project,
+ client
+ )
+ .and_return(importer)
+
+ expect(importer).to receive(:execute)
+
+ expect(Gitlab::GithubImport::ObjectCounter)
+ .to receive(:increment)
+ .and_call_original
+
+ worker.import(project, client, note_hash)
+ end
+ end
+end
diff --git a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
index c2a7639fde4..ef1d2e3f3e7 100644
--- a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
@@ -45,4 +45,15 @@ RSpec.describe Gitlab::GithubImport::ImportIssueWorker do
worker.import(project, client, hash)
end
end
+
+ describe '#increment_object_counter?' do
+ context 'when github issue is a pull request' do
+ let(:issue) { double(:issue, pull_request?: true) }
+ let(:project) { double(:project) }
+
+ it 'returns false' do
+ expect(worker).not_to be_increment_object_counter(issue)
+ end
+ end
+ end
end
diff --git a/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb b/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb
index cd53c6ee9c0..1d32d5c0e21 100644
--- a/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_release_attachments_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::GithubImport::ImportReleaseAttachmentsWorker do
end
let(:client) { instance_double('Gitlab::GithubImport::Client') }
- let(:importer) { instance_double('Gitlab::GithubImport::Importer::ReleaseAttachmentsImporter') }
+ let(:importer) { instance_double('Gitlab::GithubImport::Importer::NoteAttachmentsImporter') }
let(:release_hash) do
{
@@ -27,10 +27,10 @@ RSpec.describe Gitlab::GithubImport::ImportReleaseAttachmentsWorker do
end
it 'imports an issue event' do
- expect(Gitlab::GithubImport::Importer::ReleaseAttachmentsImporter)
+ expect(Gitlab::GithubImport::Importer::NoteAttachmentsImporter)
.to receive(:new)
.with(
- an_instance_of(Gitlab::GithubImport::Representation::ReleaseAttachments),
+ an_instance_of(Gitlab::GithubImport::Representation::NoteText),
project,
client
)
diff --git a/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
index c2c5e1dbf4e..ecfece735af 100644
--- a/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_attachments_worker_spec.rb
@@ -5,40 +5,60 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Stage::ImportAttachmentsWorker do
subject(:worker) { described_class.new }
- let(:project) { create(:project) }
- let!(:group) { create(:group, projects: [project]) }
- let(:feature_flag_state) { [group] }
+ let_it_be(:project) { create(:project) }
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+ let(:stage_enabled) { true }
+
+ before do
+ settings.write({ attachments_import: stage_enabled })
+ end
describe '#import' do
- let(:importer) { instance_double('Gitlab::GithubImport::Importer::ReleasesAttachmentsImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
-
- before do
- stub_feature_flags(github_importer_attachments_import: feature_flag_state)
+ let(:importers) do
+ [
+ {
+ klass: Gitlab::GithubImport::Importer::Attachments::ReleasesImporter,
+ double: instance_double('Gitlab::GithubImport::Importer::Attachments::ReleasesImporter'),
+ waiter: Gitlab::JobWaiter.new(2, '123')
+ },
+ {
+ klass: Gitlab::GithubImport::Importer::Attachments::NotesImporter,
+ double: instance_double('Gitlab::GithubImport::Importer::Attachments::NotesImporter'),
+ waiter: Gitlab::JobWaiter.new(3, '234')
+ },
+ {
+ klass: Gitlab::GithubImport::Importer::Attachments::IssuesImporter,
+ double: instance_double('Gitlab::GithubImport::Importer::Attachments::IssuesImporter'),
+ waiter: Gitlab::JobWaiter.new(4, '345')
+ },
+ {
+ klass: Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporter,
+ double: instance_double('Gitlab::GithubImport::Importer::Attachments::MergeRequestsImporter'),
+ waiter: Gitlab::JobWaiter.new(5, '456')
+ }
+ ]
end
- it 'imports release attachments' do
- waiter = Gitlab::JobWaiter.new(2, '123')
-
- expect(Gitlab::GithubImport::Importer::ReleasesAttachmentsImporter)
- .to receive(:new)
- .with(project, client)
- .and_return(importer)
-
- expect(importer).to receive(:execute).and_return(waiter)
+ it 'imports attachments' do
+ importers.each do |importer|
+ expect_next_instance_of(importer[:klass], project, client) do |instance|
+ expect(instance).to receive(:execute).and_return(importer[:waiter])
+ end
+ end
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :protected_branches)
+ .with(project.id, { '123' => 2, '234' => 3, '345' => 4, '456' => 5 }, :protected_branches)
worker.import(client, project)
end
- context 'when feature flag is disabled' do
- let(:feature_flag_state) { false }
+ context 'when stage is disabled' do
+ let(:stage_enabled) { false }
it 'skips release attachments import and calls next stage' do
- expect(Gitlab::GithubImport::Importer::ReleasesAttachmentsImporter).not_to receive(:new)
+ importers.each { |importer| expect(importer[:klass]).not_to receive(:new) }
expect(Gitlab::GithubImport::AdvanceStageWorker)
.to receive(:perform_async).with(project.id, {}, :protected_branches)
diff --git a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
index 932152c0764..199d1b9a3ca 100644
--- a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
@@ -7,23 +7,21 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker do
let(:project) { create(:project) }
let!(:group) { create(:group, projects: [project]) }
- let(:feature_flag_state) { [group] }
- let(:single_endpoint_feature_flag_state) { [group] }
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+ let(:stage_enabled) { true }
+
+ before do
+ settings.write({ single_endpoint_issue_events_import: stage_enabled })
+ end
describe '#import' do
let(:importer) { instance_double('Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
- before do
- stub_feature_flags(github_importer_single_endpoint_issue_events_import: single_endpoint_feature_flag_state)
- stub_feature_flags(github_importer_issue_events_import: feature_flag_state)
- end
-
- context 'when single endpoint feature flag enabled' do
- it 'imports all the issue events' do
+ context 'when stage is enabled' do
+ it 'imports issue events' do
waiter = Gitlab::JobWaiter.new(2, '123')
- expect(Gitlab::GithubImport::Importer::IssueEventsImporter).not_to receive(:new)
expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter)
.to receive(:new)
.with(project, client)
@@ -39,35 +37,11 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker do
end
end
- context 'when import issue events feature flag enabled' do
- let(:single_endpoint_feature_flag_state) { false }
-
- it 'imports the issue events partly' do
- waiter = Gitlab::JobWaiter.new(2, '123')
-
- expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter).not_to receive(:new)
- expect(Gitlab::GithubImport::Importer::IssueEventsImporter)
- .to receive(:new)
- .with(project, client)
- .and_return(importer)
-
- expect(importer).to receive(:execute).and_return(waiter)
-
- expect(Gitlab::GithubImport::AdvanceStageWorker)
- .to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :notes)
-
- worker.import(client, project)
- end
- end
-
- context 'when feature flags are disabled' do
- let(:feature_flag_state) { false }
- let(:single_endpoint_feature_flag_state) { false }
+ context 'when stage is disabled' do
+ let(:stage_enabled) { false }
it 'skips issue events import and calls next stage' do
expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter).not_to receive(:new)
- expect(Gitlab::GithubImport::Importer::IssueEventsImporter).not_to receive(:new)
expect(Gitlab::GithubImport::AdvanceStageWorker).to receive(:perform_async).with(project.id, {}, :notes)
worker.import(client, project)
diff --git a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
index a88256b3cae..beef0864715 100644
--- a/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issues_and_diff_notes_worker_spec.rb
@@ -6,6 +6,13 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+ let(:single_endpoint_optional_stage) { true }
+
+ before do
+ settings.write({ single_endpoint_notes_import: single_endpoint_optional_stage })
+ end
+
describe '#import' do
it 'imports the issues and diff notes' do
client = double(:client)
@@ -33,37 +40,18 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker do
end
describe '#importers' do
- context 'when project group is present' do
- let_it_be(:project) { create(:project) }
- let_it_be(:group) { create(:group, projects: [project]) }
-
- context 'when feature flag github_importer_single_endpoint_notes_import is enabled' do
- it 'includes single endpoint diff notes importer' do
- project = create(:project)
- group = create(:group, projects: [project])
-
- stub_feature_flags(github_importer_single_endpoint_notes_import: group)
-
- expect(worker.importers(project)).to contain_exactly(
- Gitlab::GithubImport::Importer::IssuesImporter,
- Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter
- )
- end
- end
-
- context 'when feature flag github_importer_single_endpoint_notes_import is disabled' do
- it 'includes default diff notes importer' do
- stub_feature_flags(github_importer_single_endpoint_notes_import: false)
-
- expect(worker.importers(project)).to contain_exactly(
- Gitlab::GithubImport::Importer::IssuesImporter,
- Gitlab::GithubImport::Importer::DiffNotesImporter
- )
- end
+ context 'when optional stage single_endpoint_notes_import is enabled' do
+ it 'includes single endpoint diff notes importer' do
+ expect(worker.importers(project)).to contain_exactly(
+ Gitlab::GithubImport::Importer::IssuesImporter,
+ Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter
+ )
end
end
- context 'when project group is missing' do
+ context 'when optional stage single_endpoint_notes_import is disabled' do
+ let(:single_endpoint_optional_stage) { false }
+
it 'includes default diff notes importer' do
expect(worker.importers(project)).to contain_exactly(
Gitlab::GithubImport::Importer::IssuesImporter,
diff --git a/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
index adf20d24a7e..dbcf2083ec1 100644
--- a/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_notes_worker_spec.rb
@@ -6,6 +6,13 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportNotesWorker do
let(:project) { create(:project) }
let(:worker) { described_class.new }
+ let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
+ let(:single_endpoint_optional_stage) { true }
+
+ before do
+ settings.write({ single_endpoint_notes_import: single_endpoint_optional_stage })
+ end
+
describe '#import' do
it 'imports all the notes' do
client = double(:client)
@@ -33,37 +40,19 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportNotesWorker do
end
describe '#importers' do
- context 'when project group is present' do
- let_it_be(:project) { create(:project) }
- let_it_be(:group) { create(:group, projects: [project]) }
-
- context 'when feature flag github_importer_single_endpoint_notes_import is enabled' do
- it 'includes single endpoint mr and issue notes importers' do
- project = create(:project)
- group = create(:group, projects: [project])
-
- stub_feature_flags(github_importer_single_endpoint_notes_import: group)
-
- expect(worker.importers(project)).to contain_exactly(
- Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesImporter,
- Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
- )
- end
- end
-
- context 'when feature flag github_importer_single_endpoint_notes_import is disabled' do
- it 'includes default notes importer' do
- stub_feature_flags(github_importer_single_endpoint_notes_import: false)
-
- expect(worker.importers(project)).to contain_exactly(
- Gitlab::GithubImport::Importer::NotesImporter
- )
- end
+ context 'when settings single_endpoint_notes_import is enabled' do
+ it 'includes single endpoint mr and issue notes importers' do
+ expect(worker.importers(project)).to contain_exactly(
+ Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesImporter,
+ Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter
+ )
end
end
- context 'when project group is missing' do
- it 'includes default diff notes importer' do
+ context 'when settings single_endpoint_notes_import is disabled' do
+ let(:single_endpoint_optional_stage) { false }
+
+ it 'includes default notes importer' do
expect(worker.importers(project)).to contain_exactly(
Gitlab::GithubImport::Importer::NotesImporter
)
diff --git a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
index 582cb76a6cd..24fca3b7c73 100644
--- a/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_repository_worker_spec.rb
@@ -19,18 +19,69 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
end
context 'when the import succeeds' do
- it 'schedules the importing of the base data' do
- client = double(:client)
+ context 'with issues' do
+ it 'schedules the importing of the base data' do
+ client = double(:client)
+ options = { state: 'all', sort: 'number', direction: 'desc', per_page: '1' }
- expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
- expect(instance).to receive(:execute).and_return(true)
+ expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
+ expect(instance).to receive(:execute).and_return(true)
+ end
+
+ expect(InternalId).to receive(:exists?).and_return(false)
+ expect(client).to receive(:each_object).with(
+ :issues, project.import_source, options
+ ).and_return([{ number: 5 }].each)
+
+ expect(Issue).to receive(:track_project_iid!).with(project, 5)
+
+ expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.import(client, project)
end
+ end
- expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
- .to receive(:perform_async)
- .with(project.id)
+ context 'without issues' do
+ it 'schedules the importing of the base data' do
+ client = double(:client)
+ options = { state: 'all', sort: 'number', direction: 'desc', per_page: '1' }
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
+ expect(instance).to receive(:execute).and_return(true)
+ end
+
+ expect(InternalId).to receive(:exists?).and_return(false)
+ expect(client).to receive(:each_object).with(:issues, project.import_source, options).and_return([nil].each)
+ expect(Issue).not_to receive(:track_project_iid!)
- worker.import(client, project)
+ expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.import(client, project)
+ end
+ end
+
+ context 'when retrying' do
+ it 'does not allocate internal ids' do
+ client = double(:client)
+
+ expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
+ expect(instance).to receive(:execute).and_return(true)
+ end
+
+ expect(InternalId).to receive(:exists?).and_return(true)
+ expect(client).not_to receive(:each_object)
+ expect(Issue).not_to receive(:track_project_iid!)
+
+ expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
+ .to receive(:perform_async)
+ .with(project.id)
+
+ worker.import(client, project)
+ end
end
end
@@ -43,6 +94,10 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
expect(instance).to receive(:execute).and_raise(exception_class)
end
+ expect(InternalId).to receive(:exists?).and_return(false)
+ expect(client).to receive(:each_object).and_return([nil].each)
+ expect(Issue).not_to receive(:track_project_iid!)
+
expect(Gitlab::Import::ImportFailureService).to receive(:track)
.with(
project_id: project.id,
diff --git a/spec/workers/integrations/create_external_cross_reference_worker_spec.rb b/spec/workers/integrations/create_external_cross_reference_worker_spec.rb
index 61723f44aa5..8e586b90905 100644
--- a/spec/workers/integrations/create_external_cross_reference_worker_spec.rb
+++ b/spec/workers/integrations/create_external_cross_reference_worker_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Integrations::CreateExternalCrossReferenceWorker do
include AfterNextHelpers
using RSpec::Parameterized::TableSyntax
- let_it_be(:project) { create(:jira_project, :repository) }
+ let_it_be(:project) { create(:project, :with_jira_integration, :repository) }
let_it_be(:author) { create(:user) }
let_it_be(:commit) { project.commit }
let_it_be(:issue) { create(:issue, project: project) }
diff --git a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
index 957adbbbd6e..fe677103fd0 100644
--- a/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
+++ b/spec/workers/merge_requests/delete_source_branch_worker_spec.rb
@@ -53,6 +53,48 @@ RSpec.describe MergeRequests::DeleteSourceBranchWorker do
worker.perform(merge_request.id, 'new-source-branch-sha', user.id)
end
end
+
+ context 'when delete service returns an error' do
+ let(:service_result) { ServiceResponse.error(message: 'placeholder') }
+
+ it 'tracks the exception' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
+ end
+
+ expect(service_result).to receive(:track_exception).and_call_original
+
+ worker.perform(merge_request.id, sha, user.id)
+ end
+
+ context 'when track_delete_source_errors is disabled' do
+ before do
+ stub_feature_flags(track_delete_source_errors: false)
+ end
+
+ it 'does not track the exception' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
+ end
+
+ expect(service_result).not_to receive(:track_exception)
+
+ worker.perform(merge_request.id, sha, user.id)
+ end
+ end
+
+ it 'still retargets the merge request' do
+ expect_next_instance_of(::Branches::DeleteService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request.source_branch).and_return(service_result)
+ end
+
+ expect_next_instance_of(::MergeRequests::RetargetChainService) do |instance|
+ expect(instance).to receive(:execute).with(merge_request)
+ end
+
+ worker.perform(merge_request.id, sha, user.id)
+ end
+ end
end
it_behaves_like 'an idempotent worker' do
diff --git a/spec/workers/namespaces/prune_aggregation_schedules_worker_spec.rb b/spec/workers/namespaces/prune_aggregation_schedules_worker_spec.rb
index 11f2501cbe3..d8c60932d92 100644
--- a/spec/workers/namespaces/prune_aggregation_schedules_worker_spec.rb
+++ b/spec/workers/namespaces/prune_aggregation_schedules_worker_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Namespaces::PruneAggregationSchedulesWorker, '#perform', :clean_g
include ExclusiveLeaseHelpers
let(:namespaces) { create_list(:namespace, 5, :with_aggregation_schedule) }
- let(:timeout) { Namespace::AggregationSchedule::DEFAULT_LEASE_TIMEOUT }
+ let(:timeout) { Namespace::AggregationSchedule.default_lease_timeout }
subject(:worker) { described_class.new }
diff --git a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb b/spec/workers/onboarding/issue_created_worker_spec.rb
index 0a896d864b7..70a0156d444 100644
--- a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
+++ b/spec/workers/onboarding/issue_created_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespaces::OnboardingIssueCreatedWorker, '#perform' do
+RSpec.describe Onboarding::IssueCreatedWorker, '#perform' do
let_it_be(:issue) { create(:issue) }
let(:namespace) { issue.project.namespace }
diff --git a/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb b/spec/workers/onboarding/pipeline_created_worker_spec.rb
index 6d69ccb50bd..75bdea28eef 100644
--- a/spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb
+++ b/spec/workers/onboarding/pipeline_created_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespaces::OnboardingPipelineCreatedWorker, '#perform' do
+RSpec.describe Onboarding::PipelineCreatedWorker, '#perform' do
let_it_be(:ci_pipeline) { create(:ci_pipeline) }
it_behaves_like 'records an onboarding progress action', :pipeline_created do
diff --git a/spec/workers/namespaces/onboarding_progress_worker_spec.rb b/spec/workers/onboarding/progress_worker_spec.rb
index 76ac078ddcf..bbf4875069e 100644
--- a/spec/workers/namespaces/onboarding_progress_worker_spec.rb
+++ b/spec/workers/onboarding/progress_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespaces::OnboardingProgressWorker, '#perform' do
+RSpec.describe Onboarding::ProgressWorker, '#perform' do
let_it_be(:namespace) { create(:namespace) }
let_it_be(:action) { 'git_pull' }
diff --git a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb b/spec/workers/onboarding/user_added_worker_spec.rb
index 14428c0ecb8..6dbd875c93b 100644
--- a/spec/workers/namespaces/onboarding_user_added_worker_spec.rb
+++ b/spec/workers/onboarding/user_added_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Namespaces::OnboardingUserAddedWorker, '#perform' do
+RSpec.describe Onboarding::UserAddedWorker, '#perform' do
let_it_be(:namespace) { create(:group) }
subject { described_class.new.perform(namespace.id) }
diff --git a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
index 9272e26a34f..b9c27c54fa1 100644
--- a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
+++ b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
@@ -126,6 +126,107 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
{ type: :namespace, id: 3 }
]
+ it_behaves_like 'clears caches with',
+ event_class: PagesDomains::PagesDomainDeletedEvent,
+ event_data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ domain: 'somedomain.com'
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 3 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: PagesDomains::PagesDomainUpdatedEvent,
+ event_data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ domain: 'somedomain.com'
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 3 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: PagesDomains::PagesDomainCreatedEvent,
+ event_data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ domain: 'somedomain.com'
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 3 }
+ ]
+
+ context 'when project attributes change' do
+ Projects::ProjectAttributesChangedEvent::PAGES_RELATED_ATTRIBUTES.each do |attribute|
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectAttributesChangedEvent,
+ event_data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ attributes: [attribute]
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 3 }
+ ]
+ end
+
+ it 'does not clear the cache when the attributes is not pages related' do
+ event = Projects::ProjectAttributesChangedEvent.new(
+ data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ attributes: ['unknown']
+ }
+ )
+
+ expect(described_class).not_to receive(:clear_cache)
+
+ ::Gitlab::EventStore.publish(event)
+ end
+ end
+
+ context 'when project features change' do
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectFeaturesChangedEvent,
+ event_data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ features: ["pages_access_level"]
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 3 }
+ ]
+
+ it 'does not clear the cache when the features is not pages related' do
+ event = Projects::ProjectFeaturesChangedEvent.new(
+ data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ features: ['unknown']
+ }
+ )
+
+ expect(described_class).not_to receive(:clear_cache)
+
+ ::Gitlab::EventStore.publish(event)
+ end
+ end
+
context 'when namespace based cache keys are duplicated' do
# de-dups namespace cache keys
it_behaves_like 'clears caches with',
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index a445db3a276..01c44399b0c 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -120,34 +120,6 @@ RSpec.describe ProcessCommitWorker do
worker.close_issues(project, user, user, commit, [issue])
end.to change(Issues::CloseWorker.jobs, :size).by(1)
end
-
- context 'when process_issue_closure_in_background flag is disabled' do
- before do
- stub_feature_flags(process_issue_closure_in_background: false)
- end
-
- context 'when the user can update the issues' do
- it 'closes the issues' do
- worker.close_issues(project, user, user, commit, [issue])
-
- issue.reload
-
- expect(issue.closed?).to eq(true)
- end
- end
-
- context 'when the user can not update the issues' do
- it 'does not close the issues' do
- other_user = create(:user)
-
- worker.close_issues(project, other_user, other_user, commit, [issue])
-
- issue.reload
-
- expect(issue.closed?).to eq(false)
- end
- end
- end
end
describe '#update_issue_metrics', :clean_gitlab_redis_cache do
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index 30c85464452..3c807ef9ffd 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe ProjectCacheWorker do
context 'with an existing project' do
before do
lease_key = "namespace:namespaces_root_statistics:#{project.namespace_id}"
- stub_exclusive_lease_taken(lease_key, timeout: Namespace::AggregationSchedule::DEFAULT_LEASE_TIMEOUT)
+ stub_exclusive_lease_taken(lease_key, timeout: Namespace::AggregationSchedule.default_lease_timeout)
end
it 'refreshes the method caches' do
@@ -74,8 +74,8 @@ RSpec.describe ProjectCacheWorker do
context 'with plain readme' do
it 'refreshes the method caches' do
- allow(MarkupHelper).to receive(:gitlab_markdown?).and_return(false)
- allow(MarkupHelper).to receive(:plain?).and_return(true)
+ allow(Gitlab::MarkupHelper).to receive(:gitlab_markdown?).and_return(false)
+ allow(Gitlab::MarkupHelper).to receive(:plain?).and_return(true)
expect_any_instance_of(Repository).to receive(:refresh_method_caches)
.with(%i(readme))
diff --git a/spec/workers/project_destroy_worker_spec.rb b/spec/workers/project_destroy_worker_spec.rb
index 0b0543a5089..25508928bbf 100644
--- a/spec/workers/project_destroy_worker_spec.rb
+++ b/spec/workers/project_destroy_worker_spec.rb
@@ -4,11 +4,7 @@ require 'spec_helper'
RSpec.describe ProjectDestroyWorker do
let(:project) { create(:project, :repository, pending_delete: true) }
- let(:path) do
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- project.repository.path_to_repo
- end
- end
+ let!(:repository) { project.repository.raw }
subject { described_class.new }
@@ -17,7 +13,7 @@ RSpec.describe ProjectDestroyWorker do
subject.perform(project.id, project.first_owner.id, {})
expect(Project.all).not_to include(project)
- expect(Dir.exist?(path)).to be_falsey
+ expect(repository).not_to exist
end
it 'does not raise error when project could not be found' do
diff --git a/spec/workers/repository_check/single_repository_worker_spec.rb b/spec/workers/repository_check/single_repository_worker_spec.rb
index 205d7c08f54..dbb24cc047e 100644
--- a/spec/workers/repository_check/single_repository_worker_spec.rb
+++ b/spec/workers/repository_check/single_repository_worker_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
it 'creates missing wikis' do
project = create(:project, :wiki_enabled)
- TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
+ project.wiki.repository.raw.remove
subject.perform(project.id)
@@ -77,8 +77,8 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
it 'does not create a wiki if the main repo does not exist at all' do
project = create(:project, :repository)
- TestEnv.rm_storage_dir(project.repository_storage, project.path)
- TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
+ project.repository.raw.remove
+ project.wiki.repository.raw.remove
subject.perform(project.id)
diff --git a/spec/workers/repository_fork_worker_spec.rb b/spec/workers/repository_fork_worker_spec.rb
index 9c46b1e2a87..85dee935001 100644
--- a/spec/workers/repository_fork_worker_spec.rb
+++ b/spec/workers/repository_fork_worker_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe RepositoryForkWorker do
context 'project ID, storage and repo paths passed' do
def perform!
- subject.perform(forked_project.id, TestEnv.repos_path, project.disk_path)
+ subject.perform(forked_project.id, 'repos/path', project.disk_path)
end
it_behaves_like 'RepositoryForkWorker performing'
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 846b4455bf9..10c22b736d2 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe RunPipelineScheduleWorker do
describe '#perform' do
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }